/* $NetBSD: aes_bear.c,v 1.1 2020/06/29 23:27:52 riastradh Exp $ */
2
3 /*-
4 * Copyright (c) 2020 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
17 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26 * POSSIBILITY OF SUCH DAMAGE.
27 */
28
29 #include <sys/cdefs.h>
30 __KERNEL_RCSID(1, "$NetBSD: aes_bear.c,v 1.1 2020/06/29 23:27:52 riastradh Exp $");
31
32 #include <sys/types.h>
33 #include <sys/endian.h>
34 #include <sys/systm.h>
35
36 #include <crypto/aes/aes.h>
37 #include <crypto/aes/aes_bear.h>
38
/*
 * aesbear_setkey(rk, key, nrounds)
 *
 *	Common key-schedule helper: map the AES round count to the key
 *	size in bytes (AES-128 = 10 rounds, AES-192 = 12, AES-256 = 14)
 *	and run BearSSL's key schedule into rk.  Panics on any other
 *	round count.
 */
static void
aesbear_setkey(uint32_t rk[static 60], const void *key, uint32_t nrounds)
{
	size_t nkeybytes;

	if (nrounds == 10)
		nkeybytes = 16;
	else if (nrounds == 12)
		nkeybytes = 24;
	else if (nrounds == 14)
		nkeybytes = 32;
	else
		panic("invalid AES nrounds: %u", nrounds);

	br_aes_ct_keysched(rk, key, nkeybytes);
}
60
/*
 * aesbear_setenckey(enc, key, nrounds)
 *
 *	Expand key into encryption round keys in enc->aese_aes.aes_rk.
 */
static void
aesbear_setenckey(struct aesenc *enc, const uint8_t *key, uint32_t nrounds)
{

	aesbear_setkey(enc->aese_aes.aes_rk, key, nrounds);
}
67
/*
 * aesbear_setdeckey(dec, key, nrounds)
 *
 *	Expand key into decryption round keys in dec->aesd_aes.aes_rk.
 */
static void
aesbear_setdeckey(struct aesdec *dec, const uint8_t *key, uint32_t nrounds)
{

	/*
	 * BearSSL computes InvMixColumns on the fly -- no need for
	 * distinct decryption round keys.  The encryption key schedule
	 * is used verbatim.
	 */
	aesbear_setkey(dec->aesd_aes.aes_rk, key, nrounds);
}
78
79 static void
80 aesbear_enc(const struct aesenc *enc, const uint8_t in[static 16],
81 uint8_t out[static 16], uint32_t nrounds)
82 {
83 uint32_t sk_exp[120];
84 uint32_t q[8];
85
86 /* Expand round keys for bitslicing. */
87 br_aes_ct_skey_expand(sk_exp, nrounds, enc->aese_aes.aes_rk);
88
89 /* Load input block interleaved with garbage block. */
90 q[2*0] = le32dec(in + 4*0);
91 q[2*1] = le32dec(in + 4*1);
92 q[2*2] = le32dec(in + 4*2);
93 q[2*3] = le32dec(in + 4*3);
94 q[1] = q[3] = q[5] = q[7] = 0;
95
96 /* Transform to bitslice, decrypt, transform from bitslice. */
97 br_aes_ct_ortho(q);
98 br_aes_ct_bitslice_encrypt(nrounds, sk_exp, q);
99 br_aes_ct_ortho(q);
100
101 /* Store output block. */
102 le32enc(out + 4*0, q[2*0]);
103 le32enc(out + 4*1, q[2*1]);
104 le32enc(out + 4*2, q[2*2]);
105 le32enc(out + 4*3, q[2*3]);
106
107 /* Paranoia: Zero temporary buffers. */
108 explicit_memset(sk_exp, 0, sizeof sk_exp);
109 explicit_memset(q, 0, sizeof q);
110 }
111
112 static void
113 aesbear_dec(const struct aesdec *dec, const uint8_t in[static 16],
114 uint8_t out[static 16], uint32_t nrounds)
115 {
116 uint32_t sk_exp[120];
117 uint32_t q[8];
118
119 /* Expand round keys for bitslicing. */
120 br_aes_ct_skey_expand(sk_exp, nrounds, dec->aesd_aes.aes_rk);
121
122 /* Load input block interleaved with garbage. */
123 q[2*0] = le32dec(in + 4*0);
124 q[2*1] = le32dec(in + 4*1);
125 q[2*2] = le32dec(in + 4*2);
126 q[2*3] = le32dec(in + 4*3);
127 q[1] = q[3] = q[5] = q[7] = 0;
128
129 /* Transform to bitslice, decrypt, transform from bitslice. */
130 br_aes_ct_ortho(q);
131 br_aes_ct_bitslice_decrypt(nrounds, sk_exp, q);
132 br_aes_ct_ortho(q);
133
134 /* Store output block. */
135 le32enc(out + 4*0, q[2*0]);
136 le32enc(out + 4*1, q[2*1]);
137 le32enc(out + 4*2, q[2*2]);
138 le32enc(out + 4*3, q[2*3]);
139
140 /* Paranoia: Zero temporary buffers. */
141 explicit_memset(sk_exp, 0, sizeof sk_exp);
142 explicit_memset(q, 0, sizeof q);
143 }
144
/*
 * aesbear_cbc_enc(enc, in, out, nbytes, iv, nrounds)
 *
 *	AES-CBC encryption of nbytes (a multiple of 16; 0 is a no-op)
 *	from in to out.  The 16-byte iv supplies the initial chaining
 *	value and is overwritten with the last ciphertext block.
 *
 *	CBC encryption is inherently serial -- each block's input
 *	depends on the previous ciphertext -- so only one real block
 *	goes through the bitsliced core per iteration, paired with a
 *	zero garbage block in the odd lanes of q.
 */
static void
aesbear_cbc_enc(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{
	uint32_t sk_exp[120];
	uint32_t q[8];
	uint32_t cv0, cv1, cv2, cv3;

	KASSERT(nbytes % 16 == 0);

	/* Skip if there's nothing to do. */
	if (nbytes == 0)
		return;

	/* Expand round keys for bitslicing. */
	br_aes_ct_skey_expand(sk_exp, nrounds, enc->aese_aes.aes_rk);

	/* Initialize garbage block (odd lanes stay zero throughout). */
	q[1] = q[3] = q[5] = q[7] = 0;

	/* Load IV. */
	cv0 = le32dec(iv + 4*0);
	cv1 = le32dec(iv + 4*1);
	cv2 = le32dec(iv + 4*2);
	cv3 = le32dec(iv + 4*3);

	for (; nbytes; nbytes -= 16, in += 16, out += 16) {
		/* Load input block and apply CV. */
		q[2*0] = cv0 ^ le32dec(in + 4*0);
		q[2*1] = cv1 ^ le32dec(in + 4*1);
		q[2*2] = cv2 ^ le32dec(in + 4*2);
		q[2*3] = cv3 ^ le32dec(in + 4*3);

		/* Transform to bitslice, encrypt, transform from bitslice. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_encrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Remember ciphertext as CV and store output block. */
		cv0 = q[2*0];
		cv1 = q[2*1];
		cv2 = q[2*2];
		cv3 = q[2*3];
		le32enc(out + 4*0, cv0);
		le32enc(out + 4*1, cv1);
		le32enc(out + 4*2, cv2);
		le32enc(out + 4*3, cv3);
	}

	/* Store updated IV (the last ciphertext block). */
	le32enc(iv + 4*0, cv0);
	le32enc(iv + 4*1, cv1);
	le32enc(iv + 4*2, cv2);
	le32enc(iv + 4*3, cv3);

	/* Paranoia: Zero temporary buffers. */
	explicit_memset(sk_exp, 0, sizeof sk_exp);
	explicit_memset(q, 0, sizeof q);
}
205
/*
 * aesbear_cbc_dec(dec, in, out, nbytes, iv, nrounds)
 *
 *	AES-CBC decryption of nbytes (a multiple of 16; 0 is a no-op)
 *	from in to out.  The 16-byte iv supplies the initial chaining
 *	value and is overwritten with the last ciphertext block.
 *
 *	Unlike CBC encryption, decryption parallelizes: each plaintext
 *	block depends only on two ciphertext blocks.  We therefore walk
 *	the buffer BACKWARDS, decrypting two blocks per bitsliced call
 *	(a leading odd block is handled separately with a garbage
 *	block), and XOR in the preceding ciphertext block afterwards.
 */
static void
aesbear_cbc_dec(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{
	uint32_t sk_exp[120];
	uint32_t q[8];
	uint32_t cv0, cv1, cv2, cv3, iv0, iv1, iv2, iv3;

	KASSERT(nbytes % 16 == 0);

	/* Skip if there's nothing to do. */
	if (nbytes == 0)
		return;

	/* Expand round keys for bitslicing. */
	br_aes_ct_skey_expand(sk_exp, nrounds, dec->aesd_aes.aes_rk);

	/* Load the IV -- needed at the very end for the first block. */
	iv0 = le32dec(iv + 4*0);
	iv1 = le32dec(iv + 4*1);
	iv2 = le32dec(iv + 4*2);
	iv3 = le32dec(iv + 4*3);

	/* Load the last cipher block. */
	cv0 = le32dec(in + nbytes - 16 + 4*0);
	cv1 = le32dec(in + nbytes - 16 + 4*1);
	cv2 = le32dec(in + nbytes - 16 + 4*2);
	cv3 = le32dec(in + nbytes - 16 + 4*3);

	/* Store the updated IV now, before in/out may alias. */
	le32enc(iv + 4*0, cv0);
	le32enc(iv + 4*1, cv1);
	le32enc(iv + 4*2, cv2);
	le32enc(iv + 4*3, cv3);

	/* Handle the last cipher block separately if odd number. */
	if (nbytes % 32) {
		KASSERT(nbytes % 32 == 16);

		/* Set up the last cipher block and a garbage block. */
		q[2*0] = cv0;
		q[2*1] = cv1;
		q[2*2] = cv2;
		q[2*3] = cv3;
		q[1] = q[3] = q[5] = q[7] = 0;

		/* Decrypt. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_decrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* If this was the only cipher block, we're done. */
		nbytes -= 16;
		if (nbytes == 0)
			goto out;

		/*
		 * Otherwise, load up the penultimate cipher block, and
		 * store the output block.
		 */
		cv0 = le32dec(in + nbytes - 16 + 4*0);
		cv1 = le32dec(in + nbytes - 16 + 4*1);
		cv2 = le32dec(in + nbytes - 16 + 4*2);
		cv3 = le32dec(in + nbytes - 16 + 4*3);
		le32enc(out + nbytes + 4*0, cv0 ^ q[2*0]);
		le32enc(out + nbytes + 4*1, cv1 ^ q[2*1]);
		le32enc(out + nbytes + 4*2, cv2 ^ q[2*2]);
		le32enc(out + nbytes + 4*3, cv3 ^ q[2*3]);
	}

	for (;;) {
		KASSERT(nbytes >= 32);

		/*
		 * 1. Set up upper cipher block from cvN.
		 * 2. Load lower cipher block into cvN and set it up.
		 * 3. Decrypt.
		 */
		q[2*0 + 1] = cv0;
		q[2*1 + 1] = cv1;
		q[2*2 + 1] = cv2;
		q[2*3 + 1] = cv3;
		cv0 = q[2*0] = le32dec(in + nbytes - 32 + 4*0);
		cv1 = q[2*1] = le32dec(in + nbytes - 32 + 4*1);
		cv2 = q[2*2] = le32dec(in + nbytes - 32 + 4*2);
		cv3 = q[2*3] = le32dec(in + nbytes - 32 + 4*3);

		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_decrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Store the upper output block (chained off cvN). */
		le32enc(out + nbytes - 16 + 4*0, q[2*0 + 1] ^ cv0);
		le32enc(out + nbytes - 16 + 4*1, q[2*1 + 1] ^ cv1);
		le32enc(out + nbytes - 16 + 4*2, q[2*2 + 1] ^ cv2);
		le32enc(out + nbytes - 16 + 4*3, q[2*3 + 1] ^ cv3);

		/* Stop if we've reached the first output block. */
		nbytes -= 32;
		if (nbytes == 0)
			goto out;

		/*
		 * Load the preceding cipher block, and apply it as the
		 * chaining value to this one.
		 */
		cv0 = le32dec(in + nbytes - 16 + 4*0);
		cv1 = le32dec(in + nbytes - 16 + 4*1);
		cv2 = le32dec(in + nbytes - 16 + 4*2);
		cv3 = le32dec(in + nbytes - 16 + 4*3);
		le32enc(out + nbytes + 4*0, q[2*0] ^ cv0);
		le32enc(out + nbytes + 4*1, q[2*1] ^ cv1);
		le32enc(out + nbytes + 4*2, q[2*2] ^ cv2);
		le32enc(out + nbytes + 4*3, q[2*3] ^ cv3);
	}

out:	/* Store the first output block, chained off the original IV. */
	le32enc(out + 4*0, q[2*0] ^ iv0);
	le32enc(out + 4*1, q[2*1] ^ iv1);
	le32enc(out + 4*2, q[2*2] ^ iv2);
	le32enc(out + 4*3, q[2*3] ^ iv3);

	/* Paranoia: Zero temporary buffers. */
	explicit_memset(sk_exp, 0, sizeof sk_exp);
	explicit_memset(q, 0, sizeof q);
}
333
/*
 * aesbear_xts_update(t0, t1, t2, t3)
 *
 *	Advance the 128-bit XTS tweak in (*t0, *t1, *t2, *t3), stored
 *	as four little-endian 32-bit words with *t0 least significant:
 *	multiply by x in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1,
 *	in constant time.
 */
static inline void
aesbear_xts_update(uint32_t *t0, uint32_t *t1, uint32_t *t2, uint32_t *t3)
{
	uint32_t c0, c1, c2, c3;

	/* Extract the carry (top) bit of each word. */
	c0 = *t0 >> 31;
	c1 = *t1 >> 31;
	c2 = *t2 >> 31;
	c3 = *t3 >> 31;

	/*
	 * Shift each word up by one, propagating carries; the carry
	 * out of the top word folds back in as the reduction
	 * polynomial 0x87, selected branchlessly by the mask -c3.
	 */
	*t0 = (*t0 << 1) ^ (-c3 & 0x87);
	*t1 = (*t1 << 1) | c0;
	*t2 = (*t2 << 1) | c1;
	*t3 = (*t3 << 1) | c2;
}
348
/*
 * aesbear_xts_update_selftest()
 *
 *	Run aesbear_xts_update over a table of known input/output pairs.
 *	Returns 0 on success, -1 on the first mismatch.
 */
static int
aesbear_xts_update_selftest(void)
{
	static const struct {
		uint32_t in[4], out[4];
	} cases[] = {
		{ {1}, {2} },
		{ {0x80000000U,0,0,0}, {0,1,0,0} },
		{ {0,0x80000000U,0,0}, {0,0,1,0} },
		{ {0,0,0x80000000U,0}, {0,0,0,1} },
		{ {0,0,0,0x80000000U}, {0x87,0,0,0} },
		{ {0,0x80000000U,0,0x80000000U}, {0x87,0,1,0} },
	};
	uint32_t t[4];
	unsigned i, j;

	for (i = 0; i < sizeof(cases)/sizeof(cases[0]); i++) {
		for (j = 0; j < 4; j++)
			t[j] = cases[i].in[j];
		aesbear_xts_update(&t[0], &t[1], &t[2], &t[3]);
		for (j = 0; j < 4; j++) {
			if (t[j] != cases[i].out[j])
				return -1;
		}
	}

	/* Success! */
	return 0;
}
381
/*
 * aesbear_xts_enc(enc, in, out, nbytes, tweak, nrounds)
 *
 *	AES-XTS encryption of nbytes (a multiple of 16; 0 is a no-op)
 *	from in to out.  The 16-byte tweak is the T value for the first
 *	block; on return it holds the tweak for the block after the
 *	last one processed.
 *
 *	XTS blocks are independent, so two are bitsliced per iteration;
 *	a leading odd block is paired with a zero garbage block.
 */
static void
aesbear_xts_enc(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{
	uint32_t sk_exp[120];
	uint32_t q[8];
	uint32_t t0, t1, t2, t3, u0, u1, u2, u3;

	KASSERT(nbytes % 16 == 0);

	/* Skip if there's nothing to do. */
	if (nbytes == 0)
		return;

	/* Expand round keys for bitslicing. */
	br_aes_ct_skey_expand(sk_exp, nrounds, enc->aese_aes.aes_rk);

	/* Load tweak. */
	t0 = le32dec(tweak + 4*0);
	t1 = le32dec(tweak + 4*1);
	t2 = le32dec(tweak + 4*2);
	t3 = le32dec(tweak + 4*3);

	/* Handle the first block separately if odd number. */
	if (nbytes % 32) {
		KASSERT(nbytes % 32 == 16);

		/* Load up the first block and a garbage block. */
		q[2*0] = le32dec(in + 4*0) ^ t0;
		q[2*1] = le32dec(in + 4*1) ^ t1;
		q[2*2] = le32dec(in + 4*2) ^ t2;
		q[2*3] = le32dec(in + 4*3) ^ t3;
		q[1] = q[3] = q[5] = q[7] = 0;

		/* Encrypt two blocks. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_encrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Store the first cipher block (XEX: tweak in and out). */
		le32enc(out + 4*0, q[2*0] ^ t0);
		le32enc(out + 4*1, q[2*1] ^ t1);
		le32enc(out + 4*2, q[2*2] ^ t2);
		le32enc(out + 4*3, q[2*3] ^ t3);

		/* Advance to the next block. */
		aesbear_xts_update(&t0, &t1, &t2, &t3);
		if ((nbytes -= 16) == 0)
			goto out;
		in += 16;
		out += 16;
	}

	do {
		KASSERT(nbytes >= 32);

		/* Compute the upper tweak. */
		u0 = t0; u1 = t1; u2 = t2; u3 = t3;
		aesbear_xts_update(&u0, &u1, &u2, &u3);

		/* Load lower and upper blocks. */
		q[2*0] = le32dec(in + 4*0) ^ t0;
		q[2*1] = le32dec(in + 4*1) ^ t1;
		q[2*2] = le32dec(in + 4*2) ^ t2;
		q[2*3] = le32dec(in + 4*3) ^ t3;
		q[2*0 + 1] = le32dec(in + 16 + 4*0) ^ u0;
		q[2*1 + 1] = le32dec(in + 16 + 4*1) ^ u1;
		q[2*2 + 1] = le32dec(in + 16 + 4*2) ^ u2;
		q[2*3 + 1] = le32dec(in + 16 + 4*3) ^ u3;

		/* Encrypt two blocks. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_encrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Store lower and upper blocks. */
		le32enc(out + 4*0, q[2*0] ^ t0);
		le32enc(out + 4*1, q[2*1] ^ t1);
		le32enc(out + 4*2, q[2*2] ^ t2);
		le32enc(out + 4*3, q[2*3] ^ t3);
		le32enc(out + 16 + 4*0, q[2*0 + 1] ^ u0);
		le32enc(out + 16 + 4*1, q[2*1 + 1] ^ u1);
		le32enc(out + 16 + 4*2, q[2*2 + 1] ^ u2);
		le32enc(out + 16 + 4*3, q[2*3 + 1] ^ u3);

		/* Advance to the next pair of blocks. */
		t0 = u0; t1 = u1; t2 = u2; t3 = u3;
		aesbear_xts_update(&t0, &t1, &t2, &t3);
		in += 32;
		out += 32;
	} while (nbytes -= 32, nbytes);

out:	/* Store the updated tweak. */
	le32enc(tweak + 4*0, t0);
	le32enc(tweak + 4*1, t1);
	le32enc(tweak + 4*2, t2);
	le32enc(tweak + 4*3, t3);

	/* Paranoia: Zero temporary buffers. */
	explicit_memset(sk_exp, 0, sizeof sk_exp);
	explicit_memset(q, 0, sizeof q);
}
485
/*
 * aesbear_xts_dec(dec, in, out, nbytes, tweak, nrounds)
 *
 *	AES-XTS decryption of nbytes (a multiple of 16; 0 is a no-op)
 *	from in to out.  The 16-byte tweak is the T value for the first
 *	block; on return it holds the tweak for the block after the
 *	last one processed.
 *
 *	Mirrors aesbear_xts_enc with the bitsliced decrypt core; the
 *	tweak sequence itself is identical in both directions.
 */
static void
aesbear_xts_dec(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{
	uint32_t sk_exp[120];
	uint32_t q[8];
	uint32_t t0, t1, t2, t3, u0, u1, u2, u3;

	KASSERT(nbytes % 16 == 0);

	/* Skip if there's nothing to do. */
	if (nbytes == 0)
		return;

	/* Expand round keys for bitslicing. */
	br_aes_ct_skey_expand(sk_exp, nrounds, dec->aesd_aes.aes_rk);

	/* Load tweak. */
	t0 = le32dec(tweak + 4*0);
	t1 = le32dec(tweak + 4*1);
	t2 = le32dec(tweak + 4*2);
	t3 = le32dec(tweak + 4*3);

	/* Handle the first block separately if odd number. */
	if (nbytes % 32) {
		KASSERT(nbytes % 32 == 16);

		/* Load up the first block and a garbage block. */
		q[2*0] = le32dec(in + 4*0) ^ t0;
		q[2*1] = le32dec(in + 4*1) ^ t1;
		q[2*2] = le32dec(in + 4*2) ^ t2;
		q[2*3] = le32dec(in + 4*3) ^ t3;
		q[1] = q[3] = q[5] = q[7] = 0;

		/* Decrypt two blocks. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_decrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Store the first plaintext block (XEX: tweak in and out). */
		le32enc(out + 4*0, q[2*0] ^ t0);
		le32enc(out + 4*1, q[2*1] ^ t1);
		le32enc(out + 4*2, q[2*2] ^ t2);
		le32enc(out + 4*3, q[2*3] ^ t3);

		/* Advance to the next block. */
		aesbear_xts_update(&t0, &t1, &t2, &t3);
		if ((nbytes -= 16) == 0)
			goto out;
		in += 16;
		out += 16;
	}

	do {
		KASSERT(nbytes >= 32);

		/* Compute the upper tweak. */
		u0 = t0; u1 = t1; u2 = t2; u3 = t3;
		aesbear_xts_update(&u0, &u1, &u2, &u3);

		/* Load lower and upper blocks. */
		q[2*0] = le32dec(in + 4*0) ^ t0;
		q[2*1] = le32dec(in + 4*1) ^ t1;
		q[2*2] = le32dec(in + 4*2) ^ t2;
		q[2*3] = le32dec(in + 4*3) ^ t3;
		q[2*0 + 1] = le32dec(in + 16 + 4*0) ^ u0;
		q[2*1 + 1] = le32dec(in + 16 + 4*1) ^ u1;
		q[2*2 + 1] = le32dec(in + 16 + 4*2) ^ u2;
		q[2*3 + 1] = le32dec(in + 16 + 4*3) ^ u3;

		/* Decrypt two blocks. */
		br_aes_ct_ortho(q);
		br_aes_ct_bitslice_decrypt(nrounds, sk_exp, q);
		br_aes_ct_ortho(q);

		/* Store lower and upper blocks. */
		le32enc(out + 4*0, q[2*0] ^ t0);
		le32enc(out + 4*1, q[2*1] ^ t1);
		le32enc(out + 4*2, q[2*2] ^ t2);
		le32enc(out + 4*3, q[2*3] ^ t3);
		le32enc(out + 16 + 4*0, q[2*0 + 1] ^ u0);
		le32enc(out + 16 + 4*1, q[2*1 + 1] ^ u1);
		le32enc(out + 16 + 4*2, q[2*2 + 1] ^ u2);
		le32enc(out + 16 + 4*3, q[2*3 + 1] ^ u3);

		/* Advance to the next pair of blocks. */
		t0 = u0; t1 = u1; t2 = u2; t3 = u3;
		aesbear_xts_update(&t0, &t1, &t2, &t3);
		in += 32;
		out += 32;
	} while (nbytes -= 32, nbytes);

out:	/* Store the updated tweak. */
	le32enc(tweak + 4*0, t0);
	le32enc(tweak + 4*1, t1);
	le32enc(tweak + 4*2, t2);
	le32enc(tweak + 4*3, t3);

	/* Paranoia: Zero temporary buffers. */
	explicit_memset(sk_exp, 0, sizeof sk_exp);
	explicit_memset(q, 0, sizeof q);
}
589
/*
 * aesbear_probe()
 *
 *	Probe whether this implementation is usable: run the XTS tweak
 *	update self-test.  Returns 0 if usable, -1 on failure.
 *
 *	XXX test br_aes_ct_bitslice_decrypt
 *	XXX test br_aes_ct_bitslice_encrypt
 *	XXX test br_aes_ct_keysched
 *	XXX test br_aes_ct_ortho
 *	XXX test br_aes_ct_skey_expand
 */
static int
aesbear_probe(void)
{

	return aesbear_xts_update_selftest() ? -1 : 0;
}
605
/*
 * aes_bear_impl: AES implementation dispatch table backed by BearSSL's
 * constant-time bitsliced aes_ct code.
 */
struct aes_impl aes_bear_impl = {
	.ai_name = "BearSSL aes_ct",
	.ai_probe = aesbear_probe,
	.ai_setenckey = aesbear_setenckey,
	.ai_setdeckey = aesbear_setdeckey,
	.ai_enc = aesbear_enc,
	.ai_dec = aesbear_dec,
	.ai_cbc_enc = aesbear_cbc_enc,
	.ai_cbc_dec = aesbear_cbc_dec,
	.ai_xts_enc = aesbear_xts_enc,
	.ai_xts_dec = aesbear_xts_dec,
};
618