Lines Matching refs:rk

Cross-reference hits for the round-key vector rk, evidently from a vpaes-style NEON AES key schedule (aes_schedule_* helpers operating on uint8x16_t): small helpers first, then the encryption key schedule, then its decryption mirror. The number at the left of each hit is the source file's own line number.
208 storeroundkey(void *rkp, uint8x16_t rk)
210 vst1q_u8(rkp, rk);
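
The two hits above (lines 208 and 210) are the entire helper: it stores one 16-byte round key through a type-erased pointer, which lets the schedule code below pass its uint32_t * cursor (rk32) directly. Reconstructed, with arm_neon.h assumed for all the intrinsics in this file:

	#include <arm_neon.h>

	static void
	storeroundkey(void *rkp, uint8x16_t rk)
	{
		vst1q_u8(rkp, rk);	/* one 16-byte store */
	}
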
249 aes_schedule_low_round(uint8x16_t rk, uint8x16_t prk)
259 subbytes(&io, &jo, rk, inv, inva);
260 rk = vqtbl1q_u8(sb1[0], io) ^ vqtbl1q_u8(sb1[1], jo);
263 return rk ^ prk;
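
Lines 249-263 are the heart of the vpaes low round: subbytes() derives the two lookup indices io and jo from the field-inversion tables inv and inva, the two halves of the sb1 output table reassemble the S-box result, and the "smeared" previous key is XORed in. A sketch of the surrounding function, leaning on the file's own subbytes/inv/inva/sb1; the three smear statements do not appear in this listing and are inferred from the vpaes construction, so treat them as assumptions:

	static uint8x16_t
	aes_schedule_low_round(uint8x16_t rk, uint8x16_t prk)
	{
		uint8x16_t io, jo;

		/* smear prk: fold each word into the ones above it, then
		 * add the vpaes s63 constant (all three lines assumed) */
		prk ^= vextq_u8(vdupq_n_u8(0), prk, 12);
		prk ^= vextq_u8(vdupq_n_u8(0), prk, 8);
		prk ^= vdupq_n_u8(0x5b);

		/* SubBytes via the inversion and output tables (lines 259-260) */
		subbytes(&io, &jo, rk, inv, inva);
		rk = vqtbl1q_u8(sb1[0], io) ^ vqtbl1q_u8(sb1[1], jo);

		/* add in the smeared previous key (line 263) */
		return rk ^ prk;
	}
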
267 aes_schedule_round(uint8x16_t rk, uint8x16_t prk, uint8x16_t *rcon_rot)
276 rk32 = vreinterpretq_u32_u8(rk);
278 rk = vreinterpretq_u8_u32(rk32);
279 rk = vextq_u8(rk, rk, 1);
281 return aes_schedule_low_round(rk, prk);
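
Lines 267-281 are the full schedule round: inject the round constant, do RotWord, and hand off to the low round for SubWord and the XOR chain. The rcon_rot bookkeeping and the lane-3 broadcast (lines the matcher skipped, e.g. 277) are inferred from the neighboring hits and from the analogous 256-bit code at lines 437-439, so they are assumptions:

	static uint8x16_t
	aes_schedule_round(uint8x16_t rk, uint8x16_t prk, uint8x16_t *rcon_rot)
	{
		uint32x4_t rk32;

		/* add the round constant, rotate it for next time (assumed) */
		prk ^= *rcon_rot;
		*rcon_rot = vextq_u8(*rcon_rot, *rcon_rot, 15);

		/* RotWord: broadcast the last 32-bit word (assumed, cf.
		 * lines 437-439), then rotate it by one byte (line 279) */
		rk32 = vreinterpretq_u32_u8(rk);
		rk32 = vdupq_n_u32(vgetq_lane_u32(rk32, 3));
		rk = vreinterpretq_u8_u32(rk32);
		rk = vextq_u8(rk, rk, 1);

		/* SubWord and the schedule XORs happen in the low round */
		return aes_schedule_low_round(rk, prk);
	}
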
351 aes_schedule_192_smearhi(uint8x16_t rk)
353 uint64x2_t rk64 = vreinterpretq_u64_u8(rk);
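
Lines 351-353, with the one statement between them inferred from the name: smearhi keeps only the high 64 bits of rk and zeroes the low half, so the 192-bit loop can splice it against a fresh key with vextq_u8(prkhi, prk, 8) (line 394). A minimal sketch:

	static uint8x16_t
	aes_schedule_192_smearhi(uint8x16_t rk)
	{
		uint64x2_t rk64 = vreinterpretq_u64_u8(rk);

		rk64 = vsetq_lane_u64(0, rk64, 0);	/* clear low half (assumed) */

		return vreinterpretq_u8_u64(rk64);
	}
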
365 uint8x16_t rk; /* round key */
371 rk = aes_schedule_transform(vld1q_u8(key), ipt);
372 storeroundkey(rk32, rk);
378 rk = aes_schedule_round(rk, rk, &rcon_rot);
381 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
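
Lines 365-381 open the encryption key schedule: the user key is carried into the vpaes permuted basis by the input transform ipt and stored as round key 0, and the 128-bit case is then a plain loop of one full round per key, each mangled with the rotating byte-permutation index sr[i-- % 4] before storage. A sketch of the loop shape; the cursor advance (rk32 += 4) and the exact exit test are assumptions not visible in this listing:

	rk = aes_schedule_transform(vld1q_u8(key), ipt);	/* line 371 */
	storeroundkey(rk32, rk);				/* line 372 */
	rk32 += 4;						/* assumed */

	/* AES-128 */
	for (;;) {
		rk = aes_schedule_round(rk, rk, &rcon_rot);	/* line 378 */
		if (--nrounds == 0)				/* assumed */
			break;
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 381 */
		storeroundkey(rk32, mrk);
		rk32 += 4;
	}
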
389 prk = rk;
390 rk = aes_schedule_transform(vld1q_u8(key + 8), ipt);
391 prkhi = aes_schedule_192_smearhi(rk);
393 prk = aes_schedule_round(rk, prk, &rcon_rot);
394 rk = vextq_u8(prkhi, prk, 8);
396 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
399 rk = aes_schedule_192_smear(prkhi, prk);
400 prkhi = aes_schedule_192_smearhi(rk);
402 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
405 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
409 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
412 rk = aes_schedule_192_smear(prkhi, prk);
413 prkhi = aes_schedule_192_smearhi(rk);
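
Lines 389-413 are the 192-bit case. A 192-bit key fills only a vector and a half, so the loop carries prk (the previous full key) and prkhi (the surviving high half) and emits three round keys per iteration: one spliced from prkhi and a fresh round (line 394), one recovered by the smear (lines 399-400), and one from a second full round (line 405). A sketch of the iteration; the stores, cursor advances, and the nrounds -= 3 exit are assumptions:

	prk = rk;						/* line 389 */
	rk = aes_schedule_transform(vld1q_u8(key + 8), ipt);	/* line 390 */
	prkhi = aes_schedule_192_smearhi(rk);			/* line 391 */
	for (;;) {
		prk = aes_schedule_round(rk, prk, &rcon_rot);	/* line 393 */
		rk = vextq_u8(prkhi, prk, 8);			/* line 394: splice */
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 396 */
		storeroundkey(rk32, mrk); rk32 += 4;		/* assumed */

		rk = aes_schedule_192_smear(prkhi, prk);	/* line 399 */
		prkhi = aes_schedule_192_smearhi(rk);		/* line 400 */
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 402 */
		storeroundkey(rk32, mrk); rk32 += 4;		/* assumed */

		rk = prk = aes_schedule_round(rk, prk, &rcon_rot); /* line 405 */
		if ((nrounds -= 3) == 0)			/* assumed */
			break;
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 409 */
		storeroundkey(rk32, mrk); rk32 += 4;		/* assumed */
		rk = aes_schedule_192_smear(prkhi, prk);	/* line 412 */
		prkhi = aes_schedule_192_smearhi(rk);		/* line 413 */
	}
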
420 prk = rk;
421 rk = aes_schedule_transform(vld1q_u8(key + 16), ipt);
423 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
426 pprk = rk;
429 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
432 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);
437 rk = vreinterpretq_u8_u32(
439 vgetq_lane_u32(vreinterpretq_u32_u8(rk),
441 rk = aes_schedule_low_round(rk, pprk);
448 storeroundkey(rk32, aes_schedule_mangle_last_enc(rk, sr[i-- % 4]));
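
Lines 420-448 finish the encryption schedule with the 256-bit case and the final store. The second key half enters through the same input transform, and the loop then alternates a high round (a full aes_schedule_round, line 429) with a low round: lines 437-441 broadcast the last word of rk and call aes_schedule_low_round directly against pprk, the key from two steps back. That is exactly the AES-256 rule of SubWord with no RotWord and no round constant. Afterwards, line 448 stores the last round key through aes_schedule_mangle_last_enc, which folds in the output transform. A sketch, with stores, cursor advances, and the exit test assumed:

	prk = rk;						/* line 420 */
	rk = aes_schedule_transform(vld1q_u8(key + 16), ipt);	/* line 421 */
	for (;;) {
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 423 */
		storeroundkey(rk32, mrk); rk32 += 4;		/* assumed */
		pprk = rk;					/* line 426 */

		/* high round: full round, with rcon */
		rk = prk = aes_schedule_round(rk, prk, &rcon_rot); /* line 429 */
		if ((nrounds -= 2) == 0)			/* assumed */
			break;
		mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4]);	/* line 432 */
		storeroundkey(rk32, mrk); rk32 += 4;		/* assumed */

		/* low round: SubWord only -- no byte rotate, no rcon */
		rk = vreinterpretq_u8_u32(vdupq_n_u32(		/* lines 437-439 */
		    vgetq_lane_u32(vreinterpretq_u32_u8(rk), 3)));
		rk = aes_schedule_low_round(rk, pprk);		/* line 441 */
	}
	storeroundkey(rk32,					/* line 448 */
	    aes_schedule_mangle_last_enc(rk, sr[i-- % 4]));
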
457 uint8x16_t rk; /* round key */
465 rk = aes_schedule_transform(ork, ipt);
476 rk = aes_schedule_round(rk, rk, &rcon_rot);
479 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
487 prk = rk;
488 rk = aes_schedule_transform(vld1q_u8(key + 8), ipt);
489 prkhi = aes_schedule_192_smearhi(rk);
491 prk = aes_schedule_round(rk, prk, &rcon_rot);
492 rk = vextq_u8(prkhi, prk, 8);
494 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
497 rk = aes_schedule_192_smear(prkhi, prk);
498 prkhi = aes_schedule_192_smearhi(rk);
500 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
503 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
507 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
510 rk = aes_schedule_192_smear(prkhi, prk);
511 prkhi = aes_schedule_192_smearhi(rk);
518 prk = rk;
519 rk = aes_schedule_transform(vld1q_u8(key + 16), ipt);
521 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
524 pprk = rk;
527 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
530 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4]);
535 rk = vreinterpretq_u8_u32(
537 vgetq_lane_u32(vreinterpretq_u32_u8(rk),
539 rk = aes_schedule_low_round(rk, pprk);
546 storeroundkey(rk32, aes_schedule_mangle_last_dec(rk));
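
Lines 457-546 are the decryption schedule, and they mirror the encryption path hit for hit: the same 128/192/256 round structure, with aes_schedule_mangle_dec substituted for aes_schedule_mangle_enc and, at line 546, aes_schedule_mangle_last_dec, which takes no rotation index. In the vpaes construction the dec mangling bakes the inverse MixColumns into each stored key, and the schedule is laid out in the reverse order of use, so the rk32 cursor presumably walks backward here; none of that bookkeeping shows up in this listing, so it is an inference. The one visible structural difference is the preamble at line 465, where the key is kept in ork before the input transform:

	ork = vld1q_u8(key);			/* assumed: keep the raw key */
	rk = aes_schedule_transform(ork, ipt);	/* line 465 */
	/* ork, suitably byte-permuted, presumably supplies the round key
	 * that decryption applies last (encryption's round key 0) */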