Lines matching refs:prk (references to the previous-round-key vector prk in the NEON AES key schedule):
249 aes_schedule_low_round(uint8x16_t rk, uint8x16_t prk)
253 /* smear prk */
254 prk ^= vextq_u8(vdupq_n_u8(0), prk, 12);
255 prk ^= vextq_u8(vdupq_n_u8(0), prk, 8);
256 prk ^= s63;
263 return rk ^ prk;
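
The two vextq_u8/XOR steps at lines 254-255 shift prk up by 4 and then 8 bytes, so with prk viewed as four 32-bit words [a, b, c, d] the result is the running XOR [a, a^b, a^b^c, a^b^c^d], exactly the chain of XORs the FIPS-197 expansion applies to the previous round key; the final XOR with s63 looks like the usual vpaes-style correction for the S-box's affine constant, though that is an inference rather than something these matches show. A minimal scalar model of the smear (smear_model is illustrative only, not part of the source):

#include <stdint.h>

/* Model of lines 254-255: XOR prefix sum over the four words of prk. */
static void
smear_model(uint32_t w[4])
{
	w[1] ^= w[0];	/* a^b */
	w[2] ^= w[1];	/* a^b^c */
	w[3] ^= w[2];	/* a^b^c^d */
}
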
267 aes_schedule_round(uint8x16_t rk, uint8x16_t prk, uint8x16_t *rcon_rot)
272 prk ^= vextq_u8(*rcon_rot, vdupq_n_u8(0), 15);
281 return aes_schedule_low_round(rk, prk);
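
Line 272 injects one round-constant byte: vextq_u8(a, b, 15) yields a[15] followed by b[0..14], so with b = vdupq_n_u8(0) only byte 0 of the result is nonzero, and the top byte of the rotating *rcon_rot vector lands in the low byte of prk before the low round runs. A scalar model of the vextq_u8 semantics used throughout these lines (vext_model is illustrative only):

#include <stdint.h>

/* Model of vextq_u8(a, b, n): bytes a[n..15] followed by b[0..n-1]. */
static void
vext_model(uint8_t r[16], const uint8_t a[16], const uint8_t b[16],
    unsigned n)
{
	for (unsigned i = 0; i < 16; i++)
		r[i] = (i + n < 16) ? a[i + n] : b[i + n - 16];
}

The same primitive explains line 394 below: vextq_u8(prkhi, prk, 8) packs the high 8 bytes of prkhi with the low 8 bytes of prk into one round key.
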
333 aes_schedule_192_smear(uint8x16_t prkhi, uint8x16_t prk)
336 uint32x4_t prk32 = vreinterpretq_u32_u8(prk);
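
aes_schedule_192_smear exists because AES-192 produces key material six 32-bit words at a time while round keys are consumed four at a time, so half a block (prkhi) is always carried between iterations; the vreinterpretq_u32_u8 at line 336 lets the routine shuffle prk at word granularity. For reference, the plain FIPS-197 recurrence for Nk = 6 is sketched below; sub_rot_word() is a hypothetical stand-in for SubWord(RotWord()), and words are taken with byte 0 as the low byte so the round constant lands in byte 0.

#include <stdint.h>

uint32_t sub_rot_word(uint32_t w);	/* hypothetical: SubWord(RotWord(w)) */

/* AES-192: 6 key words expand to 52 words (13 round keys). */
static void
expand192(uint32_t w[52], const uint32_t key[6])
{
	static const uint32_t rcon[8] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80
	};

	for (unsigned i = 0; i < 6; i++)
		w[i] = key[i];
	for (unsigned i = 6; i < 52; i++) {
		uint32_t t = w[i - 1];
		if (i % 6 == 0)
			t = sub_rot_word(t) ^ rcon[i / 6 - 1];
		w[i] = w[i - 6] ^ t;
	}
}

Because the 6-word generation stride does not match the 4-word round keys, each vectorized iteration leaves half a round key over, which is what the prkhi bookkeeping at lines 393-412 and 491-510 handles.
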
366 uint8x16_t prk; /* previous round key */
389 prk = rk;
393 prk = aes_schedule_round(rk, prk, &rcon_rot);
394 rk = vextq_u8(prkhi, prk, 8);
399 rk = aes_schedule_192_smear(prkhi, prk);
405 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
412 rk = aes_schedule_192_smear(prkhi, prk);
420 prk = rk;
429 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
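
The hits at lines 366-429 come from the encryption key-schedule routine: the prkhi/192_smear block (389-412) is the AES-192 path, and the block at 420 and 429, with no prkhi involved, is the AES-256 path; the AES-128 path contributes no matches, consistent with each new 128-bit round key being a function of the current round key alone. For AES-256 the standard recurrence also applies SubWord without RotWord or a round constant at i % 8 == 4, which is what a low round with no rcon injection provides. A sketch of that Nk = 8 recurrence, under the same conventions and hypothetical helpers as the 192-bit sketch above:

#include <stdint.h>

uint32_t sub_word(uint32_t w);		/* hypothetical: SubWord(w) */
uint32_t sub_rot_word(uint32_t w);	/* hypothetical: SubWord(RotWord(w)) */

/* AES-256: 8 key words expand to 60 words (15 round keys). */
static void
expand256(uint32_t w[60], const uint32_t key[8])
{
	static const uint32_t rcon[7] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40
	};

	for (unsigned i = 0; i < 8; i++)
		w[i] = key[i];
	for (unsigned i = 8; i < 60; i++) {
		uint32_t t = w[i - 1];
		if (i % 8 == 0)
			t = sub_rot_word(t) ^ rcon[i / 8 - 1];	/* full round */
		else if (i % 8 == 4)
			t = sub_word(t);	/* low round: no rotate, no rcon */
		w[i] = w[i - 8] ^ t;
	}
}

The remaining prk matches at lines 458-527 are evidently the decryption-key setup running the same expansion, which is why the pattern of assignments repeats verbatim.
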
458 uint8x16_t prk; /* previous round key */
487 prk = rk;
491 prk = aes_schedule_round(rk, prk, &rcon_rot);
492 rk = vextq_u8(prkhi, prk, 8);
497 rk = aes_schedule_192_smear(prkhi, prk);
503 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
510 rk = aes_schedule_192_smear(prkhi, prk);
518 prk = rk;
527 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);