Lines matching defs:rk (the round-key variable). Each entry is the source line number followed by the matching line of the AES key-schedule code, written with SSE2/SSSE3 intrinsics.
154 storeroundkey(uint32_t *rk32, __m128i rk)
156 _mm_store_si128((void *)rk32, rk);
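
storeroundkey() (154-156) writes one 16-byte round key into the flat uint32_t schedule; _mm_store_si128 is the aligned store, so the destination must be 16-byte aligned. A minimal, self-contained usage sketch; the schedule layout and the loop below are illustrative, not taken from the listed source:

#include <stdint.h>
#include <emmintrin.h>		/* SSE2: __m128i, _mm_store_si128 */

static void
storeroundkey(uint32_t *rk32, __m128i rk)
{
	_mm_store_si128((void *)rk32, rk);
}

/* Illustrative only: store nrounds + 1 expanded round keys, advancing the
 * destination by four 32-bit words (16 bytes) per key; rk32 is assumed to
 * be 16-byte aligned. */
static void
store_schedule(uint32_t *rk32, const __m128i *keys, unsigned nrounds)
{
	unsigned i;

	for (i = 0; i <= nrounds; i++, rk32 += 4)
		storeroundkey(rk32, keys[i]);
}
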
208 aes_schedule_low_round(__m128i rk, __m128i prk)
218 subbytes(&io, &jo, rk);
219 rk = _mm_shuffle_epi8(sb1[0].m, io) ^ _mm_shuffle_epi8(sb1[1].m, jo);
222 return rk ^ prk;
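
aes_schedule_low_round() (208-222) performs the substitution step with two PSHUFB lookups into the split sb1 tables (219) and XORs in prk (222); in vpaes-style schedules prk has first been smeared, i.e. prefix-XORed across its four 32-bit words, in statements that fall outside this listing. At the word level this is the standard AES expansion, where only the first word of each round key needs the SubWord/RotWord/rcon core and the remaining words form an XOR chain. A scalar sketch; the helper name and the callback for the core are mine, not from the source:

#include <stdint.h>

/*
 * Hypothetical scalar counterpart of one schedule round: prev[] is the
 * previous round key, core() stands for SubWord(RotWord(w)) ^ rcon (the part
 * the vector code builds with subbytes() plus the rotation and round-constant
 * handling in aes_schedule_round()), and the XOR chain is what the vectorized
 * smear of prk delivers in a single register.
 */
static void
expand_round_words(uint32_t w[4], const uint32_t prev[4],
    uint32_t (*core)(uint32_t))
{
	w[0] = prev[0] ^ core(prev[3]);
	w[1] = prev[1] ^ w[0];
	w[2] = prev[2] ^ w[1];
	w[3] = prev[3] ^ w[2];
}
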
226 aes_schedule_round(__m128i rk, __m128i prk, __m128i *rcon_rot)
234 rk = _mm_shuffle_epi32(rk, 0xff);
235 rk = _mm_alignr_epi8(rk, rk, 1);
237 return aes_schedule_low_round(rk, prk);
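
The pair at 234-235 is the vector form of RotWord: _mm_shuffle_epi32(rk, 0xff) broadcasts the highest 32-bit word into all four lanes, and _mm_alignr_epi8(rk, rk, 1) rotates the register by one byte, which, because every lane is now identical, rotates each lane by one byte. The round-constant handling through rcon_rot happens on lines not shown here. A standalone check of the rotation; the test values are arbitrary:

#include <inttypes.h>
#include <stdio.h>
#include <string.h>
#include <tmmintrin.h>		/* SSSE3: _mm_alignr_epi8 */

/* Build with -mssse3.  Confirms that the shuffle/alignr pair leaves every
 * lane equal to the last schedule word rotated by one byte (RotWord). */
int
main(void)
{
	uint32_t w[4] = { 0x01020304, 0x05060708, 0x090a0b0c, 0x0d0e0f10 };
	uint32_t out[4], expect;
	__m128i rk;
	unsigned i;

	memcpy(&rk, w, 16);
	rk = _mm_shuffle_epi32(rk, 0xff);	/* broadcast w[3] to all lanes */
	rk = _mm_alignr_epi8(rk, rk, 1);	/* rotate the register by 1 byte */
	memcpy(out, &rk, 16);

	expect = (w[3] >> 8) | (w[3] << 24);	/* RotWord on a little-endian word */
	for (i = 0; i < 4; i++)
		printf("%08" PRIx32 " (expect %08" PRIx32 ")\n", out[i], expect);
	return 0;
}
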
291 __m128i rk;
293 rk = prkhi;
294 rk ^= _mm_shuffle_epi32(prkhi, 0x80);
295 rk ^= _mm_shuffle_epi32(prk, 0xfe);
297 return rk;
301 aes_schedule_192_smearhi(__m128i rk)
303 return (__m128i)_mm_movehl_ps((__m128)rk, _mm_setzero_ps());
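
aes_schedule_192_smearhi() (301-303) keeps the two high words of rk and zeroes the low half, and aes_schedule_192_smear() (291-297) folds that into the previous round state. My reading of the two shuffles at 294-295, written out at the word level with word 0 as the lowest lane (the names a through d are illustrative): with prkhi = {0, 0, c, d} and prk = {., ., a, b}, the result is {a, b, b^c, b^c^d}, the smear step of the AES-192 schedule.

#include <stdint.h>

/*
 * Scalar sketch (not from the listed source) of aes_schedule_192_smear():
 *   rk  = prkhi;                                {0, 0, c,   d    }
 *   rk ^= shuffle(prkhi, 0x80) = {0, 0, 0, c};  {0, 0, c,   c^d  }
 *   rk ^= shuffle(prk, 0xfe)   = {a, b, b, b};  {a, b, b^c, b^c^d}
 */
static void
smear192_words(const uint32_t prkhi[4], const uint32_t prk[4], uint32_t out[4])
{
	out[0] = prk[2];			/* a */
	out[1] = prk[3];			/* b */
	out[2] = prk[3] ^ prkhi[2];		/* b ^ c */
	out[3] = prk[3] ^ prkhi[2] ^ prkhi[3];	/* b ^ c ^ d */
}
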
311 __m128i rk; /* round key */
317 rk = aes_schedule_transform(_mm_loadu_epi8(key), ipt);
318 storeroundkey(rk32, rk);
324 rk = aes_schedule_round(rk, rk, &rcon_rot);
327 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
335 prk = rk;
336 rk = aes_schedule_transform(_mm_loadu_epi8(key + 8), ipt);
337 prkhi = aes_schedule_192_smearhi(rk);
339 prk = aes_schedule_round(rk, prk, &rcon_rot);
340 rk = _mm_alignr_epi8(prk, prkhi, 8);
342 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
345 rk = aes_schedule_192_smear(prkhi, prk);
346 prkhi = aes_schedule_192_smearhi(rk);
348 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
351 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
355 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
358 rk = aes_schedule_192_smear(prkhi, prk);
359 prkhi = aes_schedule_192_smearhi(rk);
366 prk = rk;
367 rk = aes_schedule_transform(_mm_loadu_epi8(key + 16), ipt);
369 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
372 pprk = rk;
375 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
378 mrk = aes_schedule_mangle_enc(rk, sr[i-- % 4].m);
383 rk = _mm_shuffle_epi32(rk, 0xff);
384 rk = aes_schedule_low_round(rk, pprk);
391 storeroundkey(rk32, aes_schedule_mangle_last_enc(rk, sr[i-- % 4].m));
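
The routine at 311-391 builds the encryption schedule: 317-327 cover AES-128, 335-359 AES-192 (via the smear helpers above), 366-384 AES-256 (which alternates a full round with a low round applied to the broadcast last word), and 391 stores the final round key through aes_schedule_mangle_last_enc. The number of round keys stored follows FIPS 197; a hypothetical helper, not in the listed source, for the corresponding round counts:

#include <stddef.h>

/* Hypothetical helper: AES round counts per FIPS 197.  The expanded schedule
 * holds nrounds + 1 round keys of 16 bytes each. */
static unsigned
aes_nrounds(size_t key_bytes)
{
	switch (key_bytes) {
	case 16:	return 10;	/* AES-128 */
	case 24:	return 12;	/* AES-192 */
	case 32:	return 14;	/* AES-256 */
	default:	return 0;	/* unsupported key size */
	}
}
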
400 __m128i rk; /* round key */
408 rk = aes_schedule_transform(ork, ipt);
419 rk = aes_schedule_round(rk, rk, &rcon_rot);
422 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
430 prk = rk;
431 rk = aes_schedule_transform(_mm_loadu_epi8(key + 8), ipt);
432 prkhi = aes_schedule_192_smearhi(rk);
434 prk = aes_schedule_round(rk, prk, &rcon_rot);
435 rk = _mm_alignr_epi8(prk, prkhi, 8);
437 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
440 rk = aes_schedule_192_smear(prkhi, prk);
441 prkhi = aes_schedule_192_smearhi(rk);
443 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
446 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
450 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
453 rk = aes_schedule_192_smear(prkhi, prk);
454 prkhi = aes_schedule_192_smearhi(rk);
461 prk = rk;
462 rk = aes_schedule_transform(_mm_loadu_epi8(key + 16), ipt);
464 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
467 pprk = rk;
470 rk = prk = aes_schedule_round(rk, prk, &rcon_rot);
473 mrk = aes_schedule_mangle_dec(rk, sr[i-- % 4].m);
478 rk = _mm_shuffle_epi32(rk, 0xff);
479 rk = aes_schedule_low_round(rk, pprk);
486 storeroundkey(rk32, aes_schedule_mangle_last_dec(rk));
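
The decryption schedule at 400-486 walks the same 128/192/256 structure but mangles each round key with aes_schedule_mangle_dec and finishes with aes_schedule_mangle_last_dec (486), so the stored keys come out in the form and order the inverse rounds consume. For contrast only, the conventional way to derive a decryption schedule with the AES-NI intrinsics rather than pshufb-based mangling; this is not the listed code's method:

#include <wmmintrin.h>		/* AES-NI: _mm_aesimc_si128 */

/*
 * Conventional "equivalent inverse cipher" schedule with AES-NI, shown only
 * for contrast with mangle_dec/mangle_last_dec above: reverse the encryption
 * round keys and pass the middle ones through InvMixColumns.  Build with
 * -maes; enc[] holds nrounds + 1 encryption round keys.
 */
static void
aesni_invert_schedule(__m128i *dec, const __m128i *enc, unsigned nrounds)
{
	unsigned i;

	dec[0] = enc[nrounds];
	for (i = 1; i < nrounds; i++)
		dec[i] = _mm_aesimc_si128(enc[nrounds - i]);
	dec[nrounds] = enc[0];
}
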