Home | Sort by: relevance | last modified time | path
    Searched refs: Xi (Results 1 - 25 of 146) sorted by relevance

1 2 3 4 5 6

  /src/crypto/external/apache2/openssl/dist/providers/implementations/ciphers/
cipher_aes_gcm_hw_armv8.inc 16 const void *key, unsigned char ivec[16], u64 *Xi)
24 unroll8_eor3_aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
26 aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
31 unroll8_eor3_aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
33 aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
38 unroll8_eor3_aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
40 aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
48 const void *key, unsigned char ivec[16], u64 *Xi)
56 unroll8_eor3_aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
58 aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key)
    [all...]
cipher_aes_gcm_hw_vaes_avx512.inc 42 void ossl_gcm_gmult_avx512(u64 Xi[2], const void *gcm128ctx);
68 gcmctx->Xi.u[0] = 0; /* AAD hash */
69 gcmctx->Xi.u[1] = 0;
112 gcmctx->Xi.c[15 - ares] ^= *(aad++);
118 ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
137 gcmctx->Xi.c[15 - i] ^= aad[i];
159 ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
184 memcpy(tag, gcmctx->Xi.c,
185 ctx->taglen <= sizeof(gcmctx->Xi.c) ? ctx->taglen :
186 sizeof(gcmctx->Xi.c))
    [all...]
  /src/crypto/external/apache2/openssl/dist/crypto/modes/asm/
ghash-x86_64.pl 138 $Xi="%rdi";
262 movzb 15($Xi),$Zlo
265 &loop ($Xi);
267 mov $Zlo,8($Xi)
268 mov $Zhi,($Xi)
345 &mov ($Zlo,"8($Xi)");
346 &mov ($Zhi,"0($Xi)");
356 &mov ("($Xi)",$Zhi);
357 &mov ("8($Xi)","%rdx");
392 &mov ($dat,"$j($Xi)") if (--$j%4==0)
    [all...]
ghash-riscv64-zvkg.pl 107 # void gcm_gmult_rv64i_zvkg(u64 Xi[2], const u128 Htable[16]);
109 # input: Xi: current hash value
111 # output: Xi: next hash value Xi
113 my ($Xi,$Htable) = ("a0","a1");
123 @{[vle32_v $VD, $Xi]}
125 @{[vse32_v $VD, $Xi]}
132 # void gcm_ghash_rv64i_zvkg(u64 Xi[2], const u128 Htable[16],
135 # input: Xi: current hash value
139 # output: Xi: Xi+1 (next hash value Xi
    [all...]
ghash-x86.pl 273 &mov ($inp,&wparam(0)); # load Xi
276 &mov ($Zhh,&DWP(0,$inp)); # load Xi[16]
283 &mov (&DWP(0,"esp"),$Zhh); # copy Xi[16] on stack
306 &mov ($Zll,&wparam(0)); # load Xi
313 &mov ($Zhh,&DWP(0,$Zll)); # load Xi[16]
344 &mov ($inp,&wparam(0)); # load Xi
434 &mov ($inp,&wparam(0)); # load Xi
446 &mov ($inp,&wparam(0)); # load Xi
458 &mov ($Zhh,&wparam(0)); # load Xi
472 &mov ($Zll,&DWP(12,$Zhh)); # load Xi[16
    [all...]
ghash-s390x.pl 67 $Xi="%r2"; # argument block
76 $xi="%r10";
96 aghi $Xi,-1
101 lg $Zlo,8+1($Xi) # Xi
119 # Chaining Value (XI) 128byte
121 lmg %r0,%r1,0($Xi)
130 stmg %r0,%r1,0($Xi)
141 aghi $Xi,-1
146 lg $Zlo,8+1($Xi) # X
    [all...]
ghash-riscv64.pl 131 # void gcm_gmult_rv64i_zbc(u64 Xi[2], const u128 Htable[16]);
132 # void gcm_gmult_rv64i_zbc__zbkb(u64 Xi[2], const u128 Htable[16]);
134 # input: Xi: current hash value
136 # output: Xi: next hash value Xi
138 # Compute GMULT (Xi*H mod f) using the Zbc (clmul) and Zbb (basic bit manip)
147 my ($Xi,$Htable,$x0,$x1,$y0,$y1) = ("a0","a1","a4","a5","a6","a7");
155 # Load Xi and bit-reverse it
156 ld $x0, 0($Xi)
157 ld $x1, 8($Xi)
    [all...]
aesni-gcm-x86_64.pl 85 $Z0,$Z1,$Z2,$Z3,$Xi) = map("%xmm$_",(0..8));
143 vpxor $Z0,$Xi,$Xi # modulo-scheduled
154 vpxor 16+8(%rsp),$Xi,$Xi # modulo-scheduled [vpxor $Z3,$Xi,$Xi]
218 vpxor 0x70+8(%rsp),$Xi,$Xi # accumulate I[0]
232 vpclmulqdq \$0x10,$Hkey,$Xi,$Z
    [all...]
ghash-riscv64-zvkb-zvbc.pl 118 # void gcm_gmult_rv64i_zvkb_zvbc(u64 Xi[2], const u128 Htable[16]);
120 # input: Xi: current hash value
122 # output: Xi: next hash value Xi = (Xi * H mod f)
124 my ($Xi,$Htable,$TMP0,$TMP1,$TMP2,$TMP3,$TMP4) = ("a0","a1","t0","t1","t2","t3","t4");
141 add $Xi, $Xi, 8
146 @{[vlse64_v $V5, $Xi, $TMP4]} # vlse64.v v5, (a0), t4
229 @{[vsse64_v $V2, $Xi, $TMP4]} # vsse64.v v2, (a0), t
    [all...]
ghash-c64xplus.pl 66 || MV $Xip,${xip} ; reassign Xi
70 || LDBU *++${xip}[15],$x1 ; Xi[15]
72 || LDBU *--${xip},$x0 ; Xi[14]
94 || MV $Xip,${xip} ; reassign Xi
113 || [B0] XOR $H0x,$Z0,$Z0 ; Xi^=inp
118 || [B0] SHRU $Z1,24,$xia ; Xi[15], avoid cross-path stall
120 || [B0] SHRU $Z1,16,$x0 ; Xi[14]
125 || [B0] MV $Z0,$xia ; Xi[15], avoid cross-path stall
127 || [B0] SHRU $Z0,8,$x0 ; Xi[14]
163 XORMPY $H0,$xia,$H0x ; 0 ; H·(Xi[i]<<1
    [all...]
  /src/crypto/external/bsd/openssl/dist/crypto/modes/asm/
ghash-x86_64.pl 138 $Xi="%rdi";
262 movzb 15($Xi),$Zlo
265 &loop ($Xi);
267 mov $Zlo,8($Xi)
268 mov $Zhi,($Xi)
345 &mov ($Zlo,"8($Xi)");
346 &mov ($Zhi,"0($Xi)");
356 &mov ("($Xi)",$Zhi);
357 &mov ("8($Xi)","%rdx");
392 &mov ($dat,"$j($Xi)") if (--$j%4==0)
    [all...]
ghash-x86.pl 273 &mov ($inp,&wparam(0)); # load Xi
276 &mov ($Zhh,&DWP(0,$inp)); # load Xi[16]
283 &mov (&DWP(0,"esp"),$Zhh); # copy Xi[16] on stack
306 &mov ($Zll,&wparam(0)); # load Xi
313 &mov ($Zhh,&DWP(0,$Zll)); # load Xi[16]
344 &mov ($inp,&wparam(0)); # load Xi
434 &mov ($inp,&wparam(0)); # load Xi
446 &mov ($inp,&wparam(0)); # load Xi
458 &mov ($Zhh,&wparam(0)); # load Xi
472 &mov ($Zll,&DWP(12,$Zhh)); # load Xi[16
    [all...]
ghash-s390x.pl 67 $Xi="%r2"; # argument block
76 $xi="%r10";
103 la %r1,0($Xi) # H lies right after Xi in gcm128_context
115 aghi $Xi,-1
120 lg $Zlo,8+1($Xi) # Xi
136 la %r1,0($Xi) # H lies right after Xi in gcm128_context
149 aghi $Xi,-
    [all...]
aesni-gcm-x86_64.pl 83 $Z0,$Z1,$Z2,$Z3,$Xi) = map("%xmm$_",(0..8));
141 vpxor $Z0,$Xi,$Xi # modulo-scheduled
152 vpxor 16+8(%rsp),$Xi,$Xi # modulo-scheduled [vpxor $Z3,$Xi,$Xi]
216 vpxor 0x70+8(%rsp),$Xi,$Xi # accumulate I[0]
230 vpclmulqdq \$0x10,$Hkey,$Xi,$Z
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/modes/asm/
ghash-x86_64.pl 136 $Xi="%rdi";
259 movzb 15($Xi),$Zlo
262 &loop ($Xi);
264 mov $Zlo,8($Xi)
265 mov $Zhi,($Xi)
341 &mov ($Zlo,"8($Xi)");
342 &mov ($Zhi,"0($Xi)");
352 &mov ("($Xi)",$Zhi);
353 &mov ("8($Xi)","%rdx");
388 &mov ($dat,"$j($Xi)") if (--$j%4==0)
    [all...]
ghash-x86.pl 274 &mov ($inp,&wparam(0)); # load Xi
277 &mov ($Zhh,&DWP(0,$inp)); # load Xi[16]
284 &mov (&DWP(0,"esp"),$Zhh); # copy Xi[16] on stack
307 &mov ($Zll,&wparam(0)); # load Xi
314 &mov ($Zhh,&DWP(0,$Zll)); # load Xi[16]
345 &mov ($inp,&wparam(0)); # load Xi
435 &mov ($inp,&wparam(0)); # load Xi
447 &mov ($inp,&wparam(0)); # load Xi
459 &mov ($Zhh,&wparam(0)); # load Xi
473 &mov ($Zll,&DWP(12,$Zhh)); # load Xi[16
    [all...]
ghash-s390x.pl 65 $Xi="%r2"; # argument block
74 $xi="%r10";
101 la %r1,0($Xi) # H lies right after Xi in gcm128_context
113 aghi $Xi,-1
118 lg $Zlo,8+1($Xi) # Xi
134 la %r1,0($Xi) # H lies right after Xi in gcm128_context
147 aghi $Xi,-
    [all...]
aesni-gcm-x86_64.pl 81 $Z0,$Z1,$Z2,$Z3,$Xi) = map("%xmm$_",(0..8));
139 vpxor $Z0,$Xi,$Xi # modulo-scheduled
150 vpxor 16+8(%rsp),$Xi,$Xi # modulo-scheduled [vpxor $Z3,$Xi,$Xi]
214 vpxor 0x70+8(%rsp),$Xi,$Xi # accumulate I[0]
228 vpclmulqdq \$0x10,$Hkey,$Xi,$Z
    [all...]
  /src/crypto/external/apache2/openssl/dist/crypto/sha/asm/
sha1-mb-x86_64.pl 101 @Xi=map("%xmm$_",(10..14));
107 @Xi=map("%xmm$_",(0..4));
140 movd (@ptr[0]),@Xi[0]
142 movd (@ptr[1]),@Xi[2] # borrow @Xi[2]
144 movd (@ptr[2]),@Xi[3] # borrow @Xi[3]
146 movd (@ptr[3]),@Xi[4] # borrow @Xi[4]
148 punpckldq @Xi[3],@Xi[0
    [all...]
  /src/crypto/external/bsd/openssl/dist/crypto/sha/asm/
sha1-mb-x86_64.pl 101 @Xi=map("%xmm$_",(10..14));
107 @Xi=map("%xmm$_",(0..4));
140 movd (@ptr[0]),@Xi[0]
142 movd (@ptr[1]),@Xi[2] # borrow @Xi[2]
144 movd (@ptr[2]),@Xi[3] # borrow @Xi[3]
146 movd (@ptr[3]),@Xi[4] # borrow @Xi[4]
148 punpckldq @Xi[3],@Xi[0
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/sha/asm/
sha1-mb-x86_64.pl 93 @Xi=map("%xmm$_",(10..14));
99 @Xi=map("%xmm$_",(0..4));
132 movd (@ptr[0]),@Xi[0]
134 movd (@ptr[1]),@Xi[2] # borrow @Xi[2]
136 movd (@ptr[2]),@Xi[3] # borrow @Xi[3]
138 movd (@ptr[3]),@Xi[4] # borrow @Xi[4]
140 punpckldq @Xi[3],@Xi[0
    [all...]
  /src/crypto/external/apache2/openssl/dist/crypto/modes/
gcm128.c 168 static void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])
175 nlo = ((const u8 *)Xi)[15];
197 nlo = ((const u8 *)Xi)[cnt];
215 Xi[0] = BSWAP8(Z.hi);
216 Xi[1] = BSWAP8(Z.lo);
218 u8 *p = (u8 *)Xi;
230 Xi[0] = Z.hi;
231 Xi[1] = Z.lo;
246 static void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
256 nlo = ((const u8 *)Xi)[15]
    [all...]
  /src/crypto/external/bsd/openssl/dist/providers/implementations/ciphers/
cipher_aes_gcm_hw_armv8.inc 16 const void *key, unsigned char ivec[16], u64 *Xi)
25 aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
28 aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
31 aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
38 const void *key, unsigned char ivec[16], u64 *Xi)
47 aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
50 aes_gcm_dec_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
53 aes_gcm_dec_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
  /src/crypto/external/bsd/openssl/dist/crypto/modes/
gcm128.c 104 static void gcm_gmult_8bit(u64 Xi[2], const u128 Htable[256])
107 const u8 *xi = (const u8 *)Xi + 15; local
108 size_t rem, n = *xi;
181 if ((u8 *)Xi == xi)
184 n = *(--xi);
197 Xi[0] = BSWAP8(Z.hi);
198 Xi[1] = BSWAP8(Z.lo);
200 u8 *p = (u8 *)Xi;
567 const long *xi = (const long *)Xi; local
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/modes/
gcm128.c 102 static void gcm_gmult_8bit(u64 Xi[2], const u128 Htable[256])
105 const u8 *xi = (const u8 *)Xi + 15; local
106 size_t rem, n = *xi;
182 if ((u8 *)Xi == xi)
185 n = *(--xi);
198 Xi[0] = BSWAP8(Z.hi);
199 Xi[1] = BSWAP8(Z.lo);
201 u8 *p = (u8 *)Xi;
577 const long *xi = (const long *)Xi; local
    [all...]

Completed in 76 milliseconds

1 2 3 4 5 6