Home | Sort by: relevance | last modified time | path
    Searched refs:H4 (Results 1 - 25 of 33) sorted by relevance

1 2

  /src/crypto/external/apache2/openssl/dist/crypto/poly1305/asm/
poly1305-s390x.pl 225 my ($H0, $H1, $H2, $H3, $H4) = map("%v$_",(0..4));
313 vmalof ($ACC0,$H4,$S1,$ACC0);
314 vmalof ($ACC1,$H4,$S2,$ACC1);
315 vmalof ($ACC2,$H4,$S3,$ACC2);
316 vmalof ($ACC3,$H4,$S4,$ACC3);
317 vmalof ($ACC4,$H4,$R0,$ACC4);
322 vesrlg ($H4,$ACC3,26);
326 vag ($H4,$H4,$ACC4); # h3 -> h4
    [all...]
poly1305-c64xplus.pl 32 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
71 STW B8,*${CTXA}[4] ; initialize h4
142 LDW *${CTXA}[4],$H4 ; load h4
170 || ADD $PADBIT,$H4,$H4 ; h4+=padbit
173 || ADD $D3,$H4,$H4
191 MPY32 $H4,$S1,B2
    [all...]
poly1305-x86_64.pl 346 my ($H0,$H1,$H2,$H3,$H4, $T0,$T1,$T2,$T3,$T4, $D0,$D1,$D2,$D3,$D4, $MASK) =
642 vmovd $h2#d,$H4
745 vmovd $h2#d,$H4
779 vmovd 4*4($ctx),$H4
898 # d4 = h4*r0 + h3*r1 + h2*r2 + h1*r3 + h0*r4
899 # d3 = h3*r0 + h2*r1 + h1*r2 + h0*r3 + h4*5*r4
900 # d2 = h2*r0 + h1*r1 + h0*r2 + h4*5*r3 + h3*5*r4
901 # d1 = h1*r0 + h0*r1 + h4*5*r2 + h3*5*r3 + h2*5*r4
902 # d0 = h0*r0 + h4*5*r1 + h3*5*r2 + h2*5*r3 + h1*5*r4
913 vpmuludq $T4,$D4,$D4 # d4 = h4*r
    [all...]
poly1305-armv4.pl 177 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
210 addhi $h4,$h4,#1 @ 1<<128
251 addhi $h4,$h4,#1 @ padbit
272 adc $h4,$h4,#0
274 umlal r2,r3,$h4,$s1
283 mul r0,$s2,$h4
290 mul r2,$s3,$h4
    [all...]
poly1305-ppc.pl 256 my ($h0,$h1,$h2,$h3,$h4,$t0) = map("r$_",(7..12));
266 lwz $h4,16($ctx)
275 sldi $t0,$h4,40
276 srdi $h4,$h4,24
279 addze $h2,$h4
282 ld $h4,8($ctx)
287 xor $h1,$h1,$h4
293 xor $h1,$h1,$h4
297 addze $h4,$h
    [all...]
poly1305-armv8.pl 230 my ($H0,$H1,$H2,$H3,$H4) = map("v$_.2s",(24..28));
425 fmov ${H4},x14
476 fmov ${H4},x14
567 // d4 = h0*r4 + h1*r3 + h2*r2 + h3*r1 + h4*r0
568 // d3 = h0*r3 + h1*r2 + h2*r1 + h3*r0 + h4*5*r4
569 // d2 = h0*r2 + h1*r1 + h2*r0 + h3*5*r4 + h4*5*r3
570 // d1 = h0*r1 + h1*r0 + h2*5*r4 + h3*5*r3 + h4*5*r2
571 // d0 = h0*r0 + h1*5*r4 + h2*5*r3 + h3*5*r2 + h4*5*r1
678 add $IN01_4,$IN01_4,$H4
713 add $ACC4,$ACC4,$T0.2d // h3 -> h4
    [all...]
  /src/crypto/external/bsd/openssl/dist/crypto/poly1305/asm/
poly1305-s390x.pl 225 my ($H0, $H1, $H2, $H3, $H4) = map("%v$_",(0..4));
313 vmalof ($ACC0,$H4,$S1,$ACC0);
314 vmalof ($ACC1,$H4,$S2,$ACC1);
315 vmalof ($ACC2,$H4,$S3,$ACC2);
316 vmalof ($ACC3,$H4,$S4,$ACC3);
317 vmalof ($ACC4,$H4,$R0,$ACC4);
322 vesrlg ($H4,$ACC3,26);
326 vag ($H4,$H4,$ACC4); # h3 -> h4
    [all...]
poly1305-c64xplus.pl 32 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
71 STW B8,*${CTXA}[4] ; initialize h4
142 LDW *${CTXA}[4],$H4 ; load h4
170 || ADD $PADBIT,$H4,$H4 ; h4+=padbit
173 || ADD $D3,$H4,$H4
191 MPY32 $H4,$S1,B2
    [all...]
poly1305-x86_64.pl 344 my ($H0,$H1,$H2,$H3,$H4, $T0,$T1,$T2,$T3,$T4, $D0,$D1,$D2,$D3,$D4, $MASK) =
639 vmovd $h2#d,$H4
742 vmovd $h2#d,$H4
776 vmovd 4*4($ctx),$H4
895 # d4 = h4*r0 + h3*r1 + h2*r2 + h1*r3 + h0*r4
896 # d3 = h3*r0 + h2*r1 + h1*r2 + h0*r3 + h4*5*r4
897 # d2 = h2*r0 + h1*r1 + h0*r2 + h4*5*r3 + h3*5*r4
898 # d1 = h1*r0 + h0*r1 + h4*5*r2 + h3*5*r3 + h2*5*r4
899 # d0 = h0*r0 + h4*5*r1 + h3*5*r2 + h2*5*r3 + h1*5*r4
910 vpmuludq $T4,$D4,$D4 # d4 = h4*r
    [all...]
poly1305-armv4.pl 177 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
210 addhi $h4,$h4,#1 @ 1<<128
251 addhi $h4,$h4,#1 @ padbit
272 adc $h4,$h4,#0
274 umlal r2,r3,$h4,$s1
283 mul r0,$s2,$h4
290 mul r2,$s3,$h4
    [all...]
poly1305-ppc.pl 256 my ($h0,$h1,$h2,$h3,$h4,$t0) = map("r$_",(7..12));
266 lwz $h4,16($ctx)
275 sldi $t0,$h4,40
276 srdi $h4,$h4,24
279 addze $h2,$h4
282 ld $h4,8($ctx)
287 xor $h1,$h1,$h4
293 xor $h1,$h1,$h4
297 addze $h4,$h
    [all...]
poly1305-armv8.pl 219 my ($H0,$H1,$H2,$H3,$H4) = map("v$_.2s",(24..28));
411 fmov ${H4},x14
460 fmov ${H4},x14
551 // d4 = h0*r4 + h1*r3 + h2*r2 + h3*r1 + h4*r0
552 // d3 = h0*r3 + h1*r2 + h2*r1 + h3*r0 + h4*5*r4
553 // d2 = h0*r2 + h1*r1 + h2*r0 + h3*5*r4 + h4*5*r3
554 // d1 = h0*r1 + h1*r0 + h2*5*r4 + h3*5*r3 + h4*5*r2
555 // d0 = h0*r0 + h1*5*r4 + h2*5*r3 + h3*5*r2 + h4*5*r1
662 add $IN01_4,$IN01_4,$H4
697 add $ACC4,$ACC4,$T0.2d // h3 -> h4
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/poly1305/asm/
poly1305-c64xplus.pl 33 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
72 STW B8,*${CTXA}[4] ; initialize h4
143 LDW *${CTXA}[4],$H4 ; load h4
171 || ADD $PADBIT,$H4,$H4 ; h4+=padbit
174 || ADD $D3,$H4,$H4
192 MPY32 $H4,$S1,B2
    [all...]
poly1305-x86_64.pl 342 my ($H0,$H1,$H2,$H3,$H4, $T0,$T1,$T2,$T3,$T4, $D0,$D1,$D2,$D3,$D4, $MASK) =
637 vmovd $h2#d,$H4
740 vmovd $h2#d,$H4
774 vmovd 4*4($ctx),$H4
893 # d4 = h4*r0 + h3*r1 + h2*r2 + h1*r3 + h0*r4
894 # d3 = h3*r0 + h2*r1 + h1*r2 + h0*r3 + h4*5*r4
895 # d2 = h2*r0 + h1*r1 + h0*r2 + h4*5*r3 + h3*5*r4
896 # d1 = h1*r0 + h0*r1 + h4*5*r2 + h3*5*r3 + h2*5*r4
897 # d0 = h0*r0 + h4*5*r1 + h3*5*r2 + h2*5*r3 + h1*5*r4
908 vpmuludq $T4,$D4,$D4 # d4 = h4*r
    [all...]
poly1305-armv4.pl 182 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
215 addhi $h4,$h4,#1 @ 1<<128
256 addhi $h4,$h4,#1 @ padbit
277 adc $h4,$h4,#0
279 umlal r2,r3,$h4,$s1
288 mul r0,$s2,$h4
295 mul r2,$s3,$h4
    [all...]
  /src/sys/external/isc/libsodium/dist/src/libsodium/crypto_onetimeauth/poly1305/sse2/
poly1305_sse2.c 206 xmmi H0, H1, H2, H3, H4;
233 H4 = _mm_srli_epi64(T6, 40);
234 H4 = _mm_or_si128(H4, HIBIT);
246 H4 = _mm_shuffle_epi32(T2, _MM_SHUFFLE(1, 1, 0, 0));
309 T0 = H4;
314 T1 = H4;
318 T2 = H4;
324 T3 = H4;
338 T4 = H4;
    [all...]
  /src/crypto/external/apache2/openssl/dist/crypto/modes/asm/
ghashv8-armx.pl 161 my ($H3,$H34k,$H4,$H5,$H56k,$H6,$H7,$H78k,$H8) = map("q$_",(15..23));
197 veor $H4,$Yl,$t3 @ H^4
200 vext.8 $t1,$H4,$H4,#8
203 veor $t1,$t1,$H4
206 vst1.64 {$H3-$H4},[x0],#48 @ store Htable[3..5]
524 $I1,$I2,$I3,$H3,$H34,$H4,$Yl,$Ym,$Yh) = map("q$_",(4..7,15..23));
534 vld1.64 {$H3-$H4},[$Htbl] @ load twisted H^3, ..., H^4
589 vpmull.p64 $Xl,$H4,$IN @ H^4·(Xi+Ii)
591 vpmull2.p64 $Xh,$H4,$I
    [all...]
ghashp8-ppc.pl 380 $Xh3,$Xm3,$IN3,$H4l,$H4,$H4h) = map("v$_",(20..31));
434 lvx_u $H4, r9,$Htbl
491 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
531 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
571 vmr $H4, $H3
594 vmr $H4, $H2
608 vmr $H4, $H
  /src/crypto/external/bsd/openssl/dist/crypto/modes/asm/
ghashp8-ppc.pl 380 $Xh3,$Xm3,$IN3,$H4l,$H4,$H4h) = map("v$_",(20..31));
434 lvx_u $H4, r9,$Htbl
491 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
531 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
571 vmr $H4, $H3
594 vmr $H4, $H2
608 vmr $H4, $H
ghashv8-armx.pl 424 $I1,$I2,$I3,$H3,$H34,$H4,$Yl,$Ym,$Yh) = map("q$_",(4..7,15..23));
434 vld1.64 {$H3-$H4},[$Htbl] @ load twisted H^3, ..., H^4
489 vpmull.p64 $Xl,$H4,$IN @ H^4·(Xi+Ii)
491 vpmull2.p64 $Xh,$H4,$IN
544 vpmull.p64 $Xl,$H4,$IN @ H^4·(Xi+Ii)
546 vpmull2.p64 $Xh,$H4,$IN
  /src/crypto/external/bsd/openssl.old/dist/crypto/modes/asm/
ghashp8-ppc.pl 377 $Xh3,$Xm3,$IN3,$H4l,$H4,$H4h) = map("v$_",(20..31));
431 lvx_u $H4, r9,$Htbl
488 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
528 vpmsumd $Xm,$Xh,$H4 # H^4.hi·Xi.lo+H^4.lo·Xi.hi
568 vmr $H4, $H3
591 vmr $H4, $H2
605 vmr $H4, $H
ghashv8-armx.pl 412 $I1,$I2,$I3,$H3,$H34,$H4,$Yl,$Ym,$Yh) = map("q$_",(4..7,15..23));
422 vld1.64 {$H3-$H4},[$Htbl] @ load twisted H^3, ..., H^4
477 vpmull.p64 $Xl,$H4,$IN @ H^4·(Xi+Ii)
479 vpmull2.p64 $Xh,$H4,$IN
532 vpmull.p64 $Xl,$H4,$IN @ H^4·(Xi+Ii)
534 vpmull2.p64 $Xh,$H4,$IN
  /src/crypto/external/bsd/heimdal/dist/tests/kdc/
check-kdc.in 61 H4=H4.$H2
67 h4=`echo "${H4}" | tr '[A-Z]' '[a-z]'`
180 ${H4} || exit 1
208 ${kadmin} add -p foo --use-defaults foo@${H4} || exit 1
209 ${kadmin} add -p foo --use-defaults foo/host.${h4}@${H4} || exit 1
274 ${kadmin} add -p cross1 --use-defaults krbtgt/${H3}@${H4} || exit 1
275 ${kadmin} add -p cross2 --use-defaults krbtgt/${H4}@${H3} || exit
    [all...]
  /src/sys/external/bsd/acpica/dist/tests/misc/
badcode.asl 142 Device (H4)
  /src/external/apache2/llvm/dist/llvm/lib/Target/AArch64/
AArch64CallingConvention.cpp 27 AArch64::H3, AArch64::H4, AArch64::H5,

Completed in 42 milliseconds

1 2