HomeSort by: relevance | last modified time | path
    Searched refs:H2 (Results 1 - 25 of 47) sorted by relevance

1 2

  /src/crypto/external/apache2/openssl/dist/crypto/poly1305/asm/
poly1305-x86_64.pl 105 my ($h0,$h1,$h2)=("%r14","%rbx","%rbp");
108 # input: copy of $r1 in %rax, $h0-$h2, $r0-$r1
109 # output: $h0-$h2 *= $r0-$r1
127 mov $h2,$h1 # borrow $h1
131 imulq $s1,$h1 # h2*s1
136 imulq $r0,$h2 # h2*r0
139 adc $h2,$d3
142 mov $d3,$h2
144 and \$3,$h2
    [all...]
poly1305-s390x.pl 128 my ($d0hi,$d0lo,$d1hi,$d1lo,$t0,$h0,$t1,$h1,$h2) = map("%r$_",(6..14));
143 lg ($h2,"16($ctx)");
169 alcgr ($h2,$padbit);
180 lgr ($d1lo,$h2);
187 msgr ($d1lo,$s1); # h2*s1
188 msgr ($h2,$r0); # h2*r0
194 alcgr ($h2,$t1);
197 ngr ($h0,$h2);
198 srlg ($t0,$h2,2)
    [all...]
poly1305-armv4.pl 177 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
238 adcs $h2,$h2,r3
265 adcs $h2,$h2,r2
277 umlal r2,r3,$h2,$s3
280 umlal r0,r1,$h2,$s2
287 adc lr,r3,#0 @ future $h2
295 umlal r0,r1,$h2,$r0
296 umlal r2,r3,$h2,$r
    [all...]
poly1305-c64xplus.pl 32 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
69 STDW B9:B8,*${CTXA}[1] ; initialize h3:h2
123 LDDW *${CTXA}[1],B29:B28 ; load h3:h2, B28 is h2
151 || ADDU $D2,B28,$D2:$H2 ; h2+=inp[2]
161 || ADDU B25,$D2:$H2,$D2:$H2 ; ADDU $D1,$D2:$H2,$D2:$H2
    [all...]
poly1305-ppc.pl 103 my ($h0,$h1,$h2,$d0,$d1,$d2, $r0,$r1,$s1, $t0,$t1) = map("r$_",(7..12,27..31));
173 ld $h2,16($ctx)
207 adde $h2,$h2,$padbit
224 mulld $t0,$h2,$s1 # h2*5*r1
225 mulld $t1,$h2,$r0 # h2*r0
230 and $h2,$d2,$mask
235 addze $h2,$h
    [all...]
  /src/crypto/external/bsd/openssl/dist/crypto/poly1305/asm/
poly1305-x86_64.pl 105 my ($h0,$h1,$h2)=("%r14","%rbx","%rbp");
108 # input: copy of $r1 in %rax, $h0-$h2, $r0-$r1
109 # output: $h0-$h2 *= $r0-$r1
127 mov $h2,$h1 # borrow $h1
131 imulq $s1,$h1 # h2*s1
136 imulq $r0,$h2 # h2*r0
139 adc $h2,$d3
142 mov $d3,$h2
144 and \$3,$h2
    [all...]
poly1305-s390x.pl 128 my ($d0hi,$d0lo,$d1hi,$d1lo,$t0,$h0,$t1,$h1,$h2) = map("%r$_",(6..14));
143 lg ($h2,"16($ctx)");
169 alcgr ($h2,$padbit);
180 lgr ($d1lo,$h2);
187 msgr ($d1lo,$s1); # h2*s1
188 msgr ($h2,$r0); # h2*r0
194 alcgr ($h2,$t1);
197 ngr ($h0,$h2);
198 srlg ($t0,$h2,2)
    [all...]
poly1305-armv4.pl 177 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
238 adcs $h2,$h2,r3
265 adcs $h2,$h2,r2
277 umlal r2,r3,$h2,$s3
280 umlal r0,r1,$h2,$s2
287 adc lr,r3,#0 @ future $h2
295 umlal r0,r1,$h2,$r0
296 umlal r2,r3,$h2,$r
    [all...]
poly1305-c64xplus.pl 32 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
69 STDW B9:B8,*${CTXA}[1] ; initialize h3:h2
123 LDDW *${CTXA}[1],B29:B28 ; load h3:h2, B28 is h2
151 || ADDU $D2,B28,$D2:$H2 ; h2+=inp[2]
161 || ADDU B25,$D2:$H2,$D2:$H2 ; ADDU $D1,$D2:$H2,$D2:$H2
    [all...]
poly1305-ppc.pl 103 my ($h0,$h1,$h2,$d0,$d1,$d2, $r0,$r1,$s1, $t0,$t1) = map("r$_",(7..12,27..31));
173 ld $h2,16($ctx)
207 adde $h2,$h2,$padbit
224 mulld $t0,$h2,$s1 # h2*5*r1
225 mulld $t1,$h2,$r0 # h2*r0
230 and $h2,$d2,$mask
235 addze $h2,$h
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/poly1305/asm/
poly1305-x86_64.pl 103 my ($h0,$h1,$h2)=("%r14","%rbx","%rbp");
106 # input: copy of $r1 in %rax, $h0-$h2, $r0-$r1
107 # output: $h0-$h2 *= $r0-$r1
125 mov $h2,$h1 # borrow $h1
129 imulq $s1,$h1 # h2*s1
134 imulq $r0,$h2 # h2*r0
137 adc $h2,$d3
140 mov $d3,$h2
142 and \$3,$h2
    [all...]
poly1305-armv4.pl 182 my ($h0,$h1,$h2,$h3,$h4,$r0,$r1,$r2,$r3)=map("r$_",(4..12));
243 adcs $h2,$h2,r3
270 adcs $h2,$h2,r2
282 umlal r2,r3,$h2,$s3
285 umlal r0,r1,$h2,$s2
292 adc lr,r3,#0 @ future $h2
300 umlal r0,r1,$h2,$r0
301 umlal r2,r3,$h2,$r
    [all...]
poly1305-c64xplus.pl 33 ($H0,$H1,$H2,$H3,$H4,$H4a)=("A8","B8","A10","B10","B2",$LEN);
70 STDW B9:B8,*${CTXA}[1] ; initialize h3:h2
124 LDDW *${CTXA}[1],B29:B28 ; load h3:h2, B28 is h2
152 || ADDU $D2,B28,$D2:$H2 ; h2+=inp[2]
162 || ADDU B25,$D2:$H2,$D2:$H2 ; ADDU $D1,$D2:$H2,$D2:$H2
    [all...]
  /src/crypto/external/apache2/openssl/dist/crypto/modes/asm/
ghashv8-armx.pl 72 my ($t0,$t1,$t2,$xC2,$H,$Hhl,$H2)=map("q$_",(8..14));
152 veor $H2,$Xl,$t2
154 vext.8 $t1,$H2,$H2,#8 @ Karatsuba pre-processing
155 veor $t1,$t1,$H2
157 vst1.64 {$Hhl-$H2},[x0],#32 @ store Htable[1..2]
165 vpmull.p64 $Xl,$H, $H2
166 vpmull.p64 $Yl,$H2,$H2
167 vpmull2.p64 $Xh,$H, $H2
    [all...]
ghashp8-ppc.pl 73 my ($Xl1,$Xm1,$Xh1,$IN1,$H2,$H2h,$H2l)=map("v$_",(13..19));
140 vsldoi $H2,$IN1,$IN1,8
141 vsldoi $H2l,$zero,$H2,8
142 vsldoi $H2h,$H2,$zero,8
146 stvx_u $H2,r9,r3
156 vpmsumd $Xm,$IN,$H2 # H.hi·H^2.lo+H.lo·H^2.hi
157 vpmsumd $Xm1,$IN1,$H2 # H^2.hi·H^2.lo+H^2.lo·H^2.hi
188 vsldoi $H2,$Xl1,$Xl1,8
191 vsldoi $H2l,$zero,$H2,8
192 vsldoi $H2h,$H2,$zero,
    [all...]
ghash-c64xplus.pl 33 ($Z0,$Z1,$Z2,$Z3, $H0, $H1, $H2, $H3,
65 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
79 AND $H2,$FF000000,$H2u ; H2's upper byte
93 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
107 AND $H2,$FF000000,$H2u ; H2's upper byte
167 XORMPY $H2,$xia,$H2x ; 2
  /src/crypto/external/bsd/openssl/dist/crypto/modes/asm/
ghashv8-armx.pl 72 my ($t0,$t1,$t2,$xC2,$H,$Hhl,$H2)=map("q$_",(8..14));
147 veor $H2,$Xl,$t2
149 vext.8 $t1,$H2,$H2,#8 @ Karatsuba pre-processing
150 veor $t1,$t1,$H2
152 vst1.64 {$Hhl-$H2},[x0],#32 @ store Htable[1..2]
159 vpmull.p64 $Xl,$H, $H2
160 vpmull.p64 $Yl,$H2,$H2
161 vpmull2.p64 $Xh,$H, $H2
    [all...]
ghashp8-ppc.pl 73 my ($Xl1,$Xm1,$Xh1,$IN1,$H2,$H2h,$H2l)=map("v$_",(13..19));
140 vsldoi $H2,$IN1,$IN1,8
141 vsldoi $H2l,$zero,$H2,8
142 vsldoi $H2h,$H2,$zero,8
146 stvx_u $H2,r9,r3
156 vpmsumd $Xm,$IN,$H2 # H.hi·H^2.lo+H.lo·H^2.hi
157 vpmsumd $Xm1,$IN1,$H2 # H^2.hi·H^2.lo+H^2.lo·H^2.hi
188 vsldoi $H2,$Xl1,$Xl1,8
191 vsldoi $H2l,$zero,$H2,8
192 vsldoi $H2h,$H2,$zero,
    [all...]
ghash-c64xplus.pl 33 ($Z0,$Z1,$Z2,$Z3, $H0, $H1, $H2, $H3,
65 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
79 AND $H2,$FF000000,$H2u ; H2's upper byte
93 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
107 AND $H2,$FF000000,$H2u ; H2's upper byte
167 XORMPY $H2,$xia,$H2x ; 2
  /src/crypto/external/bsd/openssl.old/dist/crypto/modes/asm/
ghashv8-armx.pl 68 my ($t0,$t1,$t2,$xC2,$H,$Hhl,$H2)=map("q$_",(8..14));
135 veor $H2,$Xl,$t2
137 vext.8 $t1,$H2,$H2,#8 @ Karatsuba pre-processing
138 veor $t1,$t1,$H2
140 vst1.64 {$Hhl-$H2},[x0],#32 @ store Htable[1..2]
147 vpmull.p64 $Xl,$H, $H2
148 vpmull.p64 $Yl,$H2,$H2
149 vpmull2.p64 $Xh,$H, $H2
    [all...]
ghashp8-ppc.pl 70 my ($Xl1,$Xm1,$Xh1,$IN1,$H2,$H2h,$H2l)=map("v$_",(13..19));
137 vsldoi $H2,$IN1,$IN1,8
138 vsldoi $H2l,$zero,$H2,8
139 vsldoi $H2h,$H2,$zero,8
143 stvx_u $H2,r9,r3
153 vpmsumd $Xm,$IN,$H2 # H.hi·H^2.lo+H.lo·H^2.hi
154 vpmsumd $Xm1,$IN1,$H2 # H^2.hi·H^2.lo+H^2.lo·H^2.hi
185 vsldoi $H2,$Xl1,$Xl1,8
188 vsldoi $H2l,$zero,$H2,8
189 vsldoi $H2h,$H2,$zero,
    [all...]
ghash-c64xplus.pl 34 ($Z0,$Z1,$Z2,$Z3, $H0, $H1, $H2, $H3,
66 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
80 AND $H2,$FF000000,$H2u ; H2's upper byte
94 LDDW *${Htable}[-2],$H3:$H2 ; H.hi
108 AND $H2,$FF000000,$H2u ; H2's upper byte
168 XORMPY $H2,$xia,$H2x ; 2
  /src/external/ibm-public/postfix/dist/mantools/
postconf2man 42 if ($block =~ /<H2>/) {
43 $block =~ s/<H2><a[^>]+>([^<]+)<\/a><\/H2>/\n.SH \1\n/g;
  /src/crypto/external/apache2/openssl/dist/crypto/sha/asm/
sha512-riscv64-zvkb-zvknhb.pl 72 my ($H, $INP, $LEN, $KT, $H2, $INDEX_PATTERN) = ("a0", "a1", "a2", "a3", "t3", "t4");
96 addi $H2, $H, 16
101 @{[vluxei8_v $V24, $H2, $V1]}
211 @{[vsuxei8_v $V24, ($H2), $V1]}
  /src/sys/external/isc/libsodium/dist/src/libsodium/crypto_onetimeauth/poly1305/sse2/
poly1305_sse2.c 206 xmmi H0, H1, H2, H3, H4;
231 H2 = _mm_and_si128(MMASK, T5);
244 H2 = _mm_shuffle_epi32(T1, _MM_SHUFFLE(1, 1, 0, 0));
322 v02 = H2;
329 v12 = H2;
348 v22 = H2;
350 v32 = H2;
369 v42 = H2;
526 H2 = T2;
558 v02 = H2;
686 uint64_t h0, h1, h2, g0, g1, g2, c, nc; local
806 uint64_t h0, h1, h2; local
    [all...]

Completed in 39 milliseconds

1 2