Home | Sort by: relevance | last modified time | path
    Searched refs:Htbl (Results 1 - 25 of 33) sorted by relevance

1 2

  /src/crypto/external/apache2/openssl/dist/crypto/modes/asm/
ghash-s390x.pl 68 $Htbl="%r3";
123 lmg %r0,%r1,8*16($Htbl)
165 lg $Zlo,8($nlo,$Htbl)
166 lg $Zhi,0($nlo,$Htbl)
177 xg $Zlo,8($nhi,$Htbl)
178 xg $Zhi,0($nhi,$Htbl)
189 xg $Zlo,8($nlo,$Htbl)
191 xg $Zhi,0($nlo,$Htbl)
203 xg $Zlo,8($nhi,$Htbl)
204 xg $Zhi,0($nhi,$Htbl)
    [all...]
ghash-x86.pl 147 $Htbl = "esi";
158 &mov ($Zhh,&DWP(4,$Htbl,$Zll));
159 &mov ($Zhl,&DWP(0,$Htbl,$Zll));
160 &mov ($Zlh,&DWP(12,$Htbl,$Zll));
161 &mov ($Zll,&DWP(8,$Htbl,$Zll));
190 &xor ($Zll,&DWP(8,$Htbl,$rem));
191 &xor ($Zlh,&DWP(12,$Htbl,$rem));
192 &xor ($Zhl,&DWP(0,$Htbl,$rem));
193 &xor ($Zhh,&DWP(4,$Htbl,$rem));
222 &xor ($Zll,&DWP(8,$Htbl,$rem))
    [all...]
ghash-x86_64.pl 139 $Htbl="%rsi";
170 mov 8($Htbl,$nlo),$Zlo
171 mov ($Htbl,$nlo),$Zhi
183 xor 8($Htbl,$nhi),$Zlo
185 xor ($Htbl,$nhi),$Zhi
198 xor 8($Htbl,$nlo),$Zlo
200 xor ($Htbl,$nlo),$Zhi
213 xor 8($Htbl,$nlo),$Zlo
215 xor ($Htbl,$nlo),$Zhi
225 xor 8($Htbl,$nhi),$Zl
    [all...]
ghash-armv4.pl 99 $Htbl="r1";
204 add $Zhh,$Htbl,$nlo,lsl#4
205 ldmia $Zhh,{$Zll-$Zhh} @ load Htbl[nlo]
206 add $Thh,$Htbl,$nhi
210 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
227 add $Thh,$Htbl,$nlo,lsl#4
231 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nlo]
245 add $Thh,$Htbl,$nhi
249 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
308 add $Zhh,$Htbl,$nlo,lsl#
    [all...]
ghashp8-ppc.pl 69 my ($Xip,$Htbl,$inp,$len)=map("r$_",(3..6)); # argument block
224 lvx_u $Hl,r8,$Htbl # load pre-computed table
226 lvx_u $H, r9,$Htbl
228 lvx_u $Hh,r10,$Htbl
230 lvx_u $xC2,0,$Htbl
274 lvx_u $Hl,r8,$Htbl # load pre-computed table
277 lvx_u $H, r9,$Htbl
280 lvx_u $Hh,r10,$Htbl
283 lvx_u $xC2,0,$Htbl
297 lvx_u $H2l,r8,$Htbl # load H^
    [all...]
ghash-alpha.pl 23 # Htbl and Z.hi updates for 8 cycles per byte, measured performance is
42 $Htbl="a1";
65 addq $nlo,$Htbl,$nlo
67 addq $nhi,$Htbl,$nhi
90 addq $nlo,$Htbl,$nlo
92 addq $nhi,$Htbl,$nhi
124 addq $nlo,$Htbl,$nlo
125 addq $nhi,$Htbl,$nhi
165 addq $nlo,$Htbl,$nlo
166 addq $nhi,$Htbl,$nh
    [all...]
ghash-ia64.pl 77 (p18) add Hi[1]=Htbl,Hi[1] };;
91 (p17) add Hi[0]=Htbl,Hi[0]
103 Xi=r24; Htbl=r25;
118 { .mii; $ADDP Htbl=8,in1 // &Htbl[0].lo
138 { .mii; add Hi[1]=Htbl,Hi[1] // &Htbl[nlo].lo
162 $Htbl="in1";
197 { .mfi; $ADDP r8=0+0,$Htbl
198 $ADDP r9=0+8,$Htbl }
    [all...]
ghash-sparcv9.pl 78 $Htbl="%i1";
112 add $Htbl,8,$Htblo
123 ldx [$Htbl+$nlo],$Zhi
129 ldx [$Htbl+$nhi],$Thi
150 ldx [$Htbl+$nlo],$Thi
166 ldx [$Htbl+$nhi],$Thi
184 ldx [$Htbl+$nlo],$Thi
202 ldx [$Htbl+$nhi],$Thi
223 ldx [$Htbl+$nhi],$Thi
250 add $Htbl,8,$Htbl
    [all...]
ghash-parisc.pl 64 $Htbl="%r25";
67 $Hhh=$Htbl; # variables
136 ldo 8($Htbl),$Hll
227 ldo 12($Htbl),$Hll
228 ldo 8($Htbl),$Hlh
229 ldo 4($Htbl),$Hhl
392 ldo 8($Htbl),$Hll
494 ldo 12($Htbl),$Hll
495 ldo 8($Htbl),$Hlh
496 ldo 4($Htbl),$Hh
    [all...]
  /src/crypto/external/bsd/openssl/dist/crypto/modes/asm/
ghash-s390x.pl 68 $Htbl="%r3";
173 lg $Zlo,8($nlo,$Htbl)
174 lg $Zhi,0($nlo,$Htbl)
185 xg $Zlo,8($nhi,$Htbl)
186 xg $Zhi,0($nhi,$Htbl)
197 xg $Zlo,8($nlo,$Htbl)
199 xg $Zhi,0($nlo,$Htbl)
211 xg $Zlo,8($nhi,$Htbl)
212 xg $Zhi,0($nhi,$Htbl)
223 xg $Zlo,8($nlo,$Htbl)
    [all...]
ghash-x86.pl 147 $Htbl = "esi";
158 &mov ($Zhh,&DWP(4,$Htbl,$Zll));
159 &mov ($Zhl,&DWP(0,$Htbl,$Zll));
160 &mov ($Zlh,&DWP(12,$Htbl,$Zll));
161 &mov ($Zll,&DWP(8,$Htbl,$Zll));
190 &xor ($Zll,&DWP(8,$Htbl,$rem));
191 &xor ($Zlh,&DWP(12,$Htbl,$rem));
192 &xor ($Zhl,&DWP(0,$Htbl,$rem));
193 &xor ($Zhh,&DWP(4,$Htbl,$rem));
222 &xor ($Zll,&DWP(8,$Htbl,$rem))
    [all...]
ghash-x86_64.pl 139 $Htbl="%rsi";
170 mov 8($Htbl,$nlo),$Zlo
171 mov ($Htbl,$nlo),$Zhi
183 xor 8($Htbl,$nhi),$Zlo
185 xor ($Htbl,$nhi),$Zhi
198 xor 8($Htbl,$nlo),$Zlo
200 xor ($Htbl,$nlo),$Zhi
213 xor 8($Htbl,$nlo),$Zlo
215 xor ($Htbl,$nlo),$Zhi
225 xor 8($Htbl,$nhi),$Zl
    [all...]
ghash-armv4.pl 99 $Htbl="r1";
204 add $Zhh,$Htbl,$nlo,lsl#4
205 ldmia $Zhh,{$Zll-$Zhh} @ load Htbl[nlo]
206 add $Thh,$Htbl,$nhi
210 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
227 add $Thh,$Htbl,$nlo,lsl#4
231 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nlo]
245 add $Thh,$Htbl,$nhi
249 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
308 add $Zhh,$Htbl,$nlo,lsl#
    [all...]
ghashp8-ppc.pl 69 my ($Xip,$Htbl,$inp,$len)=map("r$_",(3..6)); # argument block
224 lvx_u $Hl,r8,$Htbl # load pre-computed table
226 lvx_u $H, r9,$Htbl
228 lvx_u $Hh,r10,$Htbl
230 lvx_u $xC2,0,$Htbl
274 lvx_u $Hl,r8,$Htbl # load pre-computed table
277 lvx_u $H, r9,$Htbl
280 lvx_u $Hh,r10,$Htbl
283 lvx_u $xC2,0,$Htbl
297 lvx_u $H2l,r8,$Htbl # load H^
    [all...]
ghash-alpha.pl 23 # Htbl and Z.hi updates for 8 cycles per byte, measured performance is
42 $Htbl="a1";
65 addq $nlo,$Htbl,$nlo
67 addq $nhi,$Htbl,$nhi
90 addq $nlo,$Htbl,$nlo
92 addq $nhi,$Htbl,$nhi
124 addq $nlo,$Htbl,$nlo
125 addq $nhi,$Htbl,$nhi
165 addq $nlo,$Htbl,$nlo
166 addq $nhi,$Htbl,$nh
    [all...]
ghash-ia64.pl 77 (p18) add Hi[1]=Htbl,Hi[1] };;
91 (p17) add Hi[0]=Htbl,Hi[0]
103 Xi=r24; Htbl=r25;
118 { .mii; $ADDP Htbl=8,in1 // &Htbl[0].lo
138 { .mii; add Hi[1]=Htbl,Hi[1] // &Htbl[nlo].lo
162 $Htbl="in1";
197 { .mfi; $ADDP r8=0+0,$Htbl
198 $ADDP r9=0+8,$Htbl }
    [all...]
ghash-sparcv9.pl 78 $Htbl="%i1";
112 add $Htbl,8,$Htblo
123 ldx [$Htbl+$nlo],$Zhi
129 ldx [$Htbl+$nhi],$Thi
150 ldx [$Htbl+$nlo],$Thi
166 ldx [$Htbl+$nhi],$Thi
184 ldx [$Htbl+$nlo],$Thi
202 ldx [$Htbl+$nhi],$Thi
223 ldx [$Htbl+$nhi],$Thi
250 add $Htbl,8,$Htbl
    [all...]
  /src/crypto/external/bsd/openssl.old/dist/crypto/modes/asm/
ghash-s390x.pl 66 $Htbl="%r3";
171 lg $Zlo,8($nlo,$Htbl)
172 lg $Zhi,0($nlo,$Htbl)
183 xg $Zlo,8($nhi,$Htbl)
184 xg $Zhi,0($nhi,$Htbl)
195 xg $Zlo,8($nlo,$Htbl)
197 xg $Zhi,0($nlo,$Htbl)
209 xg $Zlo,8($nhi,$Htbl)
210 xg $Zhi,0($nhi,$Htbl)
221 xg $Zlo,8($nlo,$Htbl)
    [all...]
ghash-x86.pl 148 $Htbl = "esi";
159 &mov ($Zhh,&DWP(4,$Htbl,$Zll));
160 &mov ($Zhl,&DWP(0,$Htbl,$Zll));
161 &mov ($Zlh,&DWP(12,$Htbl,$Zll));
162 &mov ($Zll,&DWP(8,$Htbl,$Zll));
191 &xor ($Zll,&DWP(8,$Htbl,$rem));
192 &xor ($Zlh,&DWP(12,$Htbl,$rem));
193 &xor ($Zhl,&DWP(0,$Htbl,$rem));
194 &xor ($Zhh,&DWP(4,$Htbl,$rem));
223 &xor ($Zll,&DWP(8,$Htbl,$rem))
    [all...]
ghash-x86_64.pl 137 $Htbl="%rsi";
168 mov 8($Htbl,$nlo),$Zlo
169 mov ($Htbl,$nlo),$Zhi
181 xor 8($Htbl,$nhi),$Zlo
183 xor ($Htbl,$nhi),$Zhi
196 xor 8($Htbl,$nlo),$Zlo
198 xor ($Htbl,$nlo),$Zhi
211 xor 8($Htbl,$nlo),$Zlo
213 xor ($Htbl,$nlo),$Zhi
223 xor 8($Htbl,$nhi),$Zl
    [all...]
ghash-armv4.pl 97 $Htbl="r1";
201 add $Zhh,$Htbl,$nlo,lsl#4
202 ldmia $Zhh,{$Zll-$Zhh} @ load Htbl[nlo]
203 add $Thh,$Htbl,$nhi
207 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
224 add $Thh,$Htbl,$nlo,lsl#4
228 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nlo]
242 add $Thh,$Htbl,$nhi
246 ldmia $Thh,{$Tll-$Thh} @ load Htbl[nhi]
305 add $Zhh,$Htbl,$nlo,lsl#
    [all...]
ghashp8-ppc.pl 66 my ($Xip,$Htbl,$inp,$len)=map("r$_",(3..6)); # argument block
221 lvx_u $Hl,r8,$Htbl # load pre-computed table
223 lvx_u $H, r9,$Htbl
225 lvx_u $Hh,r10,$Htbl
227 lvx_u $xC2,0,$Htbl
271 lvx_u $Hl,r8,$Htbl # load pre-computed table
274 lvx_u $H, r9,$Htbl
277 lvx_u $Hh,r10,$Htbl
280 lvx_u $xC2,0,$Htbl
294 lvx_u $H2l,r8,$Htbl # load H^
    [all...]
ghash-alpha.pl 23 # Htbl and Z.hi updates for 8 cycles per byte, measured performance is
42 $Htbl="a1";
65 addq $nlo,$Htbl,$nlo
67 addq $nhi,$Htbl,$nhi
90 addq $nlo,$Htbl,$nlo
92 addq $nhi,$Htbl,$nhi
124 addq $nlo,$Htbl,$nlo
125 addq $nhi,$Htbl,$nhi
165 addq $nlo,$Htbl,$nlo
166 addq $nhi,$Htbl,$nh
    [all...]
ghash-ia64.pl 77 (p18) add Hi[1]=Htbl,Hi[1] };;
91 (p17) add Hi[0]=Htbl,Hi[0]
103 Xi=r24; Htbl=r25;
118 { .mii; $ADDP Htbl=8,in1 // &Htbl[0].lo
138 { .mii; add Hi[1]=Htbl,Hi[1] // &Htbl[nlo].lo
162 $Htbl="in1";
197 { .mfi; $ADDP r8=0+0,$Htbl
198 $ADDP r9=0+8,$Htbl }
    [all...]
ghash-sparcv9.pl 79 $Htbl="%i1";
110 add $Htbl,8,$Htblo
121 ldx [$Htbl+$nlo],$Zhi
127 ldx [$Htbl+$nhi],$Thi
148 ldx [$Htbl+$nlo],$Thi
164 ldx [$Htbl+$nhi],$Thi
182 ldx [$Htbl+$nlo],$Thi
200 ldx [$Htbl+$nhi],$Thi
221 ldx [$Htbl+$nhi],$Thi
248 add $Htbl,8,$Htbl
    [all...]

Completed in 39 milliseconds

1 2