/src/lib/libc/arch/powerpc64/gen/

__setjmp14.S
    20  mr %r6,%r3
    23  addi %r5,%r6,100            # &sigmask
    30  std %r8,8(%r6)              # save r8-r31
    31  std %r9,16(%r6)
    32  std %r10,24(%r6)
    33  std %r11,32(%r6)
    34  std %r12,40(%r6)
    35  std %r13,48(%r6)
    36  std %r14,56(%r6)
    37  std %r15,64(%r6)
    [all...]

__sigsetjmp14.S
    11  mr %r6,%r3
    16  addi %r5,%r6,100            # &sigmask
    25  std 7+i,i*8(%r6)            # save r7-r31
    38  mr %r6,%r4
    50  or. %r3,%r6,%r6
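
Both files follow the same shape: the jmp_buf pointer arrives in %r3 and moves to %r6, the callee-saved registers are stored at fixed offsets from it, and %r5 is pointed at the buffer's sigmask slot so the current signal mask can be captured as well. Conceptually, in C (a sketch with an illustrative buffer type, not NetBSD's real jmp_buf layout):

	#include <signal.h>

	/* Illustrative stand-in for a jmp_buf with a sigmask slot. */
	struct jb_sketch {
		unsigned long regs[24];	/* callee-saved GPRs, illustrative */
		sigset_t sigmask;	/* the slot that %r5 is aimed at */
	};

	/* Capture the current signal mask without changing it, as the */
	/* setjmp entry points do before returning 0. */
	static void capture_mask(struct jb_sketch *env)
	{
		(void)sigprocmask(SIG_BLOCK, NULL, &env->sigmask);
	}
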
/src/common/lib/libc/arch/powerpc/string/

memcpy.S
    76  mfspr %r6, 287              /* mfpvbr %r6 PVR = 287 */
    77  srwi %r6, %r6, 0x10         /* get version field from PVR */
    78  cmpwi %r6, 0x1              /* 601 CPU = 0x0001 */
    81  or %r6, %r3, %r4            /* see if both source and dest */
    82  andi. %r6, %r6, 3           /* are 32bit aligned */
    86  li %r6, 0
    88  lbzx %r7, %r4, %r6
    [all...]

memmove.S
    60  mr %r6, %r3                 /* swap src/dst */
    62  mr %r4, %r6
    89  stwu %r6, 4(%r8)            /* Store previous word */
    95  lwzu %r6, 4(%r4)            /* Load another word */
    99  mr %r7, %r6                 /* If word count -> 0, then... */
   112  lbzu %r6, 4(%r4)            /* 1st byte: update addr by 4 */
   113  stbu %r6, 4(%r8)            /* since we pre-adjusted by 4 */
   118  lbzu %r6, 1(%r4)            /* But handle the rest by */
   119  stbu %r6, 1(%r8)            /* updating addr by 1 */
   143  stwu %r6, -4(%r8)           /* Store previous word * [all...]
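
The or/andi. pair at memcpy.S:81-82 tests both pointers' alignment in one step: OR the addresses together and mask the low bits; the result is zero only if both are word-aligned. The same test in C (a sketch):

	#include <stdint.h>

	/* Zero iff dst and src are both 4-byte aligned: OR-ing the */
	/* addresses merges their low bits, so one mask checks both. */
	static int both_word_aligned(const void *dst, const void *src)
	{
		return (((uintptr_t)dst | (uintptr_t)src) & 3) == 0;
	}
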
/src/sys/lib/libkern/arch/vax/

blkset.S
    38  ENTRY(__blkset,R6|R7)
    41  movl 12(%ap), %r6
    43  1: subl2 %r0, %r6
    46  cmpl %r6, %r0
    48  movc5 $0,(%r3),%r7,%r6,(%r3)

blkcpy.S
    34  ENTRY(__blkcpy, R6)
    37  movl 12(%ap),%r6
    43  subl2 %r0,%r6
    47  cmpl %r6,%r0
    49  movc3 %r6,(%r1),(%r3)
    52  addl2 %r6,%r1
    53  addl2 %r6,%r3
    57  subl2 %r0,%r6
    65  cmpl %r6,%r0
    67  subl2 %r6,%r [all...]
/src/lib/libc/arch/vax/string/

bcopy.S
    41  ENTRY(bcopy, R6)
    44  movl 12(%ap),%r6
    50  subl2 %r0,%r6
    54  cmpl %r6,%r0
    56  movc3 %r6,(%r1),(%r3)
    59  addl2 %r6,%r1
    60  addl2 %r6,%r3
    64  subl2 %r0,%r6
    72  cmpl %r6,%r0
    74  subl2 %r6,%r [all...]
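
blkcpy.S and bcopy.S wrap the same loop around movc3, which copies at most 65535 bytes per execution: r6 tracks the remaining length, each iteration moves one chunk and advances both pointers, and the count shrinks until a final short movc3 finishes the job. Roughly, in C (memcpy standing in for the single VAX instruction):

	#include <stddef.h>
	#include <string.h>

	#define MOVC3_MAX 65535u	/* movc3's 16-bit length limit */

	/* Sketch of the chunking loop in blkcpy.S/bcopy.S. */
	static void chunked_copy(char *dst, const char *src, size_t len)
	{
		while (len > MOVC3_MAX) {
			memcpy(dst, src, MOVC3_MAX);	/* one movc3 chunk */
			dst += MOVC3_MAX;
			src += MOVC3_MAX;
			len -= MOVC3_MAX;		/* r6 tracks what's left */
		}
		memcpy(dst, src, len);			/* final short movc3 */
	}
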
/src/common/lib/libc/arch/arm/string/

strlcat_arm.S
    42  push {r4-r6, lr}
    45  .save {r4-r6, lr}
    55  mov r6, r2                  /* save siz */
    57  mov r1, r6                  /* get siz */
    60  subs r2, r6, r0             /* get remaining space in dst */
    66  pop {r4-r6, pc}             /* restore registers and return */
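
strlcat_arm.S parks the caller's siz in r6 so it survives the intermediate calls, then derives the remaining room in dst by subtraction (line 60). The corresponding flow in C (a sketch of the strlcat contract, not the assembly's exact call sequence):

	#include <string.h>

	/* Append src to dst within a buffer of siz bytes total; */
	/* return the length the result would have had given room. */
	static size_t strlcat_sketch(char *dst, const char *src, size_t siz)
	{
		size_t dlen = strnlen(dst, siz);	/* current length */
		size_t room = siz - dlen;		/* "subs r2, r6, r0" */

		if (room == 0)			/* dst already full */
			return dlen + strlen(src);
		return dlen + strlcpy(dst + dlen, src, room);
	}
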
/src/sys/arch/arm/arm/

bcopyinout.S
   105  and r6, r1, #0x3
   106  ldr pc, [pc, r6, lsl #2]
   112  .Lial3: ldrbt r6, [r0], #1
   114  strb r6, [r1], #1
   118  .Lial1: ldrbt r6, [r0], #1
   120  strb r6, [r1], #1
   144  and r6, r1, #0x1f
   145  ldr pc, [pc, r6]
   155  .Lical28:ldrt r6, [r0], #4
   157  str r6, [r1], # [all...]

copystr.S
    57  #define SAVE_REGS    push {r3-r6}
    58  #define RESTORE_REGS pop {r3-r6}
    72  mov r6, #0x00000000
    87  add r6, r6, #0x00000001
    90  teqne r6, r2
   101  strne r6, [r3]
   119  mov r6, #0x00000000
   134  add r6, r6, #0x0000000 [all...]
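
The ldr pc, [pc, r6, lsl #2] at bcopyinout.S:106 jumps through an inline table indexed by the destination's misalignment; the .LialN labels then fall through one byte-copy each until the pointer is word-aligned. The dispatch expressed as a C switch (names are illustrative):

	#include <stddef.h>
	#include <stdint.h>

	/* Copy 0-3 head bytes so dst becomes word-aligned, like the */
	/* fall-through .LialN byte copies. */
	static void align_head(uint8_t **dst, const uint8_t **src, size_t *len)
	{
		size_t head = -(uintptr_t)*dst & 3;	/* bytes needed */

		switch (head) {				/* computed jump */
		case 3: *(*dst)++ = *(*src)++; (*len)--; /* FALLTHROUGH */
		case 2: *(*dst)++ = *(*src)++; (*len)--; /* FALLTHROUGH */
		case 1: *(*dst)++ = *(*src)++; (*len)--; /* FALLTHROUGH */
		case 0: break;				/* already aligned */
		}
	}
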
/src/sys/arch/powerpc/oea/

altivec_subr.S
    57  li %r6,VREG_V2;  lvx %v2,%r3,%r6
    62  li %r6,VREG_V6;  lvx %v6,%r3,%r6
    67  li %r6,VREG_V10; lvx %v10,%r3,%r6
    72  li %r6,VREG_V14; lvx %v14,%r3,%r6
    77  li %r6,VREG_V18; lvx %v18,%r3,%r6
    [all...]
/src/lib/libc/arch/powerpc/sys/

sbrk.S
    23  ldptru %r6,(_C_LABEL(__curbrk)-.LPIC0)@l(%r8)
    26  ldptru %r6,_C_LABEL(__curbrk)@l(%r8)   # r6 = old break, r5 = &curbrk
    28  add %r3,%r3,%r6
    32  mr %r3,%r6                  # set return value
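
The sbrk stubs here (and the powerpc64 and or1k versions below) share one shape: load the cached break from __curbrk (r6 = old break), add the requested increment, trap to the kernel to move the break, then store the new value and return the old break. In C (a sketch; __syscall_break is a stand-in for the raw SYS_break trap):

	extern char *__curbrk;			/* libc's cached break */
	extern int __syscall_break(char *);	/* stand-in for SYS_break */

	/* Sketch of sbrk.S: grow the break by incr, return old break. */
	static void *sbrk_sketch(long incr)
	{
		char *obrk = __curbrk;		/* "r6 = old break" */

		if (__syscall_break(obrk + incr) == -1)
			return (void *)-1;	/* errno set by the trap */
		__curbrk = obrk + incr;		/* update the cache */
		return obrk;			/* "mr %r3,%r6" */
	}
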
/src/lib/libc/compat/arch/powerpc/gen/

compat_setjmp.S
    21  mr %r6,%r3
    30  stmw %r8,4(%r6)
    37  mr %r6,%r4
    45  or. %r3,%r6,%r6
/src/lib/libc/arch/or1k/sys/

brk.S
    31  l.movhi r6,gotoffhi(_C_LABEL(__minbrk))
    32  l.ori r6,r6,gotofflo(_C_LABEL(__minbrk))
    33  l.add r6,r6,r7
    35  l.movhi r6,hi(_C_LABEL(__minbrk))
    36  l.ori r6,r6,lo(_C_LABEL(__minbrk))
    38  l.lwz r5,0(r6)              # r5 = __minbrk
    45  l.sw __SIZEOF_POINTER__(r6),r [all...]

sbrk.S
    19  PIC_GOTSETUP(r6)
    23  l.add r5,r5,r6
    28  l.lwz r6,0(r5)              # r6 = old break, r5 = &curbrk
    29  l.add r3,r3,r6
    34  l.or r11,r6,r0              # set return value
/src/sys/arch/powerpc/ibm4xx/

4xx_locore.S
   128  * %r6          tmp
   146  neg %r6,%r3                 /* Find how far unaligned we are... */
   147  andi. %r6,%r6,31            /* Cache-align dest. */
   148  mtxer %r6
   149  sub %r5,%r5,%r6             /* subtract count */
   151  add %r4,%r4,%r6
   154  add %r3,%r3,%r6
   155  addic. %r6,%r5,-32          /* Pre-decrement next line */
   170  mr %r5,%r6
   [all...]
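
The neg/andi. pair at lines 146-147 computes the distance to the next 32-byte cache line: negate the address and keep the low five bits. The count then lands in XER (mtxer), where the PowerPC load/store-string instructions take their byte count. The head calculation in C (a sketch):

	#include <stdint.h>

	#define LINE 32u	/* ibm4xx cache-line size */

	/* Bytes from p up to the next cache-line boundary (0 if */
	/* already aligned): "neg %r6,%r3; andi. %r6,%r6,31". */
	static unsigned line_head(uintptr_t p)
	{
		return (unsigned)(-p & (LINE - 1));
	}
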
/src/lib/libc/arch/powerpc64/sys/

sbrk.S
    14  ldptru %r6,_C_LABEL(__curbrk)@toc@l(%r8)
    15  add %r3,%r3,%r6
    19  mr %r3,%r6                  # set return value
/src/lib/libc/compat/arch/sh3/sys/

compat_sigprocmask.S
    65  tst r6, r6                  /* oset == NULL? */
    67  mov.l r0, @r6               /* store old mask */
/src/lib/csu/arch/earm/

crt0.S
    50  movs r6, #7
    52  bics r7, r7, r6
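
The movs/bics pair is the classic mask-and-clear used to round an address down to an 8-byte boundary; in a crt0 this is typically applied to the initial stack pointer, which AAPCS requires to be 8-byte aligned. In C (a sketch):

	#include <stdint.h>

	/* "movs r6, #7; bics r7, r7, r6": clear the low three bits */
	/* to round an address down to an 8-byte boundary. */
	static uintptr_t align8_down(uintptr_t addr)
	{
		return addr & ~(uintptr_t)7;
	}
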
/src/common/lib/libc/arch/or1k/string/

memset.S
    92  l.lwz r6, 0(r3)             /* get first word */
    95  l.and r6, r6, r7            /* clear bytes to be filled */
    98  l.or r6, r6, r7             /* merge existing with new */
   100  l.sw 0(r3), r6              /* store first word */
   105  l.srli r6, r5, 2            /* clear low two bits of len */
   106  l.srli r6, r6, 2            /* ... */
   107  l.sfgeu r3, r6              /* any more full words? * [all...]

strlen.S
    80  l.addi r6, r0, -1           /* r6 = 0xffffffff */
    82  l.srl r6, r6, r5            /* clear low (MSB) bytes */
    83  l.xori r6, r6, -1           /* complement */
    84  l.or r8, r8, r6             /* merge with loaded word */
    93  l.xori r6, r7, -1           /* t1 = ~t0 */
   102  l.or r6, r8, r13            /* t1 = x | 0x7f7f7f7f */
   104  l.or r4, r5, r6             /* t3 = t2 | t1 * [all...]
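
memset.S does a read-modify-write on the first partial word (mask out the bytes to be filled, OR in the new ones); strlen.S scans word-at-a-time, first forcing the bytes before an unaligned start to nonzero (lines 80-84), then testing each word for an embedded NUL with the usual mask arithmetic. The zero-byte test in C (32-bit sketch):

	#include <stdint.h>

	/* Nonzero iff some byte of w is 0x00: after the complement, a */
	/* byte's MSB survives only if the byte had no high bit and */
	/* adding 0x7f produced no carry into bit 7, i.e. it was zero. */
	static uint32_t has_zero_byte(uint32_t w)
	{
		uint32_t t0 = (w & 0x7f7f7f7f) + 0x7f7f7f7f;

		return ~(t0 | w | 0x7f7f7f7f);
	}
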
/src/common/lib/libc/arch/or1k/gen/

mulsi3.S
    49  l.ori r6, r3, 0             # move r3 to r6
    50  l.bf .Lloop                 # yes, r6 already has smaller value,
    52  l.ori r6, r4, 0             # move r4 to r6 because it's smaller than r3
    54  l.cmov r6, r3, r4           # choose lesser of r3 or r4
    58  l.andi r5, r6, 1            # get LSB
    60  l.and r8, r8, r4            # r8 = r6 & 1 ? r4 : 0
    63  l.slli r6, r6, 1            # multiply by [all...]
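
mulsi3.S implements multiplication as iterated shift-and-add, walking the smaller operand bit by bit (choosing the smaller factor minimizes iterations). In C (a sketch; the low 32 bits are the same for signed and unsigned factors):

	#include <stdint.h>

	/* Shift-and-add multiply: add the shifted larger factor */
	/* wherever the smaller factor has a 1 bit. */
	static uint32_t mulsi3_sketch(uint32_t a, uint32_t b)
	{
		uint32_t small = a < b ? a : b;	/* "choose lesser of r3 or r4" */
		uint32_t big   = a < b ? b : a;
		uint32_t prod  = 0;

		while (small != 0) {
			if (small & 1)		/* "get LSB" */
				prod += big;
			big <<= 1;		/* shift partial product */
			small >>= 1;		/* consume one bit */
		}
		return prod;
	}
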
/src/sys/arch/arm/arm32/

irq_dispatch.S
   113  * r6           old value of `ci_intr_depth'
   115  ldr r6, [r4, #CI_INTR_DEPTH]
   116  add r1, r6, #1
   126  str r6, [r4, #CI_INTR_DEPTH]
   155  * r6           old value of `ci_intr_depth'
   158  ldr r6, [r4, #CI_INTR_DEPTH]
   159  add r1, r6, #1
   174  str r6, [r4, #CI_INTR_DEPTH]
   181  PULLIDLEFRAME               /* restore r4, r6, sp, lr */
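
The dispatcher keeps the old ci_intr_depth in r6, publishes depth+1 while handlers run, and writes the old value back on the way out, so nesting unwinds without a decrement. A C sketch of the pattern (the struct and field names follow the comments above, not the real struct cpu_info layout):

	/* Bump the per-CPU interrupt depth around dispatch, restoring */
	/* the saved value instead of decrementing. */
	struct cpu_info_sketch {
		int ci_intr_depth;
	};

	static void irq_dispatch_sketch(struct cpu_info_sketch *ci)
	{
		int odepth = ci->ci_intr_depth;	/* r6 = old value */

		ci->ci_intr_depth = odepth + 1;	/* "add r1, r6, #1", stored */
		/* ... run the interrupt handlers, which may nest ... */
		ci->ci_intr_depth = odepth;	/* "str r6, [r4, #CI_INTR_DEPTH]" */
	}
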
/src/lib/libc/arch/powerpc/gen/

__setjmp14.S
    21  mr %r6,%r3
    24  addi %r5,%r6,4*(1+24)       # &sigmask
    30  stmw %r8,4(%r6)             # save r8-r31
    37  mr %r6,%r4
    46  or. %r3,%r6,%r6

__sigsetjmp14.S
    11  mr %r6,%r3
    16  addi %r5,%r6,4*(1+24)       # &sigmask
    23  stmw %r7,0(%r6)             # save r7-r31
    30  mr %r6,%r4
    42  or. %r3,%r6,%r6