/* Miscellaneous BPABI functions.

   Copyright (C) 2003-2022 Free Software Foundation, Inc.
   Contributed by CodeSourcery, LLC.

   This file is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   This file is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* NOTE(review): this file uses helper macros and register aliases
   (ARM_FUNC_START, FUNC_END, do_it, RET, RETc, SYM, __PLT__, do_push,
   do_pop, and the xxh/xxl/yyh/yyl names for r0-r3) that are presumably
   defined by the file that includes this one (lib1funcs.S in libgcc) —
   confirm against the including file.  */

	.cfi_sections .debug_frame

#ifdef __ARM_EABI__
/* Some attributes that are common to all routines in this file.  */
	/* Tag_ABI_align_needed: This code does not require 8-byte
	   alignment from the caller.  */
	/* .eabi_attribute 24, 0  -- default setting.  */
	/* Tag_ABI_align_preserved: This code preserves 8-byte
	   alignment in any callee.  */
	.eabi_attribute 25, 1
#endif /* __ARM_EABI__ */

#ifdef L_aeabi_lcmp

/* int __aeabi_lcmp (long long a, long long b)
   Three-way signed 64-bit compare: returns -1, 0 or +1 in r0.
   Compares high words signed, then low words unsigned (the low word
   of a two-word signed number is an unsigned magnitude).  */
ARM_FUNC_START aeabi_lcmp
	cmp	xxh, yyh		/* Signed compare of high words.  */
	do_it	lt
	movlt	r0, #-1
	do_it	gt
	movgt	r0, #1
	do_it	ne
	RETc(ne)			/* High words differ: result decided.  */
	subs	r0, xxl, yyl		/* Low words: unsigned compare; 0 if equal.  */
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	RET
	FUNC_END aeabi_lcmp

#endif /* L_aeabi_lcmp */

#ifdef L_aeabi_ulcmp

/* int __aeabi_ulcmp (unsigned long long a, unsigned long long b)
   Three-way unsigned 64-bit compare: returns -1, 0 or +1 in r0.  */
ARM_FUNC_START aeabi_ulcmp
	cmp	xxh, yyh		/* Unsigned compare of high words.  */
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	do_it	ne
	RETc(ne)			/* High words differ: result decided.  */
	cmp	xxl, yyl		/* Unsigned compare of low words.  */
	do_it	lo
	movlo	r0, #-1
	do_it	hi
	movhi	r0, #1
	do_it	eq
	moveq	r0, #0
	RET
	FUNC_END aeabi_ulcmp

#endif /* L_aeabi_ulcmp */

/* If the divisor (yyh:yyl) is zero, load the AEABI-specified result
   into xxh:xxl (all-ones for unsigned; LLONG_MAX/LLONG_MIN by sign of
   the dividend for signed; zero dividend gives zero) and tail-call
   __aeabi_ldiv0.  Falls through to label 2 when the divisor is
   non-zero.  */
.macro test_div_by_zero signed
/* Tail-call to divide-by-zero handlers which may be overridden by the user,
   so unwinding works properly.  */
#if defined(__thumb2__)
	cbnz	yyh, 2f
	cbnz	yyl, 2f
	cmp	xxh, #0
	.ifc	\signed, unsigned
	do_it	eq
	cmpeq	xxl, #0
	do_it	ne, t
	movne	xxh, #0xffffffff
	movne	xxl, #0xffffffff
	.else
	do_it	lt, tt
	movlt	xxl, #0
	movlt	xxh, #0x80000000
	blt	1f
	do_it	eq
	cmpeq	xxl, #0
	do_it	ne, t
	movne	xxh, #0x7fffffff
	movne	xxl, #0xffffffff
	.endif
1:
	b	SYM (__aeabi_ldiv0) __PLT__
2:
#else
	/* Note: Thumb-1 code calls via an ARM shim on processors which
	   support ARM mode.  */
	cmp	yyh, #0
	cmpeq	yyl, #0
	bne	2f
	cmp	xxh, #0
	.ifc	\signed, unsigned
	cmpeq	xxl, #0
	movne	xxh, #0xffffffff
	movne	xxl, #0xffffffff
	.else
	movlt	xxh, #0x80000000
	movlt	xxl, #0
	blt	1f
	cmpeq	xxl, #0
	movne	xxh, #0x7fffffff
	movne	xxl, #0xffffffff
	.endif
1:
	b	SYM (__aeabi_ldiv0) __PLT__
2:
#endif
.endm

/* We can use STRD/LDRD on v5TE and later, and any Thumb-2 architecture. */
#if (defined(__ARM_EABI__)				\
     && (defined(__thumb2__)				\
	 || (__ARM_ARCH >= 5 && defined(__TARGET_FEATURE_DSP))))
#define CAN_USE_LDRD 1
#else
#define CAN_USE_LDRD 0
#endif

/* Set up stack frame for call to __udivmoddi4.  At the end of the macro the
   stack is arranged as follows:
		sp+12	/ space for remainder
		sp+8	\ (written by __udivmoddi4)
		sp+4	lr
		sp+0	sp+8 [rp (remainder pointer) argument for __udivmoddi4]

 */
.macro push_for_divide fname
#if defined(__thumb2__) && CAN_USE_LDRD
	sub	ip, sp, #8
	strd	ip, lr, [sp, #-16]!	/* Store rp argument and lr in one go.  */
#else
	sub	sp, sp, #8
	do_push	{sp, lr}
#endif
	.cfi_adjust_cfa_offset 16
	.cfi_offset 14, -12
.endm

/* Restore stack: reload lr, fetch the remainder written by __udivmoddi4
   into r2:r3, and release the 16-byte frame.  */
.macro pop_for_divide
	ldr	lr, [sp, #4]
#if CAN_USE_LDRD
	ldrd	r2, r3, [sp, #8]
	add	sp, sp, #16
#else
	add	sp, sp, #8
	do_pop	{r2, r3}
#endif
	.cfi_restore 14
	.cfi_adjust_cfa_offset 0
.endm

#ifdef L_aeabi_ldivmod

/* Perform 64 bit signed division.
   Inputs:
	r0:r1	numerator
	r2:r3	denominator
   Outputs:
	r0:r1	quotient
	r2:r3	remainder
   Implemented by sign-adjusting the operands, calling the unsigned
   __udivmoddi4, then fixing the signs of quotient and remainder.  */
ARM_FUNC_START aeabi_ldivmod
	.cfi_startproc
	test_div_by_zero	signed

	push_for_divide	__aeabi_ldivmod
	cmp	xxh, #0
	blt	1f
	cmp	yyh, #0
	blt	2f
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	RET

1: /* xxh:xxl is negative */
	.cfi_restore_state
	negs	xxl, xxl
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	cmp	yyh, #0
	blt	3f
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	negs	xxl, xxl		/* Negate quotient ...  */
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	negs	yyl, yyl		/* ... and remainder.  */
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

2: /* only yyh:yyl is negative */
	.cfi_restore_state
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	.cfi_remember_state
	pop_for_divide
	negs	xxl, xxl		/* Negate quotient only.  */
	sbc	xxh, xxh, xxh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

3: /* both xxh:xxl and yyh:yyl are negative */
	.cfi_restore_state
	negs	yyl, yyl
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	pop_for_divide
	negs	yyl, yyl		/* Negate remainder only.  */
	sbc	yyh, yyh, yyh, lsl #1	/* Thumb-2 has no RSC, so use X - 2X */
	RET

	.cfi_endproc

#endif /* L_aeabi_ldivmod */

#ifdef L_aeabi_uldivmod

/* Perform 64 bit unsigned division.
   Inputs:
	r0:r1	numerator
	r2:r3	denominator
   Outputs:
	r0:r1	quotient
	r2:r3	remainder
 */
ARM_FUNC_START aeabi_uldivmod
	.cfi_startproc
	test_div_by_zero	unsigned

	push_for_divide	__aeabi_uldivmod
	/* arguments in (r0:r1), (r2:r3) and *sp */
	bl	SYM(__udivmoddi4) __PLT__
	pop_for_divide
	RET
	.cfi_endproc

#endif /* L_aeabi_uldivmod */