/*-
 * Copyright (c) 2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
28 1.1 matt */ 29 1.1 matt 30 1.1 matt #include <machine/asm.h> 31 1.1 matt 32 1.9 joerg RCSID("$NetBSD: __aeabi_uldivmod.S,v 1.9 2014/05/06 16:02:11 joerg Exp $") 33 1.2 matt 34 1.2 matt /* 35 1.2 matt * typedef struct { unsigned long long quo, rem } ulldiv_t; 36 1.2 matt * __value_in_regs ulldiv_t __aeabi_uldivmod(unsigned long long n, 37 1.2 matt * unsigned long long d); 38 1.2 matt */ 39 1.2 matt 40 1.1 matt ENTRY(__aeabi_uldivmod) 41 1.7 matt #ifdef __ARM_EABI__ 42 1.9 joerg # if !defined(__ARM_DWARF_EH__) 43 1.7 matt .fnstart 44 1.9 joerg #endif 45 1.7 matt .cfi_startproc 46 1.7 matt #endif 47 1.6 matt #if !defined(_KERNEL) && !defined(_STANDALONE) 48 1.7 matt #if !defined(__thumb__) 49 1.5 matt orrs ip, r2, r3 50 1.5 matt beq .Ldivbyzero 51 1.7 matt #elif defined(_ARM_ARCH_T2) 52 1.7 matt cbnz r2, 1f 53 1.7 matt cbz r3, .Ldivbyzero 54 1.7 matt 1: 55 1.7 matt #else 56 1.7 matt cmp r2, #0 57 1.7 matt bne 1f 58 1.7 matt cmp r3, #0 59 1.7 matt beq .Ldivbyzero 60 1.7 matt 1: 61 1.7 matt #endif 62 1.6 matt #endif 63 1.5 matt 64 1.1 matt push {r4,lr} 65 1.7 matt #ifdef __ARM_EABI__ 66 1.9 joerg # if !defined(__ARM_DWARF_EH__) 67 1.7 matt .save {r4,lr} 68 1.9 joerg # endif 69 1.7 matt .cfi_def_cfa_offset 8 70 1.7 matt .cfi_offset 14, -4 71 1.7 matt .cfi_offset 4, -8 72 1.7 matt #endif 73 1.3 matt sub sp, sp, #16 74 1.7 matt #ifdef __ARM_EABI__ 75 1.7 matt .cfi_def_cfa_offset 24 76 1.7 matt #endif 77 1.7 matt #if !defined(__thumb__) || defined(_ARM_ARCH_T2) 78 1.7 matt add r4, sp, #8 79 1.7 matt #else 80 1.7 matt mov r4, sp 81 1.7 matt adds r4, r4, #8 82 1.7 matt #endif 83 1.7 matt str r4, [sp] 84 1.4 skrll bl PLT_SYM(__qdivrem) 85 1.3 matt add sp, sp, #8 86 1.7 matt #ifdef __ARM_EABI__ 87 1.7 matt .cfi_def_cfa_offset 16 88 1.7 matt .cfi_offset 3, -12 89 1.7 matt .cfi_offset 2, -16 90 1.7 matt #endif 91 1.2 matt /* 92 1.2 matt * The remainder is already on the stack just waiting to be popped 93 1.2 matt * into r2/r3. 
94 1.2 matt */ 95 1.7 matt pop {r2-r4,pc} 96 1.5 matt 97 1.6 matt #if !defined(_KERNEL) && !defined(_STANDALONE) 98 1.5 matt .Ldivbyzero: 99 1.7 matt push {r0-r1,r4,lr} 100 1.7 matt #ifdef __ARM_EABI__ 101 1.9 joerg # if !defined(__ARM_DWARF_EH__) 102 1.7 matt .save {r0-r1,r4,lr} 103 1.9 joerg # endif 104 1.7 matt .cfi_def_cfa_offset 16 105 1.7 matt .cfi_offset 14, -4 106 1.7 matt .cfi_offset 4, -8 107 1.7 matt #endif 108 1.7 matt #ifdef __thumb__ 109 1.7 matt movs r0, #0 110 1.7 matt mvns r0, r0 111 1.7 matt #else 112 1.5 matt mvn r0, #0 113 1.7 matt #endif 114 1.8 matt movs r1, r0 115 1.5 matt bl PLT_SYM(__aeabi_ldiv0) 116 1.7 matt pop {r2-r4,pc} 117 1.7 matt #endif 118 1.7 matt #ifdef __ARM_EABI__ 119 1.7 matt .cfi_endproc 120 1.9 joerg # if !defined(__ARM_DWARF_EH__) 121 1.7 matt .fnend 122 1.9 joerg # endif 123 1.6 matt #endif 124 1.1 matt END(__aeabi_uldivmod) 125