/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arm/asm.h>
#include <arm/vfpreg.h>

RCSID("$NetBSD: vfpsf.S,v 1.1.4.2 2013/02/25 00:27:55 tls Exp $")

/*
 * This file provides soft-float-compatible routines that use VFP
 * instructions to do the actual work.  This should give near hard-float
 * performance while remaining compatible with soft-float code.
 *
 * This file implements the single-precision floating-point routines.
 */

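/*
 * Under the soft-float AAPCS, single-precision values are passed and
 * returned in core registers, so each routine here moves its operands
 * from r0/r1 into VFP registers, operates, and moves the result back
 * into r0.  As a rough sketch (not exact compiler output), a function
 * such as
 *
 *	float add(float a, float b) { return a + b; }
 *
 * is lowered by a soft-float compiler to something like
 *
 *	@ a in r0, b in r1
 *	bl	__addsf3	@ result returned in r0
 */
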
ENTRY(__addsf3)
	vmov	s0, s1, r0, r1
	vadd.f32 s0, s0, s1
	vmov	r0, s0
	RET
END(__addsf3)

ENTRY(__subsf3)
	vmov	s0, s1, r0, r1
	vsub.f32 s0, s0, s1
	vmov	r0, s0
	RET
END(__subsf3)

ENTRY(__mulsf3)
	vmov	s0, s1, r0, r1
	vmul.f32 s0, s0, s1
	vmov	r0, s0
	RET
END(__mulsf3)

ENTRY(__divsf3)
	vmov	s0, s1, r0, r1
	vdiv.f32 s0, s0, s1
	vmov	r0, s0
	RET
END(__divsf3)

ENTRY(__negsf2)
	vmov	s0, r0
	vneg.f32 s0, s0
	vmov	r0, s0
	RET
END(__negsf2)

ENTRY(__truncdfsf2)
#ifdef __ARMEL__
	vmov	d0, r0, r1	/* little-endian: r0 = low word, r1 = high word */
#else
	vmov	d0, r1, r0	/* big-endian: word order is swapped */
#endif
	vcvt.f32.f64 s0, d0
	vmov	r0, s0
	RET
END(__truncdfsf2)

ENTRY(__fixsfsi)
	vmov	s0, r0
	vcvt.s32.f32 s0, s0	/* truncates (rounds toward zero) */
	vmov	r0, s0
	RET
END(__fixsfsi)

ENTRY(__fixunssfsi)
	vmov	s0, r0
	vcvt.u32.f32 s0, s0	/* truncates (rounds toward zero) */
	vmov	r0, s0
	RET
END(__fixunssfsi)

ENTRY(__floatsisf)
	vmov	s0, r0
	vcvt.f32.s32 s0, s0
	vmov	r0, s0
	RET
END(__floatsisf)

ENTRY(__floatunsisf)
	vmov	s0, r0
	vcvt.f32.u32 s0, s0
	vmov	r0, s0
	RET
END(__floatunsisf)

/*
 * Flag state after vcmp.f32 s0, s1 followed by vmrs:
 *	N set if a < b
 *	Z set if a == b
 *	C set if a == b, a > b, or the operands are unordered
 *	V set if the operands are unordered
 */

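/*
 * The comparison routines below read these flags with the standard ARM
 * condition codes: eq/ne test Z, mi/pl test N, gt/le test "Z clear and
 * N == V" and its inverse, and vs/vc test V.  Note that gt is false and
 * le is true when the compare is unordered (N != V in that case).
 */
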
STRONG_ALIAS(__eqsf2, __nesf2)
ENTRY(__nesf2)
	vmov	s0, s1, r0, r1
	vcmp.f32 s0, s1
	vmrs	APSR_nzcv, fpscr
	moveq	r0, #0		/* !(a == b) */
	movne	r0, #1		/* !(a == b) */
	RET
END(__nesf2)

STRONG_ALIAS(__gesf2, __ltsf2)
ENTRY(__ltsf2)
	vmov	s0, s1, r0, r1
	vcmp.f32 s0, s1
	vmrs	APSR_nzcv, fpscr
	mvnmi	r0, #0		/* -(a < b) */
	movpl	r0, #0		/* -(a < b) */
	RET
END(__ltsf2)

STRONG_ALIAS(__gtsf2, __lesf2)
ENTRY(__lesf2)
	vmov	s0, s1, r0, r1
	vcmp.f32 s0, s1
	vmrs	APSR_nzcv, fpscr
	movgt	r0, #1		/* (a > b) */
	movle	r0, #0		/* (a > b) */
	RET
END(__lesf2)

ENTRY(__unordsf2)
	vmov	s0, s1, r0, r1
	vcmp.f32 s0, s1
	vmrs	APSR_nzcv, fpscr
	movvs	r0, #1		/* isnan(a) || isnan(b) */
	movvc	r0, #0		/* isnan(a) || isnan(b) */
	RET
END(__unordsf2)