/*-
 * Copyright (c) 2010 Per Odlund <per.odlund (at) armagedon.se>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* ARMv7 assembly functions for manipulating caches and other core functions.
 * Based on cpufuncs for v6 and xscale.
 */

#include <machine/asm.h>

	.arch	armv7a

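/* Cache geometry is discovered through the ARMv7 CP15 identification
 * registers:
 *
 *   CLIDR  (mrc p15, 1, <Rt>, c0, c0, 1): cache type per level, LoC in
 *          bits [26:24].
 *   CSSELR (mcr p15, 2, <Rt>, c0, c0, 0): selects which cache CCSIDR
 *          describes (level in bits [3:1], bit 0 = instruction side).
 *   CCSIDR (mrc p15, 1, <Rt>, c0, c0, 0): LineSize in bits [2:0],
 *          Associativity - 1 in bits [12:3], NumSets - 1 in bits [27:13].
 *
 * The line size in bytes is 1 << (LineSize + 4), e.g. LineSize = 2
 * means 64-byte lines.
 */
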
/* LINTSTUB: void armv7_dcache_wbinv_range(vaddr_t, vsize_t); */
ENTRY(armv7_dcache_wbinv_range)
	mov	ip, #0
	mcr	p15, 2, ip, c0, c0, 0	@ set cache level to L1
	mrc	p15, 1, r2, c0, c0, 0	@ read CCSIDR
	and	r2, r2, #7		@ get line size (log2(size)-4, 0=16)
	mov	ip, #16			@ make a bit mask
	lsl	r2, ip, r2		@ and shift into position
	sub	ip, r2, #1		@ make into a mask
	and	r3, r0, ip		@ get offset into cache line
	add	r1, r1, r3		@ add to length
	bic	r0, r0, ip		@ clear offset from start.
	dsb
1:
	mcr	p15, 0, r0, c7, c14, 1	@ wb and inv the D-Cache line to PoC
	add	r0, r0, r2		@ advance one cache line
	subs	r1, r1, r2		@ decrement remaining length
	bhi	1b
	dsb				@ data synchronization barrier
	bx	lr
END(armv7_dcache_wbinv_range)

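/* The wbinv_all/inv_all loops below drive the set/way maintenance
 * operations (DCCISW/DCISW), whose operand packs three fields into one
 * register:
 *
 *   level: bits [3:1]
 *   set:   starting at bit L, where L = log2(line size in bytes)
 *   way:   in the top bits, left-justified so the MSB of (numways - 1)
 *          lands at bit 31
 *
 * which is why the loops can walk all sets and ways with plain subtracts.
 */
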
/* LINTSTUB: void armv7_dcache_wbinv_all(void); */
ENTRY_NP(armv7_dcache_wbinv_all)
	mrc	p15, 1, r0, c0, c0, 1	@ read CLIDR
	tst	r0, #0x07000000		@ is the LoC field non-zero?
	bxeq	lr			@ no, no caches to clean
	mov	r3, #0			@ start with L1

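/* Walk the cache levels reported by CLIDR.  r3 holds level << 1 so it
 * can be used directly in the set/way operand.  Each level has a 3-bit
 * Ctype field in CLIDR; testing bits 1-2 accepts data, separate, and
 * unified caches while skipping instruction-only levels.
 */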
.Lstart_wbinv:
	add	r2, r3, r3, lsr #1	@ r2 = level * 3 (r3 = level * 2)
	mov	r1, r0, lsr r2		@ r1 = cache type
	tst	r1, #6			@ is it unified or data?
	beq	.Lnext_level_wbinv	@ nope, skip level

	mcr	p15, 2, r3, c0, c0, 0	@ select cache level
	isb
	mrc	p15, 1, r0, c0, c0, 0	@ read CCSIDR

	ubfx	ip, r0, #0, #3		@ get linesize from CCSIDR
	add	ip, ip, #4		@ apply bias
	ubfx	r2, r0, #13, #15	@ get numsets - 1 from CCSIDR
	lsl	r2, r2, ip		@ shift to set position
	orr	r3, r3, r2		@ merge set into way/set/level
	mov	r1, #1
	lsl	r1, r1, ip		@ r1 = set decr

	ubfx	ip, r0, #3, #10		@ get numways - 1 from CCSIDR (r0 dead after this)
	clz	r2, ip			@ number of bits to MSB of way
	lsl	ip, ip, r2		@ shift by that into way position
	mov	r0, #1
	lsl	r2, r0, r2		@ r2 now contains the way decr
	mov	r0, r3			@ get sets/level (no way yet)
	orr	r3, r3, ip		@ merge way into way/set/level
	bfc	r0, #0, #4		@ clear level bits, leaving the shifted set count
	sub	r2, r2, r0		@ subtract from way decr

	/* r3 = ways/sets/level, r2 = way decr, r1 = set decr, r0 and ip are free */
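	/* Walk from the highest way/set down to way 0, set 0.  While the
	 * set field is non-zero, subtract one set.  When it reaches zero,
	 * subtracting the adjusted way decrement drops the way by one and
	 * reloads the set field with numsets - 1 in a single instruction.
	 */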
1:	mcr	p15, 0, r3, c7, c14, 2	@ DCCISW (data cache clean and invalidate by set/way)
	cmp	r3, #15			@ are we done with this level (way/set == 0)
	bls	.Lnext_level_wbinv	@ yes, go to next level
	ubfx	r0, r3, #4, #18		@ extract set bits
	cmp	r0, #0			@ compare
	subne	r3, r3, r1		@ non-zero?, decrement set #
	subeq	r3, r3, r2		@ zero?, decrement way # and restore set count
	b	1b

.Lnext_level_wbinv:
	dsb
	mrc	p15, 1, r0, c0, c0, 1	@ read CLIDR
	ubfx	ip, r0, #24, #3		@ narrow to LoC
	add	r3, r3, #2		@ go to next level
	cmp	r3, ip, lsl #1		@ compare
	blt	.Lstart_wbinv		@ not done, next level (r0 == CLIDR)

.Ldone_wbinv:
	mov	r0, #0			@ default back to cache level 0
	mcr	p15, 2, r0, c0, c0, 0	@ select cache level
	dsb
	isb
	bx	lr
END(armv7_dcache_wbinv_all)

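/* Note that besides the final ICIALLU this also invalidates the L1
 * data/unified cache by set/way: writing 0 to CSSELR selects the data
 * side of L1, so the loop below walks that cache with DCISW.
 */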
/* LINTSTUB: void armv7_icache_inv_all(void); */
ENTRY_NP(armv7_icache_inv_all)
	mov	r0, #0
	mcr	p15, 2, r0, c0, c0, 0	@ set cache level to L1
	mrc	p15, 1, r0, c0, c0, 0	@ read CCSIDR

	ubfx	r2, r0, #13, #15	@ get numsets - 1 from CCSIDR
	ubfx	r3, r0, #3, #10		@ get numways - 1 from CCSIDR
	clz	r1, r3			@ number of bits to MSB of way
	lsl	r3, r3, r1		@ shift into position
	mov	ip, #1
	lsl	ip, ip, r1		@ ip now contains the way decr

	ubfx	r0, r0, #0, #3		@ get linesize from CCSIDR
	add	r0, r0, #4		@ apply bias
	lsl	r2, r2, r0		@ shift sets by log2(linesize)
	add	r3, r3, r2		@ merge numsets - 1 with numways - 1
	sub	ip, ip, r2		@ subtract numsets - 1 from way decr
	mov	r1, #1
	lsl	r1, r1, r0		@ r1 now contains the set decr
	mov	r2, ip			@ r2 now contains the way decr

	/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
1:	mcr	p15, 0, r3, c7, c6, 2	@ DCISW (data cache invalidate by set/way)
	movs	r0, r3			@ get current way/set
	beq	2f			@ at 0 means we are done.
	lsls	r0, r0, #10		@ clear way bits leaving only set bits
	subne	r3, r3, r1		@ non-zero?, decrement set #
	subeq	r3, r3, r2		@ zero?, decrement way # and restore set count
	b	1b

2:	dsb				@ wait for stores to finish
	mov	r0, #0			@ and ...
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate the icache (ICIALLU)
	isb				@ instruction sync barrier
	bx	lr			@ return
END(armv7_icache_inv_all)

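/* armv7_exec_kernel(entry, fdtaddr): disable the MMU and data cache,
 * invalidate the TLBs, then jump to a new kernel image with r0 = 0,
 * r1 = 0, and r2 = FDT address, the usual boot-argument layout for
 * FDT-booting ARM kernels.
 */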
ENTRY_NP(armv7_exec_kernel)
	mov	r4, r0			@ kernel entry
	mov	r5, r1			@ fdt address

	/* Disable MMU and cache */
	mrc	p15, 0, r0, c1, c0, 0	@ SCTLR read
	bic	r0, r0, #5		@ disable dcache and MMU
	mcr	p15, 0, r0, c1, c0, 0	@ SCTLR write

	/* Invalidate TLB */
	dsb
	mov	r0, #0
	mcr	p15, 0, r0, c8, c7, 0	@ flush I+D TLB
	dsb
	isb

	/* Setup kernel args */
	mov	r0, #0
	mov	r1, #0
	mov	r2, r5			@ FDT address
	mov	r3, #0

	/* Jump to kernel */
	bx	r4
END(armv7_exec_kernel)
