/*-
 * Copyright (c) 2010 Per Odlund <per.odlund (at) armagedon.se>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* ARMv7 assembly functions for manipulating caches and other core functions.
 * Based on cpufuncs for v6 and xscale.
 */

#include <machine/asm.h>

        .arch   armv7a

/* LINTSTUB: void armv7_dcache_wbinv_range(vaddr_t, vsize_t); */
ENTRY(armv7_dcache_wbinv_range)
        mov     ip, #0
        mcr     p15, 2, ip, c0, c0, 0   @ CSSELR: select L1 data cache
        mrc     p15, 1, r2, c0, c0, 0   @ read CCSIDR
        and     r2, r2, #7              @ get line size (log2(size)-4, 0=16)
        mov     ip, #16                 @ make a bit mask
        lsl     r2, ip, r2              @ and shift into position
        sub     ip, r2, #1              @ make into a mask
        and     r3, r0, ip              @ get offset into cache line
        add     r1, r1, r3              @ add to length
        bic     r0, r0, ip              @ clear offset from start
        dsb
1:
        mcr     p15, 0, r0, c7, c14, 1  @ DCCIMVAC: wb and inv D-cache line to PoC
        add     r0, r0, r2
        subs    r1, r1, r2
        bhi     1b
        dsb                             @ data synchronization barrier
        bx      lr
END(armv7_dcache_wbinv_range)
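
/*
 * For reference, a rough C sketch of the range walk above.  This is an
 * illustration only, not part of the build; read_ccsidr() and dccimvac()
 * are hypothetical helpers standing in for the CSSELR/CCSIDR and DCCIMVAC
 * cp15 accesses and are not defined in this file.
 *
 *	#include <stddef.h>
 *	#include <stdint.h>
 *
 *	extern uint32_t read_ccsidr(void);   // assumes L1 D-cache selected in CSSELR
 *	extern void dccimvac(uintptr_t va);  // clean+invalidate one line to PoC
 *
 *	void
 *	dcache_wbinv_range_sketch(uintptr_t va, size_t len)
 *	{
 *		uint32_t ccsidr = read_ccsidr();
 *		size_t line = (size_t)16 << (ccsidr & 7);  // LineSize field: log2(bytes) - 4
 *
 *		len += va & (line - 1);            // grow length by the start misalignment
 *		va &= ~(uintptr_t)(line - 1);      // align start down to a line boundary
 *		while (len > 0) {
 *			dccimvac(va);
 *			va += line;
 *			len = (len > line) ? len - line : 0;
 *		}
 *	}
 */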

/* LINTSTUB: void armv7_icache_inv_all(void); */
ENTRY_NP(armv7_icache_inv_all)
        mov     r0, #0
        mcr     p15, 2, r0, c0, c0, 0   @ CSSELR: select L1 data cache
        mrc     p15, 1, r0, c0, c0, 0   @ read CCSIDR

        ubfx    r2, r0, #13, #15        @ get numsets - 1 from CCSIDR
        ubfx    r3, r0, #3, #10         @ get numways - 1 from CCSIDR
        clz     r1, r3                  @ number of bits to MSB of way
        lsl     r3, r3, r1              @ shift into position
        mov     ip, #1
        lsl     ip, ip, r1              @ ip now contains the way decr

        ubfx    r0, r0, #0, #3          @ get linesize from CCSIDR
        add     r0, r0, #4              @ apply bias
        lsl     r2, r2, r0              @ shift sets by log2(linesize)
        add     r3, r3, r2              @ merge numsets - 1 with numways - 1
        sub     ip, ip, r2              @ subtract numsets - 1 from way decr
        mov     r1, #1
        lsl     r1, r1, r0              @ r1 now contains the set decr
        mov     r2, ip                  @ r2 now contains set way decr

        /* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
1:      mcr     p15, 0, r3, c7, c6, 2   @ DCISW (data cache invalidate by set/way)
        movs    r0, r3                  @ get current way/set
        beq     2f                      @ at 0 means we are done
        lsls    r0, r0, #10             @ clear way bits leaving only set bits
        subne   r3, r3, r1              @ non-zero?, decrement set #
        subeq   r3, r3, r2              @ zero?, decrement way # and restore set count
        b       1b

2:      dsb                             @ wait for stores to finish
        mov     r0, #0                  @ and ...
        mcr     p15, 0, r0, c7, c5, 0   @ ICIALLU: invalidate entire I-cache
        isb                             @ instruction sync barrier
        bx      lr                      @ return
END(armv7_icache_inv_all)
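
/*
 * The loop above packs the way and set numbers into a single register and
 * counts both down at once, then finishes with ICIALLU.  A rough C sketch
 * of the equivalent nested set/way walk is below; it is illustration only,
 * and read_ccsidr(), dcisw() and iciallu() are hypothetical helpers for
 * the cp15 accesses, not functions defined in this file.
 *
 *	#include <stdint.h>
 *
 *	extern uint32_t read_ccsidr(void);   // assumes L1 D-cache selected in CSSELR
 *	extern void dcisw(uint32_t setway);  // data cache invalidate by set/way
 *	extern void iciallu(void);           // invalidate entire I-cache
 *
 *	void
 *	l1_dcache_then_icache_inv_sketch(void)
 *	{
 *		uint32_t ccsidr = read_ccsidr();
 *		uint32_t nsets = ((ccsidr >> 13) & 0x7fff) + 1;
 *		uint32_t nways = ((ccsidr >> 3) & 0x3ff) + 1;
 *		uint32_t setshift = (ccsidr & 7) + 4;   // log2(line size in bytes)
 *		// way index is MSB-aligned in the set/way word (level bits are 0 for L1)
 *		uint32_t wayshift = (nways > 1) ? __builtin_clz(nways - 1) : 0;
 *
 *		for (uint32_t way = 0; way < nways; way++)
 *			for (uint32_t set = 0; set < nsets; set++)
 *				dcisw((way << wayshift) | (set << setshift));
 *		iciallu();
 *	}
 */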

ENTRY_NP(armv7_exec_kernel)
        mov     r4, r0                  @ kernel entry point
        mov     r5, r1                  @ fdt address

        /* Disable MMU and D-cache */
        mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
        bic     r0, r0, #5              @ clear SCTLR M and C bits (MMU, D-cache)
        mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR

        /* Invalidate TLB */
        dsb
        mov     r0, #0
        mcr     p15, 0, r0, c8, c7, 0   @ TLBIALL: flush I+D TLB
        dsb
        isb

        /* Setup kernel args */
        mov     r0, #0
        mov     r1, #0
        mov     r2, r5                  @ fdt address
        mov     r3, #0

        /* Jump to kernel */
        bx      r4
END(armv7_exec_kernel)
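
/*
 * armv7_exec_kernel() does not return: it clears SCTLR.M and SCTLR.C,
 * invalidates the TLB, and branches to the new image with r0 = 0, r1 = 0,
 * r2 = FDT address and r3 = 0, as set up above.  A hypothetical caller in
 * a loader might look like the sketch below; the prototype assumed for
 * armv7_exec_kernel() and the boot_sketch() names are illustrative, only
 * the two cache routines come from this file.
 *
 *	#include <sys/types.h>	// vaddr_t, vsize_t, paddr_t
 *
 *	extern void armv7_dcache_wbinv_range(vaddr_t, vsize_t);
 *	extern void armv7_icache_inv_all(void);
 *	extern void armv7_exec_kernel(paddr_t entry, paddr_t fdt);  // assumed prototype
 *
 *	void
 *	boot_sketch(vaddr_t kernel_va, vsize_t kernel_size,
 *	    paddr_t kernel_entry, paddr_t fdt_pa)
 *	{
 *		// push the copied image out to the point of coherency and drop any
 *		// stale instruction cache lines before jumping with the caches off
 *		armv7_dcache_wbinv_range(kernel_va, kernel_size);
 *		armv7_icache_inv_all();
 *		armv7_exec_kernel(kernel_entry, fdt_pa);	// never returns
 *	}
 */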