/*	$NetBSD: booke_cache.c,v 1.1.2.1 2011/01/07 01:26:19 matt Exp $	*/
/*-
 * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects
 * Agency and which was developed by Matt Thomas of 3am Software Foundry.
 *
 * This material is based upon work supported by the Defense Advanced Research
 * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under
 * Contract No. N66001-09-C-2073.
 * Approved for Public Release, Distribution Unlimited
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/cdefs.h>

__KERNEL_RCSID(0, "$NetBSD: booke_cache.c,v 1.1.2.1 2011/01/07 01:26:19 matt Exp $");

#include <sys/param.h>
#include <sys/cpu.h>

#include <uvm/uvm_extern.h>
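
/*
 * Per-cache-line primitives.  Each wraps one PowerPC Book E cache
 * management instruction, applied to the cache line containing (va + off):
 *
 *	dcbst	write a modified data cache line back to memory (clean)
 *	dcbf	write back (if modified) and invalidate a data cache line
 *	dcbi	invalidate a data cache line without writing it back
 *	dcbz	zero a data cache line, establishing it in the cache
 *	dcba	hint to allocate a data cache line without fetching it
 *	icbi	invalidate an instruction cache line
 */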
static inline void
dcbf(vaddr_t va, vsize_t off)
{
	__asm volatile("dcbf\t%0,%1" : : "b" (va), "r" (off));
}

static inline void
dcbst(vaddr_t va, vsize_t off)
{
	__asm volatile("dcbst\t%0,%1" : : "b" (va), "r" (off));
}

static inline void
dcbi(vaddr_t va, vsize_t off)
{
	__asm volatile("dcbi\t%0,%1" : : "b" (va), "r" (off));
}

static inline void
dcbz(vaddr_t va, vsize_t off)
{
	__asm volatile("dcbz\t%0,%1" : : "b" (va), "r" (off));
}

static inline void
dcba(vaddr_t va, vsize_t off)
{
	__asm volatile("dcba\t%0,%1" : : "b" (va), "r" (off));
}

static inline void
icbi(vaddr_t va, vsize_t off)
{
	__asm volatile("icbi\t%0,%1" : : "b" (va), "r" (off));
}

/*
 * Apply a per-line cache operation to every cache line touched by
 * the range [va, va + len).
 */
static inline void
cache_op(vaddr_t va, vsize_t len, vsize_t line_size,
	void (*op)(vaddr_t, vsize_t))
{
	KASSERT(line_size > 0);

	if (len == 0)
		return;

	/* Round the range so the loop covers every affected cache line. */
	len += va & (line_size - 1);
	va &= ~(line_size - 1);

	for (vsize_t i = 0; i < len; i += line_size)
		(*op)(va, i);
	__asm volatile("mbar 0");	/* order the cache ops before later accesses */
}
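
/*
 * Illustrative example of the rounding above (hypothetical numbers, not
 * from the original code): with va = 0x1234, len = 0x30 and a 32-byte
 * line size, cache_op() adjusts len to 0x44 and va to 0x1220, so the
 * loop issues the operation at offsets 0x00, 0x20 and 0x40 -- three
 * lines spanning 0x1220..0x127f, which covers the entire original
 * range 0x1234..0x1263.
 */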

void
dcache_wb_page(vaddr_t va)
{
	cache_op(va, PAGE_SIZE, curcpu()->ci_ci.dcache_line_size, dcbst);
}

void
dcache_wbinv_page(vaddr_t va)
{
	cache_op(va, PAGE_SIZE, curcpu()->ci_ci.dcache_line_size, dcbf);
}

void
dcache_inv_page(vaddr_t va)
{
	cache_op(va, PAGE_SIZE, curcpu()->ci_ci.dcache_line_size, dcbi);
}

void
dcache_zero_page(vaddr_t va)
{
	cache_op(va, PAGE_SIZE, curcpu()->ci_ci.dcache_line_size, dcbz);
}

void
icache_inv_page(vaddr_t va)
{
	__asm volatile("msync");
	cache_op(va, PAGE_SIZE, curcpu()->ci_ci.icache_line_size, icbi);
	__asm volatile("msync");
	/* the context-synchronizing instruction will be the rfi to user mode */
}

void
dcache_wb(vaddr_t va, vsize_t len)
{
	cache_op(va, len, curcpu()->ci_ci.dcache_line_size, dcbst);
}

void
dcache_wbinv(vaddr_t va, vsize_t len)
{
	cache_op(va, len, curcpu()->ci_ci.dcache_line_size, dcbf);
}

void
dcache_inv(vaddr_t va, vsize_t len)
{
	cache_op(va, len, curcpu()->ci_ci.dcache_line_size, dcbi);
}

void
icache_inv(vaddr_t va, vsize_t len)
{
	__asm volatile("msync");
	cache_op(va, len, curcpu()->ci_ci.icache_line_size, icbi);
	__asm volatile("msync");
}
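
#ifdef notyet
/*
 * Usage sketch (illustrative, not part of the original file): after the
 * kernel writes instructions into memory -- e.g. when loading code or
 * planting a breakpoint -- the new bytes must be pushed out of the data
 * cache and any stale instruction cache lines discarded before they are
 * executed.  A caller might combine the routines above as follows; the
 * function name is hypothetical.
 */
void
cache_sync_icache_range(vaddr_t va, vsize_t len)
{
	dcache_wb(va, len);	/* write modified data cache lines to memory */
	icache_inv(va, len);	/* discard stale instruction cache lines */
	/* a later context-synchronizing instruction (isync/rfi) completes it */
}
#endif /* notyet */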