/*	$NetBSD: cache.h,v 1.13 2016/07/11 16:15:35 matt Exp $	*/

/*
 * Copyright 2001 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Jason R. Thorpe for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed for the NetBSD Project by
 *	Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _MIPS_CACHE_H_
#define _MIPS_CACHE_H_

/*
 * Cache operations.
 *
 * We define the following primitives:
 *
 * --- Instruction cache synchronization (mandatory):
 *
 *	icache_sync_all		Synchronize I-cache
 *
 *	icache_sync_range	Synchronize I-cache range
 *
 *	icache_sync_range_index	(index ops)
 *
 * --- Primary data cache (mandatory):
 *
 *	pdcache_wbinv_all	Write-back Invalidate primary D-cache
 *
 *	pdcache_wbinv_range	Write-back Invalidate primary D-cache range
 *
 *	pdcache_wbinv_range_index (index ops)
 *
 *	pdcache_inv_range	Invalidate primary D-cache range
 *
 *	pdcache_wb_range	Write-back primary D-cache range
 *
 * --- Secondary data cache (optional):
 *
 *	sdcache_wbinv_all	Write-back Invalidate secondary D-cache
 *
 *	sdcache_wbinv_range	Write-back Invalidate secondary D-cache range
 *
 *	sdcache_wbinv_range_index (index ops)
 *
 *	sdcache_inv_range	Invalidate secondary D-cache range
 *
 *	sdcache_wb_range	Write-back secondary D-cache range
 *
 * There are some rules that must be followed:
 *
 *	I-cache Synch (all or range):
 *		The goal is to synchronize the instruction stream,
 *		so you may need to write-back dirty data cache
 *		blocks first.  If a range is requested, and you
 *		can't synchronize just a range, you have to hit
 *		the whole thing.
 *
 *	D-cache Write-back Invalidate range:
 *		If you can't WB-Inv a range, you must WB-Inv the
 *		entire D-cache.
 *
 *	D-cache Invalidate:
 *		If you can't Inv the D-cache without doing a
 *		Write-back, YOU MUST PANIC.  This is to catch
 *		errors in calling code.  Callers must be aware
 *		of this scenario, and must handle it appropriately
 *		(consider the bus_dma(9) operations).
 *
 *	D-cache Write-back:
 *		If you can't Write-back without doing an invalidate,
 *		that's fine.  Then treat this as a WB-Inv.  Skipping
 *		the invalidate is merely an optimization.
 *
 *	All operations:
 *		Valid virtual addresses must be passed to the
 *		cache operation.
 *
 * Finally, these primitives are grouped together in reasonable
 * ways.  For all operations described here, first the primary
 * cache is frobbed, then the secondary cache is frobbed, if the
 * operation for the secondary cache exists.
 *
 *	mips_icache_sync_all	Synchronize I-cache
 *
 *	mips_icache_sync_range	Synchronize I-cache range
 *
 *	mips_icache_sync_range_index (index ops)
 *
 *	mips_dcache_wbinv_all	Write-back Invalidate D-cache
 *
 *	mips_dcache_wbinv_range	Write-back Invalidate D-cache range
 *
 *	mips_dcache_wbinv_range_index (index ops)
 *
 *	mips_dcache_inv_range	Invalidate D-cache range
 *
 *	mips_dcache_wb_range	Write-back D-cache range
 */
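
/*
 * Illustrative example (a sketch, not part of the interface itself):
 * after the kernel writes instructions into memory -- installing a
 * trampoline, loading a module's text, etc. -- the instruction stream
 * must be synchronized before that code is executed.  Assuming a
 * hypothetical destination buffer "dst" of "len" bytes, a caller would
 * do roughly:
 *
 *	memcpy(dst, insns, len);
 *	mips_icache_sync_range((register_t)dst, len);
 *
 * Per the I-cache Synch rule above, the implementation behind
 * mips_icache_sync_range() is responsible for writing back any dirty
 * D-cache lines covering the range before the I-cache is invalidated.
 */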

struct mips_cache_ops {
	void	(*mco_icache_sync_all)(void);
	void	(*mco_icache_sync_range)(register_t, vsize_t);
	void	(*mco_icache_sync_range_index)(vaddr_t, vsize_t);

	void	(*mco_pdcache_wbinv_all)(void);
	void	(*mco_pdcache_wbinv_range)(register_t, vsize_t);
	void	(*mco_pdcache_wbinv_range_index)(vaddr_t, vsize_t);
	void	(*mco_pdcache_inv_range)(register_t, vsize_t);
	void	(*mco_pdcache_wb_range)(register_t, vsize_t);

	/* These are called only by the (mipsNN) icache functions. */
	void	(*mco_intern_icache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_icache_sync_range)(register_t, vsize_t);
	void	(*mco_intern_pdcache_sync_all)(void);
	void	(*mco_intern_pdcache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_pdcache_sync_range)(register_t, vsize_t);
	/* This is used internally by the (mipsNN) pdcache functions. */
	void	(*mco_intern_pdcache_wbinv_range_index)(vaddr_t, vsize_t);

	void	(*mco_sdcache_wbinv_all)(void);
	void	(*mco_sdcache_wbinv_range)(register_t, vsize_t);
	void	(*mco_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
	void	(*mco_sdcache_inv_range)(register_t, vsize_t);
	void	(*mco_sdcache_wb_range)(register_t, vsize_t);

	/* These are called only by the (mipsNN) icache functions. */
	void	(*mco_intern_sdcache_sync_all)(void);
	void	(*mco_intern_sdcache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_sdcache_sync_range)(register_t, vsize_t);

	/* This is used internally by the (mipsNN) sdcache functions. */
	void	(*mco_intern_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
};

extern struct mips_cache_ops mips_cache_ops;
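
/*
 * Sketch only: a CPU-specific cache configuration routine (reached via
 * mips_config_cache()) is expected to fill mips_cache_ops with its
 * handlers.  The function names here are hypothetical placeholders,
 * not the actual per-CPU implementations:
 *
 *	mips_cache_ops.mco_icache_sync_all   = myport_icache_sync_all;
 *	mips_cache_ops.mco_icache_sync_range = myport_icache_sync_range;
 *	mips_cache_ops.mco_pdcache_wbinv_all = myport_pdcache_wbinv_all;
 *	...
 *
 * The mco_sdcache_* pointers may be left NULL when no secondary cache
 * is present; the wrapper macros below check for NULL before calling.
 */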

/* PRIMARY CACHE VARIABLES */
struct mips_cache_info {
	u_int mci_picache_size;
	u_int mci_picache_line_size;
	u_int mci_picache_ways;
	u_int mci_picache_way_size;
	u_int mci_picache_way_mask;
	bool mci_picache_vivt;		/* virtually indexed and tagged */

	u_int mci_pdcache_size;		/* and unified */
	u_int mci_pdcache_line_size;
	u_int mci_pdcache_ways;
	u_int mci_pdcache_way_size;
	u_int mci_pdcache_way_mask;
	bool mci_pdcache_write_through;

	bool mci_pcache_unified;

	/* SECONDARY CACHE VARIABLES */
	u_int mci_sicache_size;
	u_int mci_sicache_line_size;
	u_int mci_sicache_ways;
	u_int mci_sicache_way_size;
	u_int mci_sicache_way_mask;

	u_int mci_sdcache_size;		/* and unified */
	u_int mci_sdcache_line_size;
	u_int mci_sdcache_ways;
	u_int mci_sdcache_way_size;
	u_int mci_sdcache_way_mask;
	bool mci_sdcache_write_through;

	bool mci_scache_unified;

	/* TERTIARY CACHE VARIABLES */
	u_int mci_tcache_size;		/* always unified */
	u_int mci_tcache_line_size;
	u_int mci_tcache_ways;
	u_int mci_tcache_way_size;
	u_int mci_tcache_way_mask;
	bool mci_tcache_write_through;

	/*
	 * These two variables inform the rest of the kernel about the
	 * size of the largest D-cache line present in the system.  The
	 * mask can be used to determine if a region of memory is cache
	 * line size aligned.
	 *
	 * Whenever any code updates a data cache line size, it should
	 * call mips_dcache_compute_align() to recompute these values.
	 */
	u_int mci_dcache_align;
	u_int mci_dcache_align_mask;

	u_int mci_cache_prefer_mask;
	u_int mci_cache_alias_mask;
	u_int mci_icache_alias_mask;

	bool mci_cache_virtual_alias;
	bool mci_icache_virtual_alias;
};

#if (MIPS1 + MIPS64_RMIXL + MIPS64R2_RMIXL + MIPS64_OCTEON) > 0 \
    && !defined(_MODULE)
#define	MIPS_CACHE_ALIAS_MASK		0
#define	MIPS_CACHE_VIRTUAL_ALIAS	false
#else
#define	MIPS_CACHE_ALIAS_MASK		mips_cache_info.mci_cache_alias_mask
#define	MIPS_CACHE_VIRTUAL_ALIAS	mips_cache_info.mci_cache_virtual_alias
#endif
#if (MIPS1 + MIPS64_RMIXL + MIPS64_OCTEON) > 0 && !defined(_MODULE)
#define	MIPS_ICACHE_ALIAS_MASK		0
#define	MIPS_ICACHE_VIRTUAL_ALIAS	false
#else
#define	MIPS_ICACHE_ALIAS_MASK		mips_cache_info.mci_icache_alias_mask
#define	MIPS_ICACHE_VIRTUAL_ALIAS	mips_cache_info.mci_icache_virtual_alias
#endif

extern struct mips_cache_info mips_cache_info;
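
/*
 * Illustrative sketch of how the alignment variables relate to the
 * per-level line sizes (this mirrors what mips_dcache_compute_align()
 * is meant to provide; it is not a copy of the real implementation):
 *
 *	u_int align = mips_cache_info.mci_pdcache_line_size;
 *	if (mips_cache_info.mci_sdcache_line_size > align)
 *		align = mips_cache_info.mci_sdcache_line_size;
 *	if (mips_cache_info.mci_tcache_line_size > align)
 *		align = mips_cache_info.mci_tcache_line_size;
 *	mips_cache_info.mci_dcache_align = align;
 *	mips_cache_info.mci_dcache_align_mask = align - 1;
 *
 * A buffer is then cache-line aligned when
 * (((vaddr_t)buf & mips_cache_info.mci_dcache_align_mask) == 0).
 */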


/*
 * XXX XXX XXX THIS SHOULD NOT EXIST XXX XXX XXX
 */
#define	mips_cache_indexof(x)	(((vaddr_t)(x)) & MIPS_CACHE_ALIAS_MASK)
#define	mips_cache_badalias(x,y) (((vaddr_t)(x)^(vaddr_t)(y)) & MIPS_CACHE_ALIAS_MASK)
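
/*
 * Example use (a sketch, not lifted from the pmap): on CPUs with a
 * virtually indexed D-cache, two virtual mappings of the same physical
 * page can land in different alias bins.  Code managing such mappings
 * can detect the conflict and flush the old mapping first:
 *
 *	if (MIPS_CACHE_VIRTUAL_ALIAS &&
 *	    mips_cache_badalias(old_va, new_va))
 *		mips_dcache_wbinv_range_index(old_va, PAGE_SIZE);
 */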

#define	__mco_noargs(prefix, x)						\
do {									\
	(*mips_cache_ops.mco_ ## prefix ## p ## x )();			\
	if (*mips_cache_ops.mco_ ## prefix ## s ## x )			\
		(*mips_cache_ops.mco_ ## prefix ## s ## x )();		\
} while (/*CONSTCOND*/0)

#define	__mco_2args(prefix, x, a, b)					\
do {									\
	(*mips_cache_ops.mco_ ## prefix ## p ## x )((a), (b));		\
	if (*mips_cache_ops.mco_ ## prefix ## s ## x )			\
		(*mips_cache_ops.mco_ ## prefix ## s ## x )((a), (b));	\
} while (/*CONSTCOND*/0)
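
/*
 * For illustration, given the helpers above, mips_dcache_wbinv_all()
 * (defined below) expands to roughly:
 *
 *	(*mips_cache_ops.mco_pdcache_wbinv_all)();
 *	if (*mips_cache_ops.mco_sdcache_wbinv_all)
 *		(*mips_cache_ops.mco_sdcache_wbinv_all)();
 *
 * i.e. the primary cache is always operated on, and the secondary
 * cache only when a handler for it has been installed.
 */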

#define	mips_icache_sync_all()						\
	(*mips_cache_ops.mco_icache_sync_all)()

#define	mips_icache_sync_range(v, s)					\
	(*mips_cache_ops.mco_icache_sync_range)((v), (s))

#define	mips_icache_sync_range_index(v, s)				\
	(*mips_cache_ops.mco_icache_sync_range_index)((v), (s))

#define	mips_dcache_wbinv_all()						\
	__mco_noargs(, dcache_wbinv_all)

#define	mips_dcache_wbinv_range(v, s)					\
	__mco_2args(, dcache_wbinv_range, (v), (s))

#define	mips_dcache_wbinv_range_index(v, s)				\
	__mco_2args(, dcache_wbinv_range_index, (v), (s))

#define	mips_dcache_inv_range(v, s)					\
	__mco_2args(, dcache_inv_range, (v), (s))

#define	mips_dcache_wb_range(v, s)					\
	__mco_2args(, dcache_wb_range, (v), (s))
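
/*
 * Illustrative sketch of a bus_dma(9)-style use of the range
 * operations (simplified, and assuming the buffer is aligned to
 * mci_dcache_align; see the D-cache Invalidate rule above):
 *
 *	Before a device writes into the buffer (PREREAD-like case):
 *		mips_dcache_inv_range(va, len);
 *
 *	Before a device reads from the buffer (PREWRITE-like case):
 *		mips_dcache_wb_range(va, len);
 */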


/*
 * Private D-cache functions only called from (currently only the
 * mipsNN) I-cache functions.
 */
#define	mips_intern_dcache_sync_all()					\
	__mco_noargs(intern_, dcache_sync_all)

#define	mips_intern_dcache_sync_range_index(v, s)			\
	__mco_2args(intern_, dcache_sync_range_index, (v), (s))

#define	mips_intern_dcache_sync_range(v, s)				\
	__mco_2args(intern_, dcache_sync_range, (v), (s))

#define	mips_intern_pdcache_wbinv_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_pdcache_wbinv_range_index)((v), (s))

#define	mips_intern_sdcache_wbinv_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_sdcache_wbinv_range_index)((v), (s))

#define	mips_intern_icache_sync_range(v, s)				\
	(*mips_cache_ops.mco_intern_icache_sync_range)((v), (s))

#define	mips_intern_icache_sync_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_icache_sync_range_index)((v), (s))

void	mips_config_cache(void);
void	mips_dcache_compute_align(void);

#include <mips/cache_mipsNN.h>

#endif /* _MIPS_CACHE_H_ */