/*	$NetBSD: cache.h,v 1.17 2025/05/03 02:00:46 riastradh Exp $	*/

/*
 * Copyright 2001 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Jason R. Thorpe for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed for the NetBSD Project by
 *	Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _MIPS_CACHE_H_
#define	_MIPS_CACHE_H_

#ifdef _KERNEL_OPT
#include "opt_cputype.h"
#endif

#include <sys/types.h>

/*
 * Cache operations.
 *
 * We define the following primitives:
 *
 * --- Instruction cache synchronization (mandatory):
 *
 *	icache_sync_all		Synchronize I-cache
 *
 *	icache_sync_range	Synchronize I-cache range
 *
 *	icache_sync_range_index	(index ops)
 *
 * --- Primary data cache (mandatory):
 *
 *	pdcache_wbinv_all	Write-back Invalidate primary D-cache
 *
 *	pdcache_wbinv_range	Write-back Invalidate primary D-cache range
 *
 *	pdcache_wbinv_range_index (index ops)
 *
 *	pdcache_inv_range	Invalidate primary D-cache range
 *
 *	pdcache_wb_range	Write-back primary D-cache range
 *
 * --- Secondary data cache (optional):
 *
 *	sdcache_wbinv_all	Write-back Invalidate secondary D-cache
 *
 *	sdcache_wbinv_range	Write-back Invalidate secondary D-cache range
 *
 *	sdcache_wbinv_range_index (index ops)
 *
 *	sdcache_inv_range	Invalidate secondary D-cache range
 *
 *	sdcache_wb_range	Write-back secondary D-cache range
 *
 * There are some rules that must be followed:
 *
 *	I-cache Synch (all or range):
 *		The goal is to synchronize the instruction stream,
 *		so you may need to write-back dirty data cache
 *		blocks first.  If a range is requested, and you
 *		can't synchronize just a range, you have to hit
 *		the whole thing.
 *
 *	D-cache Write-back Invalidate range:
 *		If you can't WB-Inv a range, you must WB-Inv the
 *		entire D-cache.
 *
 *	D-cache Invalidate:
 *		If you can't Inv the D-cache without doing a
 *		Write-back, YOU MUST PANIC.  This is to catch
 *		errors in calling code.  Callers must be aware
 *		of this scenario, and must handle it appropriately
 *		(consider the bus_dma(9) operations).
 *
 *	D-cache Write-back:
 *		If you can't Write-back without doing an invalidate,
 *		that's fine.  Then treat this as a WB-Inv.  Skipping
 *		the invalidate is merely an optimization.
 *
 *	All operations:
 *		Valid virtual addresses must be passed to the
 *		cache operation.
 *
 * Finally, these primitives are grouped together in reasonable
 * ways.  For all operations described here, first the primary
 * cache is frobbed, then the secondary cache frobbed, if the
 * operation for the secondary cache exists.
 *
 *	mips_icache_sync_all	Synchronize I-cache
 *
 *	mips_icache_sync_range	Synchronize I-cache range
 *
 *	mips_icache_sync_range_index (index ops)
 *
 *	mips_dcache_wbinv_all	Write-back Invalidate D-cache
 *
 *	mips_dcache_wbinv_range	Write-back Invalidate D-cache range
 *
 *	mips_dcache_wbinv_range_index (index ops)
 *
 *	mips_dcache_inv_range	Invalidate D-cache range
 *
 *	mips_dcache_wb_range	Write-back D-cache range
 */
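
/*
 * Illustrative sketch only (not part of the MI cache interface): a
 * typical caller that stores a new instruction must synchronize the
 * affected range before the code is executed.  Per the I-cache rules
 * above, the sync primitive performs any D-cache write-back it needs
 * internally, so a single call suffices.  The function name below is
 * hypothetical.
 */
#if 0	/* example only -- never compiled */
static inline void
example_patch_insn(uint32_t *insnp, uint32_t newinsn)
{

	*insnp = newinsn;
	mips_icache_sync_range((register_t)(intptr_t)insnp,
	    sizeof(*insnp));
}
#endif
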

struct mips_cache_ops {
	void	(*mco_icache_sync_all)(void);
	void	(*mco_icache_sync_range)(register_t, vsize_t);
	void	(*mco_icache_sync_range_index)(vaddr_t, vsize_t);

	void	(*mco_pdcache_wbinv_all)(void);
	void	(*mco_pdcache_wbinv_range)(register_t, vsize_t);
	void	(*mco_pdcache_wbinv_range_index)(vaddr_t, vsize_t);
	void	(*mco_pdcache_inv_range)(register_t, vsize_t);
	void	(*mco_pdcache_wb_range)(register_t, vsize_t);

	/* These are called only by the (mipsNN) icache functions. */
	void	(*mco_intern_icache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_icache_sync_range)(register_t, vsize_t);
	void	(*mco_intern_pdcache_sync_all)(void);
	void	(*mco_intern_pdcache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_pdcache_sync_range)(register_t, vsize_t);
	/* This is used internally by the (mipsNN) pdcache functions. */
	void	(*mco_intern_pdcache_wbinv_range_index)(vaddr_t, vsize_t);

	void	(*mco_sdcache_wbinv_all)(void);
	void	(*mco_sdcache_wbinv_range)(register_t, vsize_t);
	void	(*mco_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
	void	(*mco_sdcache_inv_range)(register_t, vsize_t);
	void	(*mco_sdcache_wb_range)(register_t, vsize_t);

	/* These are called only by the (mipsNN) icache functions. */
	void	(*mco_intern_sdcache_sync_all)(void);
	void	(*mco_intern_sdcache_sync_range_index)(vaddr_t, vsize_t);
	void	(*mco_intern_sdcache_sync_range)(register_t, vsize_t);

	/* This is used internally by the (mipsNN) sdcache functions. */
	void	(*mco_intern_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
};

extern struct mips_cache_ops mips_cache_ops;
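
/*
 * Illustrative sketch only (not part of the MI cache interface):
 * machine-dependent start-up code (see mips_config_cache() below) is
 * expected to fill in mips_cache_ops with routines matching the
 * detected CPU; secondary-cache entries may be left NULL when there
 * is no L2 cache, and the wrapper macros later in this file test for
 * that.  The routine names used here are hypothetical.
 */
#if 0	/* example only -- never compiled */
void
example_config_cache(void)
{

	mips_cache_ops.mco_icache_sync_all = example_icache_sync_all;
	mips_cache_ops.mco_icache_sync_range = example_icache_sync_range;
	mips_cache_ops.mco_pdcache_wbinv_all = example_pdcache_wbinv_all;
	mips_cache_ops.mco_pdcache_wbinv_range = example_pdcache_wbinv_range;
	/* No L2 cache on this CPU: leave the mco_sdcache_* hooks NULL. */
}
#endif
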

/* PRIMARY CACHE VARIABLES */
struct mips_cache_info {
	u_int	mci_picache_size;
	u_int	mci_picache_line_size;
	u_int	mci_picache_ways;
	u_int	mci_picache_way_size;
	u_int	mci_picache_way_mask;
	bool	mci_picache_vivt;	/* virtually indexed and tagged */

	u_int	mci_pdcache_size;	/* and unified */
	u_int	mci_pdcache_line_size;
	u_int	mci_pdcache_ways;
	u_int	mci_pdcache_way_size;
	u_int	mci_pdcache_way_mask;
	bool	mci_pdcache_write_through;

	bool	mci_pcache_unified;

	/* SECONDARY CACHE VARIABLES */
	u_int	mci_sicache_size;
	u_int	mci_sicache_line_size;
	u_int	mci_sicache_ways;
	u_int	mci_sicache_way_size;
	u_int	mci_sicache_way_mask;

	u_int	mci_sdcache_size;	/* and unified */
	u_int	mci_sdcache_line_size;
	u_int	mci_sdcache_ways;
	u_int	mci_sdcache_way_size;
	u_int	mci_sdcache_way_mask;
	bool	mci_sdcache_write_through;

	bool	mci_scache_unified;

	/* TERTIARY CACHE VARIABLES */
	u_int	mci_tcache_size;	/* always unified */
	u_int	mci_tcache_line_size;
	u_int	mci_tcache_ways;
	u_int	mci_tcache_way_size;
	u_int	mci_tcache_way_mask;
	bool	mci_tcache_write_through;

	/*
	 * These two variables inform the rest of the kernel about the
	 * size of the largest D-cache line present in the system.  The
	 * mask can be used to determine if a region of memory is cache
	 * line size aligned.
	 *
	 * Whenever any code updates a data cache line size, it should
	 * call mips_dcache_compute_align() to recompute these values.
	 */
	u_int	mci_dcache_align;
	u_int	mci_dcache_align_mask;

	u_int	mci_cache_prefer_mask;
	u_int	mci_cache_alias_mask;
	u_int	mci_icache_alias_mask;

	bool	mci_cache_virtual_alias;
	bool	mci_icache_virtual_alias;
};
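
/*
 * Illustrative sketch only (not part of the MI cache interface):
 * mci_dcache_align and mci_dcache_align_mask describe the largest
 * D-cache line in the system; the mask can be used to check whether a
 * buffer begins and ends on cache-line boundaries, e.g. before a pure
 * invalidate that would otherwise discard a neighbour's dirty data.
 * The function name below is hypothetical.
 */
#if 0	/* example only -- never compiled */
static inline bool
example_dcache_line_aligned(vaddr_t va, vsize_t len)
{
	const u_int mask = mips_cache_info.mci_dcache_align_mask;

	return (va & mask) == 0 && (len & mask) == 0;
}
#endif
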

#if (MIPS1 + MIPS64_RMIXL + MIPS64R2_RMIXL + MIPS64_OCTEON) > 0 && \
    (MIPS3 + MIPS4) == 0 \
    && !defined(_MODULE)
#define	MIPS_CACHE_ALIAS_MASK		0
#define	MIPS_CACHE_VIRTUAL_ALIAS	false
#else
#define	MIPS_CACHE_ALIAS_MASK		mips_cache_info.mci_cache_alias_mask
#define	MIPS_CACHE_VIRTUAL_ALIAS	mips_cache_info.mci_cache_virtual_alias
#endif
#if (MIPS1 + MIPS64_RMIXL + MIPS64_OCTEON) > 0 && \
    (MIPS3 + MIPS4) == 0 \
    && !defined(_MODULE)
#define	MIPS_ICACHE_ALIAS_MASK		0
#define	MIPS_ICACHE_VIRTUAL_ALIAS	false
#else
#define	MIPS_ICACHE_ALIAS_MASK		mips_cache_info.mci_icache_alias_mask
#define	MIPS_ICACHE_VIRTUAL_ALIAS	mips_cache_info.mci_icache_virtual_alias
#endif

extern struct mips_cache_info mips_cache_info;

/*
 * XXX XXX XXX THIS SHOULD NOT EXIST XXX XXX XXX
 */
#define	mips_cache_indexof(x)	(((vaddr_t)(x)) & MIPS_CACHE_ALIAS_MASK)
#define	mips_cache_badalias(x,y) (((vaddr_t)(x)^(vaddr_t)(y)) & MIPS_CACHE_ALIAS_MASK)

#define	__mco_noargs(prefix, x)						\
do {									\
	(*mips_cache_ops.mco_ ## prefix ## p ## x )();			\
	if (*mips_cache_ops.mco_ ## prefix ## s ## x )			\
		(*mips_cache_ops.mco_ ## prefix ## s ## x )();		\
} while (/*CONSTCOND*/0)

#define	__mco_2args(prefix, x, a, b)					\
do {									\
	(*mips_cache_ops.mco_ ## prefix ## p ## x )((a), (b));		\
	if (*mips_cache_ops.mco_ ## prefix ## s ## x )			\
		(*mips_cache_ops.mco_ ## prefix ## s ## x )((a), (b));	\
} while (/*CONSTCOND*/0)

#define	mips_icache_sync_all()						\
	(*mips_cache_ops.mco_icache_sync_all)()

#define	mips_icache_sync_range(v, s)					\
	(*mips_cache_ops.mco_icache_sync_range)((v), (s))

#define	mips_icache_sync_range_index(v, s)				\
	(*mips_cache_ops.mco_icache_sync_range_index)((v), (s))

#define	mips_dcache_wbinv_all()						\
	__mco_noargs(, dcache_wbinv_all)

#define	mips_dcache_wbinv_range(v, s)					\
	__mco_2args(, dcache_wbinv_range, (v), (s))

#define	mips_dcache_wbinv_range_index(v, s)				\
	__mco_2args(, dcache_wbinv_range_index, (v), (s))

#define	mips_dcache_inv_range(v, s)					\
	__mco_2args(, dcache_inv_range, (v), (s))

#define	mips_dcache_wb_range(v, s)					\
	__mco_2args(, dcache_wb_range, (v), (s))

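
/*
 * Illustrative sketch only (not part of the MI cache interface):
 * typical bus_dma(9)-style use of the D-cache range operations.
 * Before a device writes into a buffer, stale lines covering it are
 * invalidated; before a device reads from a buffer, dirty lines are
 * written back.  A real implementation must also respect the
 * invalidate rules and line-alignment caveats above.  The function
 * and argument names here are hypothetical.
 */
#if 0	/* example only -- never compiled */
static void
example_dma_prepare(register_t va, vsize_t len, bool device_writes)
{

	if (device_writes)
		mips_dcache_inv_range(va, len);	/* cf. BUS_DMASYNC_PREREAD */
	else
		mips_dcache_wb_range(va, len);	/* cf. BUS_DMASYNC_PREWRITE */
}
#endif
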
/*
 * Private D-cache functions only called from (currently only the
 * mipsNN) I-cache functions.
 */
#define	mips_intern_dcache_sync_all()					\
	__mco_noargs(intern_, dcache_sync_all)

#define	mips_intern_dcache_sync_range_index(v, s)			\
	__mco_2args(intern_, dcache_sync_range_index, (v), (s))

#define	mips_intern_dcache_sync_range(v, s)				\
	__mco_2args(intern_, dcache_sync_range, (v), (s))

#define	mips_intern_pdcache_wbinv_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_pdcache_wbinv_range_index)((v), (s))

#define	mips_intern_sdcache_wbinv_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_sdcache_wbinv_range_index)((v), (s))

#define	mips_intern_icache_sync_range(v, s)				\
	(*mips_cache_ops.mco_intern_icache_sync_range)((v), (s))

#define	mips_intern_icache_sync_range_index(v, s)			\
	(*mips_cache_ops.mco_intern_icache_sync_range_index)((v), (s))

void	mips_config_cache(void);
void	mips_dcache_compute_align(void);

#include <mips/cache_mipsNN.h>

#endif /* _MIPS_CACHE_H_ */