/* $NetBSD: armv7_generic_space.c,v 1.13 2020/10/30 18:54:36 skrll Exp $ */

/*-
 * Copyright (c) 2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Nick Hudson
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */


#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: armv7_generic_space.c,v 1.13 2020/10/30 18:54:36 skrll Exp $");

#include <sys/param.h>

#include <sys/bus.h>
#include <sys/systm.h>

#include <uvm/uvm_extern.h>

/* Prototypes for all the bus_space structure functions */
bs_protos(armv7_generic);
bs_protos(armv7_generic_a4x);
bs_protos(a4x);
bs_protos(bs_notimpl);
bs_protos(generic);
bs_protos(generic_armv4);

#if __ARMEB__
#define NSWAP(n)	n ## _swap
#else
#define NSWAP(n)	n
#endif

__strong_alias(arm_generic_bs_tag,armv7_generic_bs_tag);
__strong_alias(arm_generic_a4x_bs_tag,armv7_generic_a4x_bs_tag);

struct bus_space armv7_generic_bs_tag = {
	/* cookie */
	.bs_cookie = (void *) 0,

	/* mapping/unmapping */
	.bs_map = armv7_generic_bs_map,
	.bs_unmap = armv7_generic_bs_unmap,
	.bs_subregion = armv7_generic_bs_subregion,

	/* allocation/deallocation */
	.bs_alloc = armv7_generic_bs_alloc,	/* not implemented */
	.bs_free = armv7_generic_bs_free,	/* not implemented */

	/* get kernel virtual address */
	.bs_vaddr = armv7_generic_bs_vaddr,

	/* mmap */
	.bs_mmap = armv7_generic_bs_mmap,

	/* barrier */
	.bs_barrier = armv7_generic_bs_barrier,

	/* read (single) */
	.bs_r_1 = generic_bs_r_1,
	.bs_r_2 = NSWAP(generic_armv4_bs_r_2),
	.bs_r_4 = NSWAP(generic_bs_r_4),
	.bs_r_8 = bs_notimpl_bs_r_8,

	/* read multiple */
	.bs_rm_1 = generic_bs_rm_1,
	.bs_rm_2 = NSWAP(generic_armv4_bs_rm_2),
	.bs_rm_4 = NSWAP(generic_bs_rm_4),
	.bs_rm_8 = bs_notimpl_bs_rm_8,

	/* read region */
	.bs_rr_1 = generic_bs_rr_1,
	.bs_rr_2 = NSWAP(generic_armv4_bs_rr_2),
	.bs_rr_4 = NSWAP(generic_bs_rr_4),
	.bs_rr_8 = bs_notimpl_bs_rr_8,

	/* write (single) */
	.bs_w_1 = generic_bs_w_1,
	.bs_w_2 = NSWAP(generic_armv4_bs_w_2),
	.bs_w_4 = NSWAP(generic_bs_w_4),
	.bs_w_8 = bs_notimpl_bs_w_8,

	/* write multiple */
	.bs_wm_1 = generic_bs_wm_1,
	.bs_wm_2 = NSWAP(generic_armv4_bs_wm_2),
	.bs_wm_4 = NSWAP(generic_bs_wm_4),
	.bs_wm_8 = bs_notimpl_bs_wm_8,

	/* write region */
	.bs_wr_1 = generic_bs_wr_1,
	.bs_wr_2 = NSWAP(generic_armv4_bs_wr_2),
	.bs_wr_4 = NSWAP(generic_bs_wr_4),
	.bs_wr_8 = bs_notimpl_bs_wr_8,

	/* set multiple */
	.bs_sm_1 = bs_notimpl_bs_sm_1,
	.bs_sm_2 = bs_notimpl_bs_sm_2,
	.bs_sm_4 = bs_notimpl_bs_sm_4,
	.bs_sm_8 = bs_notimpl_bs_sm_8,

	/* set region */
	.bs_sr_1 = generic_bs_sr_1,
	.bs_sr_2 = NSWAP(generic_armv4_bs_sr_2),
	.bs_sr_4 = NSWAP(generic_bs_sr_4),
	.bs_sr_8 = bs_notimpl_bs_sr_8,

	/* copy */
	.bs_c_1 = bs_notimpl_bs_c_1,
	.bs_c_2 = generic_armv4_bs_c_2,
	.bs_c_4 = bs_notimpl_bs_c_4,
	.bs_c_8 = bs_notimpl_bs_c_8,

#ifdef __BUS_SPACE_HAS_STREAM_METHODS
	/* read (single) */
	.bs_r_1_s = generic_bs_r_1,
	.bs_r_2_s = NSWAP(generic_armv4_bs_r_2),
	.bs_r_4_s = NSWAP(generic_bs_r_4),
	.bs_r_8_s = bs_notimpl_bs_r_8,

	/* read multiple */
	.bs_rm_1_s = generic_bs_rm_1,
	.bs_rm_2_s = NSWAP(generic_armv4_bs_rm_2),
	.bs_rm_4_s = NSWAP(generic_bs_rm_4),
	.bs_rm_8_s = bs_notimpl_bs_rm_8,

	/* read region */
	.bs_rr_1_s = generic_bs_rr_1,
	.bs_rr_2_s = NSWAP(generic_armv4_bs_rr_2),
	.bs_rr_4_s = NSWAP(generic_bs_rr_4),
	.bs_rr_8_s = bs_notimpl_bs_rr_8,

	/* write (single) */
	.bs_w_1_s = generic_bs_w_1,
	.bs_w_2_s = NSWAP(generic_armv4_bs_w_2),
	.bs_w_4_s = NSWAP(generic_bs_w_4),
	.bs_w_8_s = bs_notimpl_bs_w_8,

	/* write multiple */
	.bs_wm_1_s = generic_bs_wm_1,
	.bs_wm_2_s = NSWAP(generic_armv4_bs_wm_2),
	.bs_wm_4_s = NSWAP(generic_bs_wm_4),
	.bs_wm_8_s = bs_notimpl_bs_wm_8,

	/* write region */
	.bs_wr_1_s = generic_bs_wr_1,
	.bs_wr_2_s = NSWAP(generic_armv4_bs_wr_2),
	.bs_wr_4_s = NSWAP(generic_bs_wr_4),
	.bs_wr_8_s = bs_notimpl_bs_wr_8,
#endif
};
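/*
 * Illustrative usage sketch: a driver handed this tag as its
 * bus_space_tag_t accesses a register window through the standard
 * bus_space(9) wrappers.  The softc fields (sc_iot, sc_addr, sc_size)
 * and the REG_CTRL offset below are hypothetical.
 *
 *	bus_space_handle_t ioh;
 *	if (bus_space_map(sc->sc_iot, sc->sc_addr, sc->sc_size, 0, &ioh) != 0)
 *		return ENOMEM;
 *	uint32_t ctrl = bus_space_read_4(sc->sc_iot, ioh, REG_CTRL);
 *	bus_space_write_4(sc->sc_iot, ioh, REG_CTRL, ctrl | 1);
 *	bus_space_unmap(sc->sc_iot, ioh, sc->sc_size);
 */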
struct bus_space armv7_generic_a4x_bs_tag = {
	/* cookie */
	.bs_cookie = (void *) 0,

	/* mapping/unmapping */
	.bs_map = armv7_generic_bs_map,
	.bs_unmap = armv7_generic_bs_unmap,
	.bs_subregion = armv7_generic_a4x_bs_subregion,

	/* allocation/deallocation */
	.bs_alloc = armv7_generic_bs_alloc,	/* not implemented */
	.bs_free = armv7_generic_bs_free,	/* not implemented */

	/* get kernel virtual address */
	.bs_vaddr = armv7_generic_bs_vaddr,

	/* mmap */
	.bs_mmap = armv7_generic_a4x_bs_mmap,

	/* barrier */
	.bs_barrier = armv7_generic_bs_barrier,

	/* read (single) */
	.bs_r_1 = a4x_bs_r_1,
	.bs_r_2 = NSWAP(a4x_bs_r_2),
	.bs_r_4 = NSWAP(a4x_bs_r_4),
	.bs_r_8 = bs_notimpl_bs_r_8,

	/* read multiple */
	.bs_rm_1 = a4x_bs_rm_1,
	.bs_rm_2 = NSWAP(a4x_bs_rm_2),
	.bs_rm_4 = NSWAP(a4x_bs_rm_4),
	.bs_rm_8 = bs_notimpl_bs_rm_8,

	/* read region */
	.bs_rr_1 = bs_notimpl_bs_rr_1,
	.bs_rr_2 = bs_notimpl_bs_rr_2,
	.bs_rr_4 = bs_notimpl_bs_rr_4,
	.bs_rr_8 = bs_notimpl_bs_rr_8,

	/* write (single) */
	.bs_w_1 = a4x_bs_w_1,
	.bs_w_2 = NSWAP(a4x_bs_w_2),
	.bs_w_4 = NSWAP(a4x_bs_w_4),
	.bs_w_8 = bs_notimpl_bs_w_8,

	/* write multiple */
	.bs_wm_1 = a4x_bs_wm_1,
	.bs_wm_2 = NSWAP(a4x_bs_wm_2),
	.bs_wm_4 = NSWAP(a4x_bs_wm_4),
	.bs_wm_8 = bs_notimpl_bs_wm_8,

	/* write region */
	.bs_wr_1 = bs_notimpl_bs_wr_1,
	.bs_wr_2 = bs_notimpl_bs_wr_2,
	.bs_wr_4 = bs_notimpl_bs_wr_4,
	.bs_wr_8 = bs_notimpl_bs_wr_8,

	/* set multiple */
	.bs_sm_1 = bs_notimpl_bs_sm_1,
	.bs_sm_2 = bs_notimpl_bs_sm_2,
	.bs_sm_4 = bs_notimpl_bs_sm_4,
	.bs_sm_8 = bs_notimpl_bs_sm_8,

	/* set region */
	.bs_sr_1 = bs_notimpl_bs_sr_1,
	.bs_sr_2 = bs_notimpl_bs_sr_2,
	.bs_sr_4 = bs_notimpl_bs_sr_4,
	.bs_sr_8 = bs_notimpl_bs_sr_8,

	/* copy */
	.bs_c_1 = bs_notimpl_bs_c_1,
	.bs_c_2 = bs_notimpl_bs_c_2,
	.bs_c_4 = bs_notimpl_bs_c_4,
	.bs_c_8 = bs_notimpl_bs_c_8,

#ifdef __BUS_SPACE_HAS_STREAM_METHODS
	/* read (single) */
	.bs_r_1_s = a4x_bs_r_1,
	.bs_r_2_s = NSWAP(a4x_bs_r_2),
	.bs_r_4_s = NSWAP(a4x_bs_r_4),
	.bs_r_8_s = bs_notimpl_bs_r_8,

	/* read multiple */
	.bs_rm_1_s = a4x_bs_rm_1,
	.bs_rm_2_s = NSWAP(a4x_bs_rm_2),
	.bs_rm_4_s = NSWAP(a4x_bs_rm_4),
	.bs_rm_8_s = bs_notimpl_bs_rm_8,

	/* read region */
	.bs_rr_1_s = bs_notimpl_bs_rr_1,
	.bs_rr_2_s = bs_notimpl_bs_rr_2,
	.bs_rr_4_s = bs_notimpl_bs_rr_4,
	.bs_rr_8_s = bs_notimpl_bs_rr_8,

	/* write (single) */
	.bs_w_1_s = a4x_bs_w_1,
	.bs_w_2_s = NSWAP(a4x_bs_w_2),
	.bs_w_4_s = NSWAP(a4x_bs_w_4),
	.bs_w_8_s = bs_notimpl_bs_w_8,

	/* write multiple */
	.bs_wm_1_s = a4x_bs_wm_1,
	.bs_wm_2_s = NSWAP(a4x_bs_wm_2),
	.bs_wm_4_s = NSWAP(a4x_bs_wm_4),
	.bs_wm_8_s = bs_notimpl_bs_wm_8,

	/* write region */
	.bs_wr_1_s = bs_notimpl_bs_wr_1,
	.bs_wr_2_s = bs_notimpl_bs_wr_2,
	.bs_wr_4_s = bs_notimpl_bs_wr_4,
	.bs_wr_8_s = bs_notimpl_bs_wr_8,
#endif
};
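/*
 * The a4x tag is intended for devices whose narrow registers are spaced
 * on 4-byte boundaries: the a4x access methods, and the subregion/mmap
 * helpers below (armv7_generic_a4x_bs_subregion, armv7_generic_a4x_bs_mmap),
 * scale the caller's offset by four.  Illustrative sketch with a
 * hypothetical handle "ioh": a 1-byte read at offset 3 through this tag
 * is expected to touch the byte at ioh + 12, not ioh + 3.
 *
 *	uint8_t v = bus_space_read_1(&armv7_generic_a4x_bs_tag, ioh, 3);
 */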
int
armv7_generic_bs_map(void *t, bus_addr_t bpa, bus_size_t size, int flag,
    bus_space_handle_t *bshp)
{
	u_long startpa, endpa, pa;
	const struct pmap_devmap *pd;
	int pmapflags;
	vaddr_t va;

	if ((pd = pmap_devmap_find_pa(bpa, size)) != NULL) {
		/* Device was statically mapped. */
		*bshp = pd->pd_va + (bpa - pd->pd_pa);
		return 0;
	}

	startpa = trunc_page(bpa);
	endpa = round_page(bpa + size);

	/* XXX use extent manager to check duplicate mapping */

	va = uvm_km_alloc(kernel_map, endpa - startpa, 0,
	    UVM_KMF_VAONLY | UVM_KMF_NOWAIT | UVM_KMF_COLORMATCH);
	if (!va)
		return ENOMEM;

	*bshp = (bus_space_handle_t)(va + (bpa - startpa));

	if (flag & BUS_SPACE_MAP_PREFETCHABLE)
		pmapflags = PMAP_WRITE_COMBINE;
	else if (flag & BUS_SPACE_MAP_CACHEABLE)
		pmapflags = 0;
	else
		pmapflags = PMAP_DEV;

	for (pa = startpa; pa < endpa; pa += PAGE_SIZE, va += PAGE_SIZE) {
		pmap_kenter_pa(va, pa, VM_PROT_READ | VM_PROT_WRITE, pmapflags);
	}
	pmap_update(pmap_kernel());

	return 0;
}

void
armv7_generic_bs_unmap(void *t, bus_space_handle_t bsh, bus_size_t size)
{
	vaddr_t va;
	vsize_t sz;

	if (pmap_devmap_find_va(bsh, size) != NULL) {
		/* Device was statically mapped; nothing to do. */
		return;
	}

	va = trunc_page(bsh);
	sz = round_page(bsh + size) - va;

	pmap_kremove(va, sz);
	pmap_update(pmap_kernel());
	uvm_km_free(kernel_map, va, sz, UVM_KMF_VAONLY);
}
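/*
 * Mapping attributes follow the bus_space(9) flags as handled above:
 * BUS_SPACE_MAP_PREFETCHABLE requests a write-combining mapping,
 * BUS_SPACE_MAP_CACHEABLE an ordinary cached mapping, and the default
 * is a device-memory mapping (PMAP_DEV).  Illustrative sketch for a
 * write-combined framebuffer aperture (fb_pa and fb_size are
 * hypothetical):
 *
 *	bus_space_handle_t fbh;
 *	int error = bus_space_map(&armv7_generic_bs_tag, fb_pa, fb_size,
 *	    BUS_SPACE_MAP_PREFETCHABLE, &fbh);
 */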
int
armv7_generic_bs_subregion(void *t, bus_space_handle_t bsh, bus_size_t offset,
    bus_size_t size, bus_space_handle_t *nbshp)
{

	*nbshp = bsh + offset;
	return 0;
}

int
armv7_generic_a4x_bs_subregion(void *t, bus_space_handle_t bsh, bus_size_t offset,
    bus_size_t size, bus_space_handle_t *nbshp)
{

	*nbshp = bsh + 4 * offset;
	return 0;
}

void
armv7_generic_bs_barrier(void *t, bus_space_handle_t bsh, bus_size_t offset,
    bus_size_t len, int flags)
{
	flags &= BUS_SPACE_BARRIER_READ|BUS_SPACE_BARRIER_WRITE;

	if (flags)
		dsb(sy);
}

void *
armv7_generic_bs_vaddr(void *t, bus_space_handle_t bsh)
{

	return (void *)bsh;
}

paddr_t
armv7_generic_bs_mmap(void *t, bus_addr_t bpa, off_t offset, int prot, int flags)
{
	paddr_t bus_flags = 0;

	if (flags & BUS_SPACE_MAP_PREFETCHABLE)
		bus_flags |= ARM32_MMAP_WRITECOMBINE;

	return arm_btop(bpa + offset) | bus_flags;
}

paddr_t
armv7_generic_a4x_bs_mmap(void *t, bus_addr_t bpa, off_t offset, int prot, int flags)
{
	paddr_t bus_flags = 0;

	if (flags & BUS_SPACE_MAP_PREFETCHABLE)
		bus_flags |= ARM32_MMAP_WRITECOMBINE;

	return arm_btop(bpa + 4 * offset) | bus_flags;
}

int
armv7_generic_bs_alloc(void *t, bus_addr_t rstart, bus_addr_t rend,
    bus_size_t size, bus_size_t alignment, bus_size_t boundary, int flags,
    bus_addr_t *bpap, bus_space_handle_t *bshp)
{

	panic("%s(): not implemented\n", __func__);
}

void
armv7_generic_bs_free(void *t, bus_space_handle_t bsh, bus_size_t size)
{

	panic("%s(): not implemented\n", __func__);
}
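/*
 * Note: bus_space_alloc()/bus_space_free() are not implemented for these
 * tags and panic if called; drivers are expected to map known physical
 * addresses with bus_space_map() instead.
 */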