/*	$NetBSD: booke_stubs.c,v 1.12 2021/10/02 14:28:04 skrll Exp $	*/
/*-
 * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects
 * Agency and which was developed by Matt Thomas of 3am Software Foundry.
 *
 * This material is based upon work supported by the Defense Advanced Research
 * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under
 * Contract No. N66001-09-C-2073.
 * Approved for Public Release, Distribution Unlimited
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: booke_stubs.c,v 1.12 2021/10/02 14:28:04 skrll Exp $");

#include <sys/param.h>
#include <sys/cpu.h>

#include <powerpc/instr.h>
#include <powerpc/booke/cpuvar.h>

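/*
 * Each stub below simply indirects through the machine-dependent TLB
 * operations registered in cpu_md_ops.  The __stub attribute places
 * each stub in the ".stub" section and excludes it from profiling.
 */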
#define	__stub	__section(".stub") __noprofile

void tlb_set_asid(tlb_asid_t, struct pmap *) __stub;

void
tlb_set_asid(tlb_asid_t asid, struct pmap *pm)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_set_asid)(asid);
}

tlb_asid_t tlb_get_asid(void) __stub;

tlb_asid_t
tlb_get_asid(void)
{
	return (*cpu_md_ops.md_tlb_ops->md_tlb_get_asid)();
}

void tlb_invalidate_all(void) __stub;

void
tlb_invalidate_all(void)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_invalidate_all)();
}

void tlb_invalidate_globals(void) __stub;

void
tlb_invalidate_globals(void)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_invalidate_globals)();
}

void tlb_invalidate_asids(tlb_asid_t, tlb_asid_t) __stub;

void
tlb_invalidate_asids(tlb_asid_t asid_lo, tlb_asid_t asid_hi)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_invalidate_asids)(asid_lo, asid_hi);
}

void tlb_invalidate_addr(vaddr_t, tlb_asid_t) __stub;

void
tlb_invalidate_addr(vaddr_t va, tlb_asid_t asid)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_invalidate_addr)(va, asid);
}

bool tlb_update_addr(vaddr_t, tlb_asid_t, pt_entry_t, bool) __stub;

bool
tlb_update_addr(vaddr_t va, tlb_asid_t asid, pt_entry_t pte, bool insert_p)
{
	return (*cpu_md_ops.md_tlb_ops->md_tlb_update_addr)(va, asid, pte, insert_p);
}

void tlb_read_entry(size_t, struct tlbmask *) __stub;

void
tlb_read_entry(size_t pos, struct tlbmask *tlb)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_read_entry)(pos, tlb);
}

void tlb_write_entry(size_t, const struct tlbmask *) __stub;

void
tlb_write_entry(size_t pos, const struct tlbmask *tlb)
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_write_entry)(pos, tlb);
}

u_int tlb_record_asids(u_long *, tlb_asid_t) __stub;

u_int
tlb_record_asids(u_long *bitmap, tlb_asid_t asid_max)
{
	return (*cpu_md_ops.md_tlb_ops->md_tlb_record_asids)(bitmap, asid_max);
}

void tlb_dump(void (*)(const char *, ...)) __stub;

void
tlb_dump(void (*pr)(const char *, ...))
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_dump)(pr);
}

void tlb_walk(void *, bool (*)(void *, vaddr_t, uint32_t, uint32_t))
	__stub;

void
tlb_walk(void *ctx, bool (*func)(void *, vaddr_t, uint32_t, uint32_t))
{
	(*cpu_md_ops.md_tlb_ops->md_tlb_walk)(ctx, func);
}

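/*
 * The I/O mapping stubs below dispatch through md_tlb_io_ops rather
 * than md_tlb_ops.
 */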
void *tlb_mapiodev(paddr_t, psize_t, bool) __stub;

void *
tlb_mapiodev(paddr_t pa, psize_t len, bool prefetchable)
{
	return (*cpu_md_ops.md_tlb_io_ops->md_tlb_mapiodev)(pa, len, prefetchable);
}

void tlb_unmapiodev(vaddr_t, vsize_t) __stub;

void
tlb_unmapiodev(vaddr_t va, vsize_t len)
{
	(*cpu_md_ops.md_tlb_io_ops->md_tlb_unmapiodev)(va, len);
}

int tlb_ioreserve(vaddr_t, vsize_t, uint32_t) __stub;

int
tlb_ioreserve(vaddr_t va, vsize_t len, uint32_t pte)
{
	return (*cpu_md_ops.md_tlb_io_ops->md_tlb_ioreserve)(va, len, pte);
}

int tlb_iorelease(vaddr_t) __stub;

int
tlb_iorelease(vaddr_t va)
{
	return (*cpu_md_ops.md_tlb_io_ops->md_tlb_iorelease)(va);
}