/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_cputypes.h"
#include "opt_multiprocessor.h"

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: arm32_tlb.c,v 1.11.4.1 2019/06/10 22:05:51 christos Exp $");

#include <sys/param.h>
#include <sys/types.h>

#include <uvm/uvm.h>

#include <arm/locore.h>

bool arm_has_tlbiasid_p;	// CPU supports TLBIASID system coprocessor op
bool arm_has_mpext_p;		// CPU supports MP extensions

tlb_asid_t
tlb_get_asid(void)
{
	return armreg_contextidr_read() & 0xff;
}

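/*
 * Switch to a new address space identifier.  When changing to the
 * kernel ASID, TTBCR.PD0 is set first so that translation table walks
 * through TTBR0 fault rather than refill the TLB from a stale user L1
 * table while the ASID changes.
 */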
void
tlb_set_asid(tlb_asid_t asid)
{
	arm_dsb();
	if (asid == KERNEL_PID) {
		armreg_ttbcr_write(armreg_ttbcr_read() | TTBCR_S_PD0);
		arm_isb();
	}
	armreg_contextidr_write(asid);
	arm_isb();
}

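/*
 * Flush the entire TLB.  With the multiprocessor extensions the
 * inner-shareable (broadcast) operation is used so every CPU sees the
 * invalidation.  A VIVT instruction cache is tagged by virtual address
 * alone, so it has to be invalidated as well once the translations it
 * was filled under are gone.
 */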
void
tlb_invalidate_all(void)
{
	const bool vivt_icache_p = arm_pcache.icache_type == CACHE_TYPE_VIVT;
	arm_dsb();
	if (arm_has_mpext_p) {
		armreg_tlbiallis_write(0);
	} else {
		armreg_tlbiall_write(0);
	}
	arm_isb();
	if (__predict_false(vivt_icache_p)) {
		if (arm_has_tlbiasid_p) {
			armreg_icialluis_write(0);
		} else {
			armreg_iciallu_write(0);
		}
	}
	arm_dsb();
	arm_isb();
}

void
tlb_invalidate_globals(void)
{
	tlb_invalidate_all();
}

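/*
 * Invalidate every non-global TLB entry whose ASID falls in [lo, hi].
 * Without the per-ASID operation the whole TLB is flushed instead.
 */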
void
tlb_invalidate_asids(tlb_asid_t lo, tlb_asid_t hi)
{
	const bool vivt_icache_p = arm_pcache.icache_type == CACHE_TYPE_VIVT;
	arm_dsb();
	if (arm_has_tlbiasid_p) {
		for (; lo <= hi; lo++) {
			if (arm_has_mpext_p) {
				armreg_tlbiasidis_write(lo);
			} else {
				armreg_tlbiasid_write(lo);
			}
		}
		arm_dsb();
		arm_isb();
		if (__predict_false(vivt_icache_p)) {
			if (arm_has_mpext_p) {
				armreg_icialluis_write(0);
			} else {
				armreg_iciallu_write(0);
			}
		}
	} else {
		armreg_tlbiall_write(0);
		arm_isb();
		if (__predict_false(vivt_icache_p)) {
			armreg_iciallu_write(0);
		}
	}
	arm_isb();
}

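/*
 * Invalidate the entry (or entries) covering a single page.  The
 * MVA/ASID form takes the page-aligned VA with the ASID in the low
 * bits; the loop steps in L2_S_SIZE (small page) chunks so kernels
 * built with PAGE_SIZE larger than 4KB still cover the whole page.
 */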
void
tlb_invalidate_addr(vaddr_t va, tlb_asid_t asid)
{
	arm_dsb();
	va = trunc_page(va) | asid;
	for (vaddr_t eva = va + PAGE_SIZE; va < eva; va += L2_S_SIZE) {
		if (arm_has_mpext_p) {
			armreg_tlbimvais_write(va);
		} else {
			armreg_tlbimva_write(va);
		}
	}
	arm_isb();
}

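/*
 * There is no way to load or modify a TLB entry directly, so the best
 * we can do is drop any stale entry and let the hardware table walker
 * refill it on the next access.
 */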
bool
tlb_update_addr(vaddr_t va, tlb_asid_t asid, pt_entry_t pte, bool insert_p)
{
	tlb_invalidate_addr(va, asid);
	return true;
}

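/*
 * On a uniprocessor Cortex-A5 the implementation-defined TLB data
 * access registers can be used to walk the main TLB and record which
 * ASIDs still have non-global entries, so the caller's ASID allocator
 * can avoid reusing ASIDs that are still live in the TLB.
 */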
#if !defined(MULTIPROCESSOR) && defined(CPU_CORTEXA5)
static u_int
tlb_cortex_a5_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
	u_int nasids = 0;
	for (size_t va_index = 0; va_index < 64; va_index++) {
		for (size_t way = 0; way < 2; way++) {
			armreg_tlbdataop_write(
			     __SHIFTIN(way, ARM_TLBDATAOP_WAY)
			     | __SHIFTIN(va_index, ARM_A5_TLBDATAOP_INDEX));
			arm_isb();
			const uint64_t d = ((uint64_t) armreg_tlbdata1_read() << 32)
			    | armreg_tlbdata0_read();
			if (!(d & ARM_TLBDATA_VALID)
			    || !(d & ARM_A5_TLBDATA_nG))
				continue;

			const tlb_asid_t asid = __SHIFTOUT(d,
			    ARM_A5_TLBDATA_ASID);
			const u_long mask = 1L << (asid & 31);
			const size_t idx = asid >> 5;
			if (mapp[idx] & mask)
				continue;

			mapp[idx] |= mask;
			nasids++;
		}
	}
	return nasids;
}
#endif

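/*
 * Same walk as above, but using the Cortex-A7 TLB data register
 * layout, whose fields are spread across the two TLB data registers
 * (128 indices, two ways).
 */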
#if !defined(MULTIPROCESSOR) && defined(CPU_CORTEXA7)
static u_int
tlb_cortex_a7_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
	u_int nasids = 0;
	for (size_t va_index = 0; va_index < 128; va_index++) {
		for (size_t way = 0; way < 2; way++) {
			armreg_tlbdataop_write(
			     __SHIFTIN(way, ARM_TLBDATAOP_WAY)
			     | __SHIFTIN(va_index, ARM_A7_TLBDATAOP_INDEX));
			arm_isb();
			const uint32_t d0 = armreg_tlbdata0_read();
			const uint32_t d1 = armreg_tlbdata1_read();
			if (!(d0 & ARM_TLBDATA_VALID)
			    || !(d1 & ARM_A7_TLBDATA1_nG))
				continue;

			const uint64_t d01 = ((uint64_t) d1 << 32) | d0;
			const tlb_asid_t asid = __SHIFTOUT(d01,
			    ARM_A7_TLBDATA01_ASID);
			const u_long mask = 1L << (asid & 31);
			const size_t idx = asid >> 5;
			if (mapp[idx] & mask)
				continue;

			mapp[idx] |= mask;
			nasids++;
		}
	}
	return nasids;
}
#endif

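/*
 * Record which ASIDs may still be live in the TLB.  Where the TLB
 * cannot be inspected, report that all 255 non-kernel ASIDs are
 * potentially in use so the caller has to assume the worst.
 */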
u_int
tlb_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
#ifndef MULTIPROCESSOR
#ifdef CPU_CORTEXA5
	if (CPU_ID_CORTEX_A5_P(curcpu()->ci_arm_cpuid))
		return tlb_cortex_a5_record_asids(mapp, asid_max);
#endif
#ifdef CPU_CORTEXA7
	if (CPU_ID_CORTEX_A7_P(curcpu()->ci_arm_cpuid))
		return tlb_cortex_a7_record_asids(mapp, asid_max);
#endif
#endif /* MULTIPROCESSOR */
#ifdef DIAGNOSTIC
	mapp[0] = 0xfffffffe;
	mapp[1] = 0xffffffff;
	mapp[2] = 0xffffffff;
	mapp[3] = 0xffffffff;
	mapp[4] = 0xffffffff;
	mapp[5] = 0xffffffff;
	mapp[6] = 0xffffffff;
	mapp[7] = 0xffffffff;
#endif
	return 255;
}

void
tlb_walk(void *ctx, bool (*func)(void *, vaddr_t, tlb_asid_t, pt_entry_t))
{
	/* no way to view the TLB */
}
    242