/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_multiprocessor.h"

#include <sys/cdefs.h>
__KERNEL_RCSID(1, "$NetBSD: arm32_tlb.c,v 1.7.2.3 2017/08/28 17:51:29 skrll Exp $");

#include <sys/param.h>
#include <sys/types.h>

#include <uvm/uvm.h>

#include <arm/locore.h>

bool arm_has_tlbiasid_p;	// CPU supports TLBIASID system coprocessor op
bool arm_has_mpext_p;		// CPU supports MP extensions

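/*
 * Return the current ASID, taken from the low 8 bits of the CONTEXTIDR
 * register.
 */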
tlb_asid_t
tlb_get_asid(void)
{
	return armreg_contextidr_read() & 0xff;
}

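/*
 * Switch the current ASID by rewriting CONTEXTIDR.  When switching to the
 * kernel ASID, translation table walks via TTBR0 are first disabled
 * (TTBCR.PD0) so that no new TTBR0 translations are fetched under the
 * outgoing ASID while the change is in progress.
 */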
void
tlb_set_asid(tlb_asid_t asid)
{
	arm_dsb();
	if (asid == KERNEL_PID) {
		armreg_ttbcr_write(armreg_ttbcr_read() | TTBCR_S_PD0);
		arm_isb();
	}
	armreg_contextidr_write(asid);
	arm_isb();
}

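/*
 * Invalidate the entire unified TLB.  With the MP extensions the operation
 * is broadcast to the inner shareable domain (TLBIALLIS); otherwise only
 * the local TLB is flushed.  CPUs with a VIVT instruction cache also have
 * the instruction cache invalidated.
 */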
void
tlb_invalidate_all(void)
{
	const bool vivt_icache_p = arm_pcache.icache_type == CACHE_TYPE_VIVT;
	arm_dsb();
	if (arm_has_mpext_p) {
		armreg_tlbiallis_write(0);
	} else {
		armreg_tlbiall_write(0);
	}
	arm_isb();
	if (__predict_false(vivt_icache_p)) {
		if (arm_has_tlbiasid_p) {
			armreg_icialluis_write(0);
		} else {
			armreg_iciallu_write(0);
		}
	}
	arm_dsb();
	arm_isb();
}

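/*
 * Invalidating only the global entries is not implemented here, so fall
 * back to flushing the whole TLB.
 */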
void
tlb_invalidate_globals(void)
{
	tlb_invalidate_all();
}

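/*
 * Invalidate all TLB entries belonging to the ASIDs in the range [lo, hi].
 * When TLBIASID is supported each ASID is invalidated individually
 * (broadcast when the MP extensions are present); otherwise the whole TLB
 * is flushed.  CPUs with a VIVT instruction cache also have the
 * instruction cache invalidated.
 */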
void
tlb_invalidate_asids(tlb_asid_t lo, tlb_asid_t hi)
{
	const bool vivt_icache_p = arm_pcache.icache_type == CACHE_TYPE_VIVT;
	arm_dsb();
	if (arm_has_tlbiasid_p) {
		for (; lo <= hi; lo++) {
			if (arm_has_mpext_p) {
				armreg_tlbiasidis_write(lo);
			} else {
				armreg_tlbiasid_write(lo);
			}
		}
		arm_dsb();
		arm_isb();
		if (__predict_false(vivt_icache_p)) {
			if (arm_has_mpext_p) {
				armreg_icialluis_write(0);
			} else {
				armreg_iciallu_write(0);
			}
		}
	} else {
		armreg_tlbiall_write(0);
		arm_isb();
		if (__predict_false(vivt_icache_p)) {
			armreg_iciallu_write(0);
		}
	}
	arm_isb();
}

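/*
 * Invalidate the TLB entries covering the page at va for the given ASID.
 * The MVA operand combines the page-aligned VA with the ASID in its low
 * bits, and one invalidate is issued per L2_S_SIZE sub-page of the VM page.
 */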
void
tlb_invalidate_addr(vaddr_t va, tlb_asid_t asid)
{
	arm_dsb();
	va = trunc_page(va) | asid;
	for (vaddr_t eva = va + PAGE_SIZE; va < eva; va += L2_S_SIZE) {
		if (arm_has_mpext_p) {
			armreg_tlbimvais_write(va);
		} else {
			armreg_tlbimva_write(va);
		}
	}
	arm_isb();
}

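/*
 * There is no way to load a TLB entry directly, so just invalidate any
 * stale entry for va/asid and let the hardware refetch it on the next
 * access.
 */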
bool
tlb_update_addr(vaddr_t va, tlb_asid_t asid, pt_entry_t pte, bool insert_p)
{
	tlb_invalidate_addr(va, asid);
	return true;
}

#if !defined(MULTIPROCESSOR) && defined(CPU_CORTEXA5)
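/*
 * Walk the Cortex-A5 main TLB via the TLB data access registers and set a
 * bit in the supplied bitmap for the ASID of every valid non-global entry.
 * Returns the number of ASIDs newly recorded in the bitmap.
 */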
static u_int
tlb_cortex_a5_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
	u_int nasids = 0;
	for (size_t va_index = 0; va_index < 63; va_index++) {
		for (size_t way = 0; way < 2; way++) {
			armreg_tlbdataop_write(
			     __SHIFTIN(way, ARM_TLBDATAOP_WAY)
			     | __SHIFTIN(va_index, ARM_A5_TLBDATAOP_INDEX));
			arm_isb();
			const uint64_t d = ((uint64_t) armreg_tlbdata1_read())
			    | armreg_tlbdata0_read();
			if (!(d & ARM_TLBDATA_VALID)
			    || !(d & ARM_A5_TLBDATA_nG))
				continue;

			const tlb_asid_t asid = __SHIFTOUT(d,
			    ARM_A5_TLBDATA_ASID);
			const u_long mask = 1L << (asid & 31);
			const size_t idx = asid >> 5;
			if (mapp[idx] & mask)
				continue;

			mapp[idx] |= mask;
			nasids++;
		}
	}
	return nasids;
}
#endif

#if !defined(MULTIPROCESSOR) && defined(CPU_CORTEXA7)
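/*
 * Walk the Cortex-A7 main TLB via the TLB data access registers and set a
 * bit in the supplied bitmap for the ASID of every valid non-global entry.
 * Returns the number of ASIDs newly recorded in the bitmap.
 */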
static u_int
tlb_cortex_a7_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
	u_int nasids = 0;
	for (size_t va_index = 0; va_index < 128; va_index++) {
		for (size_t way = 0; way < 2; way++) {
			armreg_tlbdataop_write(
			     __SHIFTIN(way, ARM_TLBDATAOP_WAY)
			     | __SHIFTIN(va_index, ARM_A7_TLBDATAOP_INDEX));
			arm_isb();
			const uint32_t d0 = armreg_tlbdata0_read();
			const uint32_t d1 = armreg_tlbdata1_read();
			if (!(d0 & ARM_TLBDATA_VALID)
			    || !(d1 & ARM_A7_TLBDATA1_nG))
				continue;

			const uint64_t d01 = ((uint64_t) d1)|d0;
			const tlb_asid_t asid = __SHIFTOUT(d01,
			    ARM_A7_TLBDATA01_ASID);
			const u_long mask = 1L << (asid & 31);
			const size_t idx = asid >> 5;
			if (mapp[idx] & mask)
				continue;

			mapp[idx] |= mask;
			nasids++;
		}
	}
	return nasids;
}
#endif

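/*
 * Record the ASIDs of resident TLB entries in the supplied bitmap.  The
 * TLB contents can only be examined on uniprocessor Cortex-A5/A7
 * configurations; in all other cases report 255 ASIDs in use (and, under
 * DIAGNOSTIC, mark every ASID except 0 as live).
 */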
u_int
tlb_record_asids(u_long *mapp, tlb_asid_t asid_max)
{
#ifndef MULTIPROCESSOR
#ifdef CPU_CORTEXA5
	if (CPU_ID_CORTEX_A5_P(curcpu()->ci_arm_cpuid))
		return tlb_cortex_a5_record_asids(mapp, asid_max);
#endif
#ifdef CPU_CORTEXA7
	if (CPU_ID_CORTEX_A7_P(curcpu()->ci_arm_cpuid))
		return tlb_cortex_a7_record_asids(mapp, asid_max);
#endif
#endif /* MULTIPROCESSOR */
#ifdef DIAGNOSTIC
	mapp[0] = 0xfffffffe;
	mapp[1] = 0xffffffff;
	mapp[2] = 0xffffffff;
	mapp[3] = 0xffffffff;
	mapp[4] = 0xffffffff;
	mapp[5] = 0xffffffff;
	mapp[6] = 0xffffffff;
	mapp[7] = 0xffffffff;
#endif
	return 255;
}

void
tlb_walk(void *ctx, bool (*func)(void *, vaddr_t, tlb_asid_t, pt_entry_t))
{
	/* no way to view the TLB */
}