/*	$NetBSD: cpufunc_asm_armv6.S,v 1.8 2017/07/15 06:20:22 skrll Exp $	*/

/*
 * Copyright (c) 2002, 2005 ARM Limited
 * Portions Copyright (c) 2007 Microsoft
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the company may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * ARMv6 assembly functions for manipulating caches.
 * These routines can be used by any core that supports the mcrr address
 * range operations.
 */
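
/*
 * The MCRR range operations used below take an inclusive virtual
 * address range, end address first: MCRR p15, 0, <end>, <start>, CRm.
 * CRm selects the operation: c5 invalidates the I cache range, c6
 * invalidates the D cache range, c12 cleans the D cache range, and
 * c14 cleans and invalidates the D cache range.
 */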

#include "assym.h"
#include <machine/asm.h>
#include <arm/locore.h>

	.arch	armv6

/*
 * Function to set the MMU Translation Table Base register.
 *
 * We need to clean and flush the caches first, since they hold
 * entries tagged with virtual addresses that are about to change.
 * The caller passes a non-zero second argument (r1) when that
 * flush is required.
 */
ENTRY(armv6_setttb)
	cmp	r1, #0
#ifdef PMAP_CACHE_VIVT
	mcrne	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	mcrne	p15, 0, r0, c7, c14, 0	/* clean and invalidate D cache */
#endif
	mcrne	p15, 0, r0, c7, c10, 4	/* drain the write buffer */

	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */

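	/*
	 * The unconditional MCR above leaves the condition flags
	 * intact, so the "ne" test on r1 still applies here.
	 */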
	mcrne	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
	RET
END(armv6_setttb)

/*
 * Cache operations.
 */
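/*
 * Each of the range operations below takes a start virtual address
 * in r0 and a size in bytes in r1, and converts the pair into the
 * inclusive [start, end] addresses that the MCRR instructions expect
 * (end = start + size - 1).
 */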

/* LINTSTUB: void armv6_icache_sync_range(vaddr_t, vsize_t); */
ENTRY_NP(armv6_icache_sync_range)
	add	r1, r1, r0
	sub	r1, r1, #1
	mcrr	p15, 0, r1, r0, c5	/* invalidate I cache range */
	mcrr	p15, 0, r1, r0, c12	/* clean D cache range */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_icache_sync_range)
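
/*
 * Usage sketch (the symbol and size here are hypothetical): after
 * the kernel writes instructions into memory, e.g. when copying a
 * trampoline, the new code must be pushed out of the D cache and
 * the stale I cache lines discarded before it is executed:
 *
 *	ldr	r0, =trampoline		@ start VA (hypothetical symbol)
 *	mov	r1, #64			@ number of bytes written
 *	bl	armv6_icache_sync_range
 */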

/* LINTSTUB: void armv6_icache_sync_all(void); */
ENTRY_NP(armv6_icache_sync_all)
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so we can safely flush the Icache and then clean the
	 * Dcache; cleaning pushes any dirty lines out to memory, where
	 * subsequent instruction fetches will see them.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	mcr	p15, 0, r0, c7, c10, 0	/* Clean D cache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_icache_sync_all)

/* LINTSTUB: void armv6_dcache_wb_range(vaddr_t, vsize_t); */
ENTRY(armv6_dcache_wb_range)
	add	r1, r1, r0
	sub	r1, r1, #1
	mcrr	p15, 0, r1, r0, c12	/* clean D cache range */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_dcache_wb_range)
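
/*
 * A typical pattern (a sketch, not tied to any particular driver):
 * clean (write back) a range before a device reads memory the CPU
 * has written, and invalidate a range before the CPU reads memory
 * a device has written.
 */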

/* LINTSTUB: void armv6_dcache_wbinv_range(vaddr_t, vsize_t); */
ENTRY(armv6_dcache_wbinv_range)
	add	r1, r1, r0
	sub	r1, r1, #1
	mcrr	p15, 0, r1, r0, c14	/* clean and invalidate D cache range */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_dcache_wbinv_range)

/*
 * Note, we must not blindly invalidate everything: invalidating
 * without cleaning discards dirty lines.  If the range is too big
 * to invalidate line by line, the fallback must be a write-back
 * and invalidate of the entire cache, never a plain invalidate.
 *
 * LINTSTUB: void armv6_dcache_inv_range(vaddr_t, vsize_t);
 */
ENTRY(armv6_dcache_inv_range)
	add	r1, r1, r0
	sub	r1, r1, #1
	mcrr	p15, 0, r1, r0, c6	/* invalidate D cache range */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_dcache_inv_range)
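
/*
 * The invalidate operates on whole cache lines, so callers are
 * expected to pass cache-line-aligned ranges; otherwise dirty data
 * sharing a line with the range's edges could be discarded.
 */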

/* LINTSTUB: void armv6_idcache_wbinv_range(vaddr_t, vsize_t); */
ENTRY(armv6_idcache_wbinv_range)
	add	r1, r1, r0
	sub	r1, r1, #1
	mcrr	p15, 0, r1, r0, c5	/* invalidate I cache range */
	mcrr	p15, 0, r1, r0, c14	/* clean & invalidate D cache range */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_idcache_wbinv_range)

/* LINTSTUB: void armv6_idcache_wbinv_all(void); */
ENTRY_NP(armv6_idcache_wbinv_all)
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache purging code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	/* Fall through to purge Dcache. */

/* LINTSTUB: void armv6_dcache_wbinv_all(void); */
ENTRY(armv6_dcache_wbinv_all)
	mcr	p15, 0, r0, c7, c14, 0	/* clean & invalidate D cache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv6_dcache_wbinv_all)
END(armv6_idcache_wbinv_all)
    149