/*	$NetBSD: cache.S,v 1.1 2018/08/24 02:01:06 jmcneill Exp $	*/

/*-
 * Copyright (c) 2014 Robin Randhawa
 * Copyright (c) 2015 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by Andrew Turner
 * under sponsorship from the FreeBSD Foundation
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm64/arm64/cpufunc_asm.S 313347 2017-02-06 17:50:09Z andrew $
 */

#include <aarch64/asm.h>

	.text
	.align	2

/*
 * Macro to handle a cache maintenance operation over a virtual address
 * range. It takes the start address in x0 and the length in x1, and
 * corrupts x0, x1, x2, x3, and x4.
 */
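/*
 * Rough C equivalent of what the macro expands to (a sketch only;
 * "line" stands for the cache line size derived from CTR_EL0 and "op"
 * for the dc/ic maintenance operation selected by the arguments):
 *
 *	long off = x0 & (line - 1);	// misalignment of the start
 *	long len = x1 + off;		// cover the whole first line
 *	char *va = (char *)x0 - off;	// align down to a line boundary
 *	do {
 *		op(va);			// dc \dcop (and ic \icop if ic != 0)
 *		va += line;
 *		len -= line;
 *	} while (len > 0);
 */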
.macro cache_handle_range dcop = 0, ic = 0, icop = 0
.if \ic == 0
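	/*
	 * CTR_EL0.DminLine (bits [19:16]) holds log2 of the smallest
	 * D-cache line size in 4-byte words, so the line size in bytes
	 * is 4 << DminLine.
	 */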
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4		/* x3 = D cache shift */
	mov	x2, #4			/* size of word */
	lsl	x3, x2, x3		/* x3 = D cache line size */
.else
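	/*
	 * CTR_EL0.IminLine (bits [3:0]) holds log2 of the smallest
	 * I-cache line size in 4-byte words.  Step by the smaller of
	 * the I and D line sizes so that no line of either cache is
	 * skipped when both dc and ic operations are issued.
	 */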
	mrs	x3, ctr_el0
	ubfx	x2, x3, #16, #4		/* x2 = D cache shift */
	and	x3, x3, #15		/* x3 = I cache shift */
	cmp	x3, x2
	bcc	1f			/* keep the smaller shift */
	mov	x3, x2
1:					/* x3 = MIN(IcacheShift,DcacheShift) */
	mov	x2, #4			/* size of word */
	lsl	x3, x2, x3		/* x3 = cache line size */
.endif
	sub	x4, x3, #1		/* Get the address mask */
	and	x2, x0, x4		/* Get the low bits of the address */
	add	x1, x1, x2		/* Add these to the size */
	bic	x0, x0, x4		/* Clear the low bits of the address */
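	/* Walk the range one cache line at a time. */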
1:
	dc	\dcop, x0
	dsb	ish
.if \ic != 0
	ic	\icop, x0
	dsb	ish
.endif
	add	x0, x0, x3		/* Move to the next line */
	subs	x1, x1, x3		/* Reduce the size */
	b.hi	1b			/* Check if we are done */
.if \ic != 0
	isb
.endif
	ret
.endm

/*
 * void aarch64_dcache_wbinv_range(vaddr_t, vsize_t)
 */
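/*
 * "dc civac" cleans and invalidates the D-cache by VA to the Point of
 * Coherency, so the written-back data becomes visible to non-coherent
 * observers such as DMA masters or the instruction fetcher.
 */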
ENTRY(aarch64_dcache_wbinv_range)
	cache_handle_range	dcop = civac
END(aarch64_dcache_wbinv_range)

/*
 * void aarch64_icache_inv_all(void)
 */
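/*
 * "ic ialluis" invalidates all instruction caches to the Point of
 * Unification for the Inner Shareable domain.  The leading dsb orders
 * prior writes (e.g. a freshly loaded image) before the invalidate;
 * the trailing dsb and isb ensure the invalidate completes and the
 * pipeline refetches instructions.
 */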
ENTRY(aarch64_icache_inv_all)
	dsb	ish
	ic	ialluis
	dsb	ish
	isb
	ret
END(aarch64_icache_inv_all)
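
/*
 * Typical boot-loader usage (a sketch; the variable names here are
 * illustrative): after copying an executable image into memory, write
 * the D-cache back and invalidate the I-cache before jumping to it:
 *
 *	aarch64_dcache_wbinv_range(loadaddr, loadsize);
 *	aarch64_icache_inv_all();
 */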