/*	$NetBSD: cache.S,v 1.2 2018/09/07 17:30:32 jmcneill Exp $	*/

/*-
 * Copyright (c) 2014 Robin Randhawa
 * Copyright (c) 2015 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by Andrew Turner
 * under sponsorship from the FreeBSD Foundation
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm64/arm64/cpufunc_asm.S 313347 2017-02-06 17:50:09Z andrew $
 */

#include <aarch64/asm.h>

#define	SCTLR_M	(1<<0)		/* SCTLR_ELx.M: MMU enable */
#define	SCTLR_C	(1<<2)		/* SCTLR_ELx.C: data cache enable */

	.text
	.align	2

/*
 * Macro to handle the cache. This takes the start address in x0 and the
 * length in x1. It will corrupt x0, x1, x2, x3, and x4.
 */
.macro cache_handle_range dcop = 0, ic = 0, icop = 0
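	/*
	 * CTR_EL0.DminLine (bits 19:16) and CTR_EL0.IminLine (bits 3:0)
	 * hold log2 of the number of 4-byte words in the smallest D and
	 * I cache lines, so "4 << field" yields the line size in bytes.
	 */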
.if \ic == 0
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4		/* x3 = D cache shift */
	mov	x2, #4			/* size of word */
	lsl	x3, x2, x3		/* x3 = D cache line size */
.else
	mrs	x3, ctr_el0
	ubfx	x2, x3, #16, #4		/* x2 = D cache shift */
	and	x3, x3, #15		/* x3 = I cache shift */
	/*
	 * The loop below walks both caches with a single stride, so it
	 * must use the smaller of the two line sizes; striding by the
	 * larger one would skip lines of the smaller-lined cache.
	 */
	cmp	x3, x2
	bcc	1f
	mov	x3, x2
1:					/* x3 = MIN(IcacheShift,DcacheShift) */
	mov	x2, #4			/* size of word */
	lsl	x3, x2, x3		/* x3 = cache line size */
.endif
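	/*
	 * Round the start address down to a line boundary and grow the
	 * length to match, so partial lines at either end of the range
	 * are still maintained.
	 */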
	sub	x4, x3, #1		/* Get the address mask */
	and	x2, x0, x4		/* Get the low bits of the address */
	add	x1, x1, x2		/* Add these to the size */
	bic	x0, x0, x4		/* Clear the low bits of the address */
1:
	dc	\dcop, x0		/* D cache op on one line */
	dsb	ish			/* wait for the op to complete */
.if \ic != 0
	ic	\icop, x0		/* I cache op on the same line */
	dsb	ish
.endif
	add	x0, x0, x3		/* Move to the next line */
	subs	x1, x1, x3		/* Reduce the size */
	b.hi	1b			/* Loop while bytes remain */
.if \ic != 0
	isb				/* resynchronise instruction fetch */
.endif
	ret
.endm


/*
 * void aarch64_dcache_wbinv_range(vaddr_t, vsize_t)
 *
 * Write back and invalidate the D cache lines covering the given
 * virtual address range ("dc civac": clean and invalidate by VA to
 * the point of coherency).
 */
ENTRY(aarch64_dcache_wbinv_range)
	cache_handle_range	dcop = civac
END(aarch64_dcache_wbinv_range)

/*
 * void aarch64_icache_inv_all(void)
 *
 * Invalidate all instruction caches to the point of unification,
 * Inner Shareable ("ic ialluis").
 */
ENTRY(aarch64_icache_inv_all)
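	/*
	 * The first DSB orders prior memory accesses ahead of the
	 * invalidate; the second waits for the invalidate to complete;
	 * the ISB then resynchronises this PE's instruction stream.
	 */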
	dsb	ish
	ic	ialluis
	dsb	ish
	isb
	ret
END(aarch64_icache_inv_all)

/*
 * void aarch64_exec_kernel(paddr_t entry, paddr_t dtb)
 *
 * Disable the MMU and D cache at the current exception level, then
 * jump to the loaded kernel, handing it the DTB address.
 */
ENTRY(aarch64_exec_kernel)
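	/*
	 * x0 is reused below while probing CurrentEL and rewriting
	 * SCTLR, so stash both arguments first.
	 */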
	mov	x20, x0	/* kernel entry point */
	mov	x21, x1	/* dtb address */

	mrs	x0, CurrentEL
	lsr	x0, x0, #2		/* CurrentEL[3:2] holds the EL */
	cmp	x0, #0x2		/* running at EL2? */
	b.eq	1f

	/* Disable MMU and dcache, CurrentEL = EL1 */
	mrs	x0, sctlr_el1
	bic	x0, x0, #SCTLR_M
	bic	x0, x0, #SCTLR_C
	msr	sctlr_el1, x0
	isb
	b	2f
1:
	/* Disable MMU and dcache, CurrentEL = EL2 */
	mrs	x0, sctlr_el2
	bic	x0, x0, #SCTLR_M
	bic	x0, x0, #SCTLR_C
	msr	sctlr_el2, x0
	isb
2:

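	/*
	 * The kernel expects the DTB address in x0; x1-x3 are reserved
	 * by the boot protocol and are cleared here.
	 */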
	/* Jump to kernel */
	mov	x0, x21
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x20

END(aarch64_exec_kernel)