Home | History | Annotate | Line # | Download | only in bootaa64
      1  1.2  jmcneill /*	$NetBSD: cache.S,v 1.2 2018/09/07 17:30:32 jmcneill Exp $	*/
      2  1.1  jmcneill 
      3  1.1  jmcneill /*-
      4  1.1  jmcneill  * Copyright (c) 2014 Robin Randhawa
      5  1.1  jmcneill  * Copyright (c) 2015 The FreeBSD Foundation
      6  1.1  jmcneill  * All rights reserved.
      7  1.1  jmcneill  *
      8  1.1  jmcneill  * Portions of this software were developed by Andrew Turner
      9  1.1  jmcneill  * under sponsorship from the FreeBSD Foundation
     10  1.1  jmcneill  *
     11  1.1  jmcneill  * Redistribution and use in source and binary forms, with or without
     12  1.1  jmcneill  * modification, are permitted provided that the following conditions
     13  1.1  jmcneill  * are met:
     14  1.1  jmcneill  * 1. Redistributions of source code must retain the above copyright
     15  1.1  jmcneill  *    notice, this list of conditions and the following disclaimer.
     16  1.1  jmcneill  * 2. Redistributions in binary form must reproduce the above copyright
     17  1.1  jmcneill  *    notice, this list of conditions and the following disclaimer in the
     18  1.1  jmcneill  *    documentation and/or other materials provided with the distribution.
     19  1.1  jmcneill  *
     20  1.1  jmcneill  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
     21  1.1  jmcneill  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
     22  1.1  jmcneill  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
     23  1.1  jmcneill  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
     24  1.1  jmcneill  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
     25  1.1  jmcneill  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
     26  1.1  jmcneill  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
     27  1.1  jmcneill  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
     28  1.1  jmcneill  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
     29  1.1  jmcneill  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
     30  1.1  jmcneill  * SUCH DAMAGE.
     31  1.1  jmcneill  *
     32  1.1  jmcneill  * $FreeBSD: head/sys/arm64/arm64/cpufunc_asm.S 313347 2017-02-06 17:50:09Z andrew $
     33  1.1  jmcneill  */
     34  1.1  jmcneill 
     35  1.1  jmcneill #include <aarch64/asm.h>
     36  1.1  jmcneill 
     37  1.2  jmcneill #define	SCTLR_M	(1<<0)
     38  1.2  jmcneill #define	SCTLR_C	(1<<2)
     39  1.2  jmcneill 
     40  1.1  jmcneill 	.text
     41  1.1  jmcneill 	.align	2
     42  1.1  jmcneill 
      43  1.1  jmcneill /*
      44  1.1  jmcneill  * Macro to handle the cache. This takes the start address in x0, length
      45  1.1  jmcneill  * in x1. It will corrupt x0, x1, x2, x3, and x4.
                         *
                         * \dcop is the DC (data cache by VA) operation applied to each line in
                         * the range.  If \ic is non-zero, IC \icop is applied to each line as
                         * well, the stride becomes MAX(D line size, I line size), and a final
                         * isb resynchronizes the instruction stream.
                         *
                         * Line sizes are derived from CTR_EL0: IminLine in bits [3:0] and
                         * DminLine in bits [19:16], both log2 of the line size in words.
      46  1.1  jmcneill  */
      47  1.1  jmcneill .macro cache_handle_range dcop = 0, ic = 0, icop = 0
      48  1.1  jmcneill .if \ic == 0
      49  1.1  jmcneill 	mrs	x3, ctr_el0		/* x3 = cache type register */
      50  1.1  jmcneill 	ubfx	x3, x3, #16, #4		/* x3 = D cache shift */
      51  1.1  jmcneill 	mov	x2, #4			/* size of word */
      52  1.1  jmcneill 	lsl	x3, x2, x3		/* x3 = D cache line size */
      53  1.1  jmcneill .else
      54  1.1  jmcneill 	mrs	x3, ctr_el0		/* x3 = cache type register */
      55  1.1  jmcneill 	ubfx	x2, x3, #16, #4		/* x2 = D cache shift */
      56  1.1  jmcneill 	and	x3, x3, #15		/* x3 = I cache shift */
      57  1.1  jmcneill 	cmp	x3, x2
      58  1.1  jmcneill 	bcs	1f			/* keep I shift if it is >= D shift */
      59  1.1  jmcneill 	mov	x3, x2
      60  1.1  jmcneill 1:					/* x3 = MAX(IcacheShift,DcacheShift) */
      61  1.1  jmcneill 	mov	x2, #4			/* size of word */
      62  1.1  jmcneill 	lsl	x3, x2, x3		/* x3 = cache line size */
      63  1.1  jmcneill .endif
      64  1.1  jmcneill 	sub	x4, x3, #1		/* Get the address mask */
      65  1.1  jmcneill 	and	x2, x0, x4		/* Get the low bits of the address */
      66  1.1  jmcneill 	add	x1, x1, x2		/* Add these to the size */
      67  1.1  jmcneill 	bic	x0, x0, x4		/* Clear the low bit of the address */
      68  1.1  jmcneill 1:
      69  1.1  jmcneill 	dc	\dcop, x0		/* D cache op on this line */
      70  1.1  jmcneill 	dsb	ish			/* wait for it to complete */
      71  1.1  jmcneill .if \ic != 0
      72  1.1  jmcneill 	ic	\icop, x0		/* I cache op on this line */
      73  1.1  jmcneill 	dsb	ish			/* wait for it to complete */
      74  1.1  jmcneill .endif
      75  1.1  jmcneill 	add	x0, x0, x3		/* Move to the next line */
      76  1.1  jmcneill 	subs	x1, x1, x3		/* Reduce the size */
      77  1.1  jmcneill 	b.hi	1b			/* Check if we are done */
      78  1.1  jmcneill .if \ic != 0
      79  1.1  jmcneill 	isb				/* discard prefetched instructions */
      80  1.1  jmcneill .endif
      81  1.1  jmcneill 	ret
      82  1.1  jmcneill .endm
     83  1.1  jmcneill 
     84  1.1  jmcneill 
      85  1.1  jmcneill /*
      86  1.1  jmcneill  * void aarch64_dcache_wbinv_range(vaddr_t, vsize_t)
                         *
                         * Write back (clean) and invalidate the D cache over [va, va + len):
                         * DC CIVAC = clean and invalidate by VA to the point of coherency.
                         * Clobbers x0-x4 (see cache_handle_range).
      87  1.1  jmcneill  */
      88  1.1  jmcneill ENTRY(aarch64_dcache_wbinv_range)
      89  1.1  jmcneill 	cache_handle_range	dcop = civac
      90  1.1  jmcneill END(aarch64_dcache_wbinv_range)
     91  1.1  jmcneill 
      92  1.1  jmcneill /*
      93  1.1  jmcneill  * void aarch64_icache_inv_all(void)
                         *
                         * Invalidate all instruction caches to the point of unification,
                         * inner shareable domain (IC IALLUIS).
      94  1.1  jmcneill  */
      95  1.1  jmcneill ENTRY(aarch64_icache_inv_all)
      96  1.1  jmcneill 	dsb	ish			/* complete prior cache/memory ops */
      97  1.1  jmcneill 	ic	ialluis			/* invalidate all I caches, IS domain */
      98  1.1  jmcneill 	dsb	ish			/* wait for the invalidate to finish */
      99  1.1  jmcneill 	isb				/* resynchronize instruction fetch */
     100  1.1  jmcneill 	ret
     101  1.1  jmcneill END(aarch64_icache_inv_all)
    102  1.2  jmcneill 
     103  1.2  jmcneill /*
     104  1.2  jmcneill  * void aarch64_exec_kernel(paddr_t entry, paddr_t dtb)
                         *
                         * Disable the MMU and data cache at the current exception level
                         * (EL1 or EL2), then branch to the kernel entry point with
                         * x0 = DTB address and x1-x3 = 0 (AArch64 kernel boot convention).
                         * Does not return.
     105  1.2  jmcneill  */
     106  1.2  jmcneill ENTRY(aarch64_exec_kernel)
     107  1.2  jmcneill 	mov	x20, x0	/* kernel entry point */
     108  1.2  jmcneill 	mov	x21, x1	/* dtb address */
     109  1.2  jmcneill 
     110  1.2  jmcneill 	mrs	x0, CurrentEL
     111  1.2  jmcneill 	lsr	x0, x0, #2		/* EL is in CurrentEL bits [3:2] */
     112  1.2  jmcneill 	cmp	x0, #0x2
     113  1.2  jmcneill 	b.eq	1f			/* branch if running at EL2 */
     114  1.2  jmcneill 
     115  1.2  jmcneill 	/* Disable MMU and dcache, CurrentEL = EL1 */
     116  1.2  jmcneill 	mrs	x0, sctlr_el1
     117  1.2  jmcneill 	bic	x0, x0, #SCTLR_M	/* MMU off */
     118  1.2  jmcneill 	bic	x0, x0, #SCTLR_C	/* D cache off */
     119  1.2  jmcneill 	msr	sctlr_el1, x0
     120  1.2  jmcneill 	isb				/* make the new SCTLR take effect */
     121  1.2  jmcneill 	b	2f
     122  1.2  jmcneill 1:
     123  1.2  jmcneill 	/* Disable MMU and dcache, CurrentEL = EL2 */
     124  1.2  jmcneill 	mrs	x0, sctlr_el2
     125  1.2  jmcneill 	bic	x0, x0, #SCTLR_M	/* MMU off */
     126  1.2  jmcneill 	bic	x0, x0, #SCTLR_C	/* D cache off */
     127  1.2  jmcneill 	msr	sctlr_el2, x0
     128  1.2  jmcneill 	isb				/* make the new SCTLR take effect */
     129  1.2  jmcneill 2:
     130  1.2  jmcneill 
     131  1.2  jmcneill 	/* Jump to kernel */
     132  1.2  jmcneill 	mov	x0, x21			/* x0 = DTB address */
     133  1.2  jmcneill 	mov	x1, xzr			/* x1-x3 = 0, reserved */
     134  1.2  jmcneill 	mov	x2, xzr
     135  1.2  jmcneill 	mov	x3, xzr
     136  1.2  jmcneill 	br	x20			/* no return */
     137  1.2  jmcneill 
     138  1.2  jmcneill END(aarch64_exec_kernel)
    139