/* $NetBSD: locore.h,v 1.5 2018/07/09 09:09:47 jmcneill Exp $ */
2 1.1 matt
/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
31 1.1 matt
32 1.1 matt #ifndef _AARCH64_LOCORE_H_
33 1.1 matt #define _AARCH64_LOCORE_H_
34 1.1 matt
35 1.1 matt #ifdef __aarch64__
36 1.1 matt
37 1.3 ryo #ifdef _KERNEL_OPT
38 1.3 ryo #include "opt_multiprocessor.h"
39 1.3 ryo #endif
40 1.3 ryo
41 1.3 ryo #ifdef _LOCORE
42 1.3 ryo
/*
 * Assembler-level interrupt control, for use from locore (.S) sources.
 * Writing DAIF_I|DAIF_F to the daifclr/daifset system-register aliases
 * unmasks/masks both IRQ and FIQ in a single instruction.
 */
#define ENABLE_INTERRUPT	\
	msr daifclr, #((DAIF_I|DAIF_F) >> DAIF_SETCLR_SHIFT)
#define DISABLE_INTERRUPT	\
	msr daifset, #((DAIF_I|DAIF_F) >> DAIF_SETCLR_SHIFT)
47 1.3 ryo
48 1.3 ryo #else /* _LOCORE */
49 1.3 ryo
50 1.1 matt #include <sys/types.h>
51 1.1 matt
52 1.1 matt #include <aarch64/armreg.h>
53 1.5 jmcneill #include <aarch64/machdep.h> /* arm32 compat */
54 1.1 matt
/* Compatibility aliases for arch/arm (32-bit) callers. */
#define I32_bit		DAIF_I
#define F32_bit		DAIF_F
#define cpsie(psw)	daif_enable((psw))
#define cpsid(psw)	daif_disable((psw))

/* C-level counterparts of the _LOCORE macros: toggle IRQ and FIQ. */
#define ENABLE_INTERRUPT()	daif_enable(DAIF_I|DAIF_F)
#define DISABLE_INTERRUPT()	daif_disable(DAIF_I|DAIF_F)

/* All four maskable PSTATE exception bits: Debug, SError, IRQ, FIQ. */
#define DAIF_MASK	(DAIF_D|DAIF_A|DAIF_I|DAIF_F)
66 1.1 matt
67 1.3 ryo static inline void __unused
68 1.3 ryo daif_enable(register_t psw)
69 1.1 matt {
70 1.1 matt if (!__builtin_constant_p(psw)) {
71 1.3 ryo reg_daif_write(reg_daif_read() & ~psw);
72 1.1 matt } else {
73 1.3 ryo reg_daifclr_write((psw & DAIF_MASK) >> DAIF_SETCLR_SHIFT);
74 1.1 matt }
75 1.1 matt }
76 1.1 matt
77 1.3 ryo static inline register_t __unused
78 1.3 ryo daif_disable(register_t psw)
79 1.1 matt {
80 1.1 matt register_t oldpsw = reg_daif_read();
81 1.1 matt if (!__builtin_constant_p(psw)) {
82 1.3 ryo reg_daif_write(oldpsw | psw);
83 1.1 matt } else {
84 1.3 ryo reg_daifset_write((psw & DAIF_MASK) >> DAIF_SETCLR_SHIFT);
85 1.1 matt }
86 1.1 matt return oldpsw;
87 1.1 matt }
88 1.1 matt
/* Issue a full-system Data Synchronization Barrier ("dsb sy"). */
static inline void
arm_dsb(void)
{
	/* "memory" clobber keeps the compiler from reordering across it */
	__asm __volatile("dsb sy" : : : "memory");
}
94 1.1 matt
/* Issue an Instruction Synchronization Barrier ("isb"). */
static inline void
arm_isb(void)
{
	/* "memory" clobber keeps the compiler from reordering across it */
	__asm __volatile("isb" : : : "memory");
}
100 1.1 matt
101 1.3 ryo #endif /* _LOCORE */
102 1.3 ryo
103 1.1 matt #elif defined(__arm__)
104 1.1 matt
105 1.1 matt #include <arm/locore.h>
106 1.1 matt
107 1.1 matt #endif /* __aarch64__/__arm__ */
108 1.1 matt
109 1.1 matt #endif /* _AARCH64_LOCORE_H_ */
110