/*	$NetBSD: locore.h,v 1.3 2018/04/01 04:35:03 ryo Exp $	*/

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _AARCH64_LOCORE_H_
#define _AARCH64_LOCORE_H_

#ifdef __aarch64__

#ifdef _KERNEL_OPT
#include "opt_multiprocessor.h"
#endif

#ifdef _LOCORE

/*
 * Assembler variants: unmask/mask IRQ and FIQ by writing the DAIFClr
 * and DAIFSet immediate forms.
 */
#define ENABLE_INTERRUPT	\
	msr daifclr, #((DAIF_I|DAIF_F) >> DAIF_SETCLR_SHIFT)
#define DISABLE_INTERRUPT	\
	msr daifset, #((DAIF_I|DAIF_F) >> DAIF_SETCLR_SHIFT)
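
/*
 * Illustrative sketch only (not part of the original header): in an
 * assembler (.S) source each macro expands to a single msr instruction,
 * e.g.
 *
 *	DISABLE_INTERRUPT
 *	 ...code that must run with IRQ/FIQ masked...
 *	ENABLE_INTERRUPT
 */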

#else /* _LOCORE */

#include <sys/types.h>

#include <aarch64/armreg.h>

#ifdef MULTIPROCESSOR
/* for compatibility with arch/arm/pic/pic.c */
extern u_int arm_cpu_max;
#endif

/* for compatibility with arch/arm */
#define I32_bit			DAIF_I
#define F32_bit			DAIF_F
#define cpsie(psw)		daif_enable((psw))
#define cpsid(psw)		daif_disable((psw))
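
/*
 * Illustrative sketch only (not part of the original header): arm-style
 * code keeps working unchanged, e.g.
 *
 *	(void)cpsid(I32_bit);		mask IRQ via daif_disable()
 *	 ...work with IRQ masked...
 *	cpsie(I32_bit);			unmask IRQ via daif_enable()
 */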

/* C variants: unmask/mask both IRQ and FIQ. */
#define ENABLE_INTERRUPT()	daif_enable(DAIF_I|DAIF_F)
#define DISABLE_INTERRUPT()	daif_disable(DAIF_I|DAIF_F)
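
/*
 * Illustrative sketch only (not part of the original header): a region
 * that must run with interrupts masked, unmasking unconditionally
 * afterwards:
 *
 *	DISABLE_INTERRUPT();
 *	 ...critical work...
 *	ENABLE_INTERRUPT();
 */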

#define DAIF_MASK		(DAIF_D|DAIF_A|DAIF_I|DAIF_F)

/*
 * Clear (unmask) the DAIF bits named in psw.  A compile-time constant
 * mask goes through the DAIFClr immediate form; otherwise the current
 * DAIF value is read, modified and written back.
 */
static inline void __unused
daif_enable(register_t psw)
{
	if (!__builtin_constant_p(psw)) {
		reg_daif_write(reg_daif_read() & ~psw);
	} else {
		reg_daifclr_write((psw & DAIF_MASK) >> DAIF_SETCLR_SHIFT);
	}
}

/*
 * Set (mask) the DAIF bits named in psw and return the previous DAIF
 * value so the caller can restore it.  A compile-time constant mask
 * goes through the DAIFSet immediate form; otherwise the current value
 * is read, modified and written back.
 */
static inline register_t __unused
daif_disable(register_t psw)
{
	register_t oldpsw = reg_daif_read();
	if (!__builtin_constant_p(psw)) {
		reg_daif_write(oldpsw | psw);
	} else {
		reg_daifset_write((psw & DAIF_MASK) >> DAIF_SETCLR_SHIFT);
	}
	return oldpsw;
}
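
/*
 * Illustrative sketch only (not part of the original header): a
 * nesting-safe mask/restore sequence that restores the caller's DAIF
 * state instead of unmasking unconditionally:
 *
 *	register_t opsw = daif_disable(DAIF_I|DAIF_F);
 *	 ...critical work...
 *	reg_daif_write(opsw);
 */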

/* Data synchronization barrier, full system. */
static inline void
arm_dsb(void)
{
	__asm __volatile("dsb sy" ::: "memory");
}

/* Instruction synchronization barrier. */
static inline void
arm_isb(void)
{
	__asm __volatile("isb" ::: "memory");
}
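
/*
 * Illustrative sketch only (not part of the original header; the
 * accessor name is assumed to come from <aarch64/armreg.h>): a
 * context-changing system register write is typically followed by an
 * ISB so that later instructions observe the new value:
 *
 *	reg_sctlr_el1_write(new_sctlr);		assumed accessor
 *	arm_isb();
 */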

#endif /* _LOCORE */

#elif defined(__arm__)

#include <arm/locore.h>

#endif /* __aarch64__/__arm__ */

#endif /* _AARCH64_LOCORE_H_ */