1 /* SPDX-License-Identifier: GPL-2.0+ */
2 /*
3  * include/asm-arm/macro.h
4  *
5  * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
6  */
7 
8 #ifndef __ASM_ARM_MACRO_H__
9 #define __ASM_ARM_MACRO_H__
10 
11 #ifdef CONFIG_ARM64
12 #include <asm/system.h>
13 #endif
14 
15 #ifdef __ASSEMBLY__
16 
17 /*
18  * These macros provide a convenient way to write 8, 16 and 32 bit data
19  * to any address.
20  * Registers r4 and r5 are used, any data in these registers are
21  * overwritten by the macros.
22  * The macros are valid for any ARM architecture, they do not implement
23  * any memory barriers so caution is recommended when using these when the
24  * caches are enabled or on a multi-core system.
25  */
26 
/*
 * write32 - store the 32-bit constant \data to the address \addr.
 * Clobbers r4 (address) and r5 (value); issues no memory barrier.
 */
.macro	write32, addr, data
	ldr	r4, =\addr		/* r4 = destination address */
	ldr	r5, =\data		/* r5 = value to store */
	str	r5, [r4]
.endm
32 
/*
 * write16 - store the 16-bit constant \data to the address \addr.
 * Clobbers r4 (address) and r5 (value); issues no memory barrier.
 * NOTE(review): the "ldrh rX, =imm" literal-pool form is a GNU as
 * extension — confirm when moving to a different assembler.
 */
.macro	write16, addr, data
	ldr	r4, =\addr		/* r4 = destination address */
	ldrh	r5, =\data		/* r5 = value to store */
	strh	r5, [r4]
.endm
38 
/*
 * write8 - store the 8-bit constant \data to the address \addr.
 * Clobbers r4 (address) and r5 (value); issues no memory barrier.
 * NOTE(review): the "ldrb rX, =imm" literal-pool form is a GNU as
 * extension — confirm when moving to a different assembler.
 */
.macro	write8, addr, data
	ldr	r4, =\addr		/* r4 = destination address */
	ldrb	r5, =\data		/* r5 = value to store */
	strb	r5, [r4]
.endm
44 
45 /*
46  * This macro generates a loop that can be used for delays in the code.
47  * Register r4 is used, any data in this register is overwritten by the
48  * macro.
 * The macro is valid for any ARM architecture. The actual time spent in the
50  * loop will vary from CPU to CPU though.
51  */
52 
/*
 * wait_timer - busy-wait delay loop.
 * Clobbers r4 and the condition flags. The loop body executes
 * \time + 1 times: "subs" keeps the carry flag set until the
 * decrement borrows (r4 wraps below zero), at which point bcs
 * falls through.
 */
.macro	wait_timer, time
	ldr	r4, =\time		/* r4 = iteration count */
1:
	nop
	subs	r4, r4, #1		/* C stays set until borrow */
	bcs	1b
.endm
60 
61 #ifdef CONFIG_ARM64
62 /*
63  * Register aliases.
64  */
lr	.req	x30		/* alias: lr = x30, the AArch64 link register */
66 
67 /*
68  * Branch according to exception level
69  */
/*
 * switch_el - branch according to the current exception level.
 * Clobbers \xreg and the condition flags. CurrentEL holds the EL in
 * bits [3:2], hence the encoded compare values 0xc/0x8/0x4 for
 * EL3/EL2/EL1. If none match (i.e. EL0) execution falls through.
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc		/* EL3 */
	b.eq	\el3_label
	cmp	\xreg, 0x8		/* EL2 */
	b.eq	\el2_label
	cmp	\xreg, 0x4		/* EL1 */
	b.eq	\el1_label
.endm
79 
80 /*
81  * Branch if we are not in the highest exception level
82  */
/*
 * branch_if_not_highest_el - branch to \label if we are not running in
 * the highest implemented exception level.
 * Clobbers \xreg and the condition flags.
 * At EL3 we are by definition highest (falls straight to 3f). At EL2
 * or EL1 the ID_AA64PFR0_EL1 fields are probed to see whether a
 * higher EL is implemented on this core.
 */
.macro	branch_if_not_highest_el, xreg, label
	switch_el \xreg, 3f, 2f, 1f

2:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3)	/* EL3 implemented? */
	cbnz	\xreg, \label
	b	3f

1:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3 | ID_AA64PFR0_EL1_EL2)
	cbnz	\xreg, \label		/* EL2 or EL3 implemented? */

3:
.endm
97 
98 /*
99  * Branch if current processor is a Cortex-A57 core.
100  */
/*
 * branch_if_a57_core - branch to \a57_label if this CPU is a
 * Cortex-A57 core. Clobbers \xreg and the condition flags.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	ubfx	\xreg, \xreg, #4, #12	/* extract PartNum, MIDR[15:4] */
	cmp	\xreg, #0xD07		/* Cortex-A57 part number */
	b.eq	\a57_label
.endm
108 
109 /*
110  * Branch if current processor is a Cortex-A53 core.
111  */
/*
 * branch_if_a53_core - branch to \a53_label if this CPU is a
 * Cortex-A53 core. Clobbers \xreg and the condition flags.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	ubfx	\xreg, \xreg, #4, #12	/* extract PartNum, MIDR[15:4] */
	cmp	\xreg, #0xD03		/* Cortex-A53 part number */
	b.eq	\a53_label
.endm
119 
120 /*
121  * Branch if current processor is a slave,
122  * choose processor with all zero affinity value as the master.
123  */
/*
 * branch_if_slave - branch to \slave_label if current processor is a
 * slave; the processor with all-zero affinity value is the master.
 * Clobbers \xreg and the condition flags.
 * MPIDR_EL1 affinity layout: Aff0 [7:0], Aff1 [15:8], Aff2 [23:16],
 * Aff3 [39:32] — hence the final shift is #16 to skip the non-affinity
 * bits [31:24] before testing Aff3.
 * Without CONFIG_ARMV8_MULTIENTRY the macro expands to nothing
 * (single-entry: every core proceeds as master).
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
#endif
.endm
141 
142 /*
143  * Branch if current processor is a master,
144  * choose processor with all zero affinity value as the master.
145  */
/*
 * branch_if_master - branch to \master_label if current processor is
 * the master; the processor with all-zero affinity value is the master.
 * Clobbers \xreg1, \xreg2 and the condition flags.
 * The shift pairs build a mask of just the affinity fields:
 * lsr/lsl #32 keeps Aff3 (bits [39:32] region), lsl/lsr #40 keeps
 * Aff2..Aff0 (bits [23:0]), discarding the non-affinity MPIDR bits
 * [31:24]. The core is the master iff the combined value is zero.
 * Without CONFIG_ARMV8_MULTIENTRY the single core is always master.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32	/* \xreg2 = upper affinity (Aff3) */
	lsl	\xreg2, \xreg2, #32
	lsl	\xreg1, \xreg1, #40	/* \xreg1 = Aff2..Aff0 only */
	lsr	\xreg1, \xreg1, #40
	orr	\xreg1, \xreg1, \xreg2	/* combined affinity value */
	cbz	\xreg1, \master_label
#else
	b	\master_label
#endif
.endm
160 
161 /*
162  * Switch from EL3 to EL2 for ARMv8
163  * @ep:     kernel entry point
164  * @flag:   The execution state flag for lower exception
165  *          level, ES_TO_AARCH64 or ES_TO_AARCH32
166  * @tmp:    temporary register
167  *
168  * For loading 32-bit OS, x1 is machine nr and x2 is ftaddr.
169  * For loading 64-bit OS, x0 is physical address to the FDT blob.
170  * They will be passed to the guest.
171  */
/*
 * armv8_switch_to_el2_m - drop from EL3 to EL2 and enter at \ep.
 * \flag selects the target execution state (ES_TO_AARCH64 enters
 * AArch64 EL2h; ES_TO_AARCH32 enters AArch32 Hyp mode).
 * Clobbers \tmp; does not return (ends with eret to \ep).
 */
.macro armv8_switch_to_el2_m, ep, flag, tmp
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */

	/* Initialize Generic Timers: clear the virtual counter offset */
	msr	cntvoff_el2, xzr

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL2_RES1 | SCTLR_EL2_EE_LE |\
			SCTLR_EL2_WXN_DIS | SCTLR_EL2_ICACHE_DIS |\
			SCTLR_EL2_SA_DIS | SCTLR_EL2_DCACHE_DIS |\
			SCTLR_EL2_ALIGN_DIS | SCTLR_EL2_MMU_DIS)
	msr	sctlr_el2, \tmp

	mov	\tmp, sp
	msr	sp_el2, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el3
	msr	vbar_el2, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL2 or AArch32 Hypervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/*
	 * The next lower exception level is AArch64, 64bit EL2 | HCE |
	 * RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 * and the SMD depends on requirements.
	 */
#ifdef CONFIG_ARMV8_PSCI
	/* PSCI needs SMC available, so SMD stays clear */
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_RES1 | SCR_EL3_NS_EN)
#else
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
#endif

#ifdef CONFIG_ARMV8_EA_EL3_FIRST
	orr	\tmp, \tmp, #SCR_EL3_EA_EN	/* route External Aborts to EL3 */
#endif
	msr	scr_el3, \tmp

	/* Return to the EL2_SP2 mode from EL3 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL2H)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret

1:
	/*
	 * The next lower exception level is AArch32, 32bit EL2 | HCE |
	 * SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 */
	ldr	\tmp, =(SCR_EL3_RW_AARCH32 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
	msr	scr_el3, \tmp

	/* Return to AArch32 Hypervisor mode */
	ldr     \tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_HYP)
	msr	spsr_el3, \tmp
	msr     elr_el3, \ep
	eret
.endm
247 
248 /*
249  * Switch from EL2 to EL1 for ARMv8
250  * @ep:     kernel entry point
251  * @flag:   The execution state flag for lower exception
252  *          level, ES_TO_AARCH64 or ES_TO_AARCH32
253  * @tmp:    temporary register
254  *
255  * For loading 32-bit OS, x1 is machine nr and x2 is ftaddr.
256  * For loading 64-bit OS, x0 is physical address to the FDT blob.
257  * They will be passed to the guest.
258  */
/*
 * armv8_switch_to_el1_m - drop from EL2 to EL1 and enter at \ep.
 * \flag selects the target execution state (ES_TO_AARCH64 enters
 * AArch64 EL1h; ES_TO_AARCH32 enters AArch32 Supervisor mode).
 * Clobbers \tmp and \tmp2; does not return (ends with eret to \ep).
 */
.macro armv8_switch_to_el1_m, ep, flag, tmp, tmp2
	/* Initialize Generic Timers */
	mrs	\tmp, cnthctl_el2
	/* Enable EL1 access to timers */
	orr	\tmp, \tmp, #(CNTHCTL_EL2_EL1PCEN_EN |\
		CNTHCTL_EL2_EL1PCTEN_EN)
	msr	cnthctl_el2, \tmp
	msr	cntvoff_el2, xzr	/* no virtual counter offset */

	/* Initialize MPID/MPIDR registers: mirror the physical IDs */
	mrs	\tmp, midr_el1
	msr	vpidr_el2, \tmp
	mrs	\tmp, mpidr_el1
	msr	vmpidr_el2, \tmp

	/* Disable coprocessor traps */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\tmp, #CPACR_EL1_FPEN_EN
	msr	cpacr_el1, \tmp		/* Enable FP/SIMD at EL1 */

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL1_RES1 | SCTLR_EL1_UCI_DIS |\
			SCTLR_EL1_EE_LE | SCTLR_EL1_WXN_DIS |\
			SCTLR_EL1_NTWE_DIS | SCTLR_EL1_NTWI_DIS |\
			SCTLR_EL1_UCT_DIS | SCTLR_EL1_DZE_DIS |\
			SCTLR_EL1_ICACHE_DIS | SCTLR_EL1_UMA_DIS |\
			SCTLR_EL1_SED_EN | SCTLR_EL1_ITD_EN |\
			SCTLR_EL1_CP15BEN_DIS | SCTLR_EL1_SA0_DIS |\
			SCTLR_EL1_SA_DIS | SCTLR_EL1_DCACHE_DIS |\
			SCTLR_EL1_ALIGN_DIS | SCTLR_EL1_MMU_DIS)
	msr	sctlr_el1, \tmp

	mov	\tmp, sp
	msr	sp_el1, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el2
	msr	vbar_el1, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL1 or AArch32 Supervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/* Initialize HCR_EL2 */
	/* Only disable PAuth traps if PAuth is supported */
	mrs	\tmp, id_aa64isar1_el1
	ldr	\tmp2, =(ID_AA64ISAR1_EL1_GPI | ID_AA64ISAR1_EL1_GPA | \
		      ID_AA64ISAR1_EL1_API | ID_AA64ISAR1_EL1_APA)
	tst	\tmp, \tmp2		/* Z set => no PAuth fields present */
	mov	\tmp2, #(HCR_EL2_RW_AARCH64 | HCR_EL2_HCD_DIS)
	orr	\tmp, \tmp2, #(HCR_EL2_APK | HCR_EL2_API)
	csel	\tmp, \tmp2, \tmp, eq	/* add APK|API only if PAuth present */
	msr	hcr_el2, \tmp

	/* Return to the EL1_SP1 mode from EL2 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL1H)
	msr	spsr_el2, \tmp
	msr     elr_el2, \ep
	eret

1:
	/* Initialize HCR_EL2 */
	ldr	\tmp, =(HCR_EL2_RW_AARCH32 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to AArch32 Supervisor mode from EL2 */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_SVC)
	msr     spsr_el2, \tmp
	msr     elr_el2, \ep
	eret
.endm
341 
342 #if defined(CONFIG_GICV3)
/*
 * gic_wait_for_interrupt_m (GICv3) - sleep in wfi, acknowledging and
 * completing each interrupt, until interrupt ID 0 is received.
 * Clobbers \xreg1.
 */
.macro gic_wait_for_interrupt_m xreg1
0 :	wfi
	mrs     \xreg1, ICC_IAR1_EL1	/* acknowledge: read INTID */
	msr     ICC_EOIR1_EL1, \xreg1	/* signal end of interrupt */
	cbnz    \xreg1, 0b		/* keep waiting unless INTID 0 */
.endm
349 #elif defined(CONFIG_GICV2)
/*
 * gic_wait_for_interrupt_m (GICv2) - sleep in wfi, acknowledging and
 * completing each interrupt, until interrupt ID 0 is received.
 * \xreg1 holds the GIC CPU interface base address; clobbers \wreg2.
 */
.macro gic_wait_for_interrupt_m xreg1, wreg2
0 :	wfi
	ldr     \wreg2, [\xreg1, GICC_AIAR]	/* acknowledge: read INTID */
	str     \wreg2, [\xreg1, GICC_AEOIR]	/* signal end of interrupt */
	and	\wreg2, \wreg2, #0x3ff		/* mask to 10-bit interrupt ID */
	cbnz    \wreg2, 0b			/* keep waiting unless INTID 0 */
.endm
357 #endif
358 
359 #endif /* CONFIG_ARM64 */
360 
361 #endif /* __ASSEMBLY__ */
362 #endif /* __ASM_ARM_MACRO_H__ */
363