1/*
2 * SPDX-License-Identifier: BSD-3-Clause
3 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
4 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

/* Emit a TLB invalidation instruction of the given type. */
#define TLB_INVALIDATE(_type) \
	tlbi	_type

/*
 * Minimal function entry/exit markers.
 * NOTE(review): ENTRY() expands to '.global x; x' ending in the bare
 * symbol, so call sites presumably append ':' to form the label --
 * confirm at usage. ENDPROC() expands to nothing (no .size/.type is
 * emitted); it exists for source compatibility.
 */
#define ENTRY(x) .global x; x
#define ENDPROC(x)
13
14	/*
15	 * This macro is used to create a function label and place the
16	 * code into a separate text section based on the function name
17	 * to enable elimination of unused code during linking. It also adds
18	 * basic debug information to enable call stack printing most of the
19	 * time. The optional _align parameter can be used to force a
20	 * non-standard alignment (indicated in powers of 2). The default is
21	 * _align=2 because aarch64 instructions must be word aligned.
22	 * Do *not* try to use a raw .align directive. Since func
23	 * switches to a new section, this would not have the desired effect.
24	 */
25	.macro func _name, _align=2
26	/*
27	 * Add Call Frame Information entry in the .debug_frame section for
28	 * debugger consumption. This enables callstack printing in debuggers.
29	 * This does not use any space in the final loaded binary, only in the
30	 * ELF file.
31	 * Note that a function manipulating the CFA pointer location (i.e. the
32	 * x29 frame pointer on AArch64) should declare it using the
33	 * appropriate .cfi* directives, or be prepared to have a degraded
34	 * debugging experience.
35	 */
36	.cfi_sections .debug_frame
37	.section .text.asm.\_name, "ax"
38	.type \_name, %function
39	/*
40	 * .cfi_startproc and .cfi_endproc are needed to output entries in
41	 * .debug_frame
42	 */
43	.cfi_startproc
44	.align \_align
45	\_name:
46	.endm
47
48	/*
49	 * This macro is used to mark the end of a function.
50	 */
51	.macro endfunc _name
52		.cfi_endproc
53		.size \_name, . - \_name
54	.endm
55
56
	/*
	 * Return the size, in bytes, of the smallest data cache line in
	 * \reg. CTR_EL0.DminLine (bits [19:16]) holds log2 of the number
	 * of 4-byte words in the smallest data cache line implemented.
	 * Clobbers \tmp.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0			/* read Cache Type Register */
	ubfx	\tmp, \tmp, #16, #4		/* extract DminLine field */
	mov	\reg, #4			/* bytes per word */
	lsl	\reg, \reg, \tmp		/* line size = 4 << DminLine */
	.endm
63
64	/*
65	 * Declare the exception vector table, enforcing it is aligned on a
66	 * 2KB boundary, as required by the ARMv8 architecture.
67	 * Use zero bytes as the fill value to be stored in the padding bytes
68	 * so that it inserts illegal AArch64 instructions. This increases
69	 * security, robustness and potentially facilitates debugging.
70	 */
71	.macro vector_base  label, section_name=.vectors
72	.section \section_name, "ax"
73	.align 11, 0
74	\label:
75	.endm
76
77	/*
78	 * Create an entry in the exception vector table, enforcing it is
79	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
80	 * Use zero bytes as the fill value to be stored in the padding bytes
81	 * so that it inserts illegal AArch64 instructions. This increases
82	 * security, robustness and potentially facilitates debugging.
83	 */
84	.macro vector_entry  label, section_name=.vectors
85	.cfi_sections .debug_frame
86	.section \section_name, "ax"
87	.align 7, 0
88	.type \label, %function
89	.cfi_startproc
90	\label:
91	.endm
92
93	/*
94	 * Add the bytes until fill the full exception vector, whose size is always
95	 * 32 instructions. If there are more than 32 instructions in the
96	 * exception vector then an error is emitted.
97	 */
98	.macro end_vector_entry label
99	.cfi_endproc
100	.fill	\label + (32 * 4) - .
101	.endm
102
103	/*
104	 * Helper macro to generate the best mov/movk combinations according
105	 * the value to be moved. The 16 bits from '_shift' are tested and
106	 * if not zero, they are moved into '_reg' without affecting
107	 * other bits.
108	 */
109	.macro _mov_imm16 _reg, _val, _shift
110		.if (\_val >> \_shift) & 0xffff
111			.if (\_val & (1 << \_shift - 1))
112				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
113			.else
114				mov	\_reg, \_val & (0xffff << \_shift)
115			.endif
116		.endif
117	.endm
118
119	/*
120	 * Helper macro to load arbitrary values into 32 or 64-bit registers
121	 * which generates the best mov/movk combinations. Many base addresses
122	 * are 64KB aligned the macro will eliminate updating bits 15:0 in
123	 * that case
124	 */
125	.macro mov_imm _reg, _val
126		.if (\_val) == 0
127			mov	\_reg, #0
128		.else
129			_mov_imm16	\_reg, (\_val), 0
130			_mov_imm16	\_reg, (\_val), 16
131			_mov_imm16	\_reg, (\_val), 32
132			_mov_imm16	\_reg, (\_val), 48
133		.endif
134	.endm
135
136	/*
137	 * Assembler panic. At the moment there is no support for crash
138	 * reporting in assembler without having a stack available, so for
139	 * the time being just enter into a busy loop and stay there.
140	 */
141	.macro asm_panic
142		b	.
143	.endm
144
145	/*
146	 * Assembler macro to enable asm_assert. Use this macro wherever
147	 * assert is required in assembly. Please note that the macro makes
148	 * use of label '300' to provide the logic and the caller
149	 * should make sure that this label is not used to branch prior
150	 * to calling this macro.
151	 */
152	.macro ASM_ASSERT _cc
153		.ifndef .L_assert_filename
154			.pushsection .rodata.str1.1, "aS"
155			.L_assert_filename:
156				.string	__FILE__
157			.popsection
158		.endif
159		b.\_cc	300f
160		adr	x0, .L_assert_filename
161		mov	x1, __LINE__
162		asm_panic
163	300:
164	.endm
165
166#endif /* ASM_MACROS_S */
167