1/* SPDX-License-Identifier: GPL-2.0+ */ 2/* 3 * armboot - Startup Code for OMAP3530/ARM Cortex CPU-core 4 * 5 * Copyright (c) 2004 Texas Instruments <r-woodruff2@ti.com> 6 * 7 * Copyright (c) 2001 Marius Gröger <mag@sysgo.de> 8 * Copyright (c) 2002 Alex Züpke <azu@sysgo.de> 9 * Copyright (c) 2002 Gary Jennejohn <garyj@denx.de> 10 * Copyright (c) 2003 Richard Woodruff <r-woodruff2@ti.com> 11 * Copyright (c) 2003 Kshitij <kshitij@ti.com> 12 * Copyright (c) 2006-2008 Syed Mohammed Khasim <x0khasim@ti.com> 13 */ 14 15#include <asm-offsets.h> 16#include <config.h> 17#include <asm/system.h> 18#include <linux/linkage.h> 19#include <asm/armv7.h> 20 21/************************************************************************* 22 * 23 * Startup Code (reset vector) 24 * 25 * Do important init only if we don't start from memory! 26 * Setup memory and board specific bits prior to relocation. 27 * Relocate armboot to ram. Setup stack. 28 * 29 *************************************************************************/ 30 31 .globl reset 32 .globl save_boot_params_ret 33 .type save_boot_params_ret,%function 34#ifdef CONFIG_ARMV7_LPAE 35 .global switch_to_hypervisor_ret 36#endif 37 38reset: 39 /* Allow the board to save important registers */ 40 b save_boot_params 41save_boot_params_ret: 42#ifdef CONFIG_POSITION_INDEPENDENT 43 /* 44 * Fix .rela.dyn relocations. This allows U-Boot to loaded to and 45 * executed at a different address than it was linked at. 46 */ 47pie_fixup: 48 adr r0, reset /* r0 <- Runtime value of reset label */ 49 ldr r1, =reset /* r1 <- Linked value of reset label */ 50 subs r4, r0, r1 /* r4 <- Runtime-vs-link offset */ 51 beq pie_fixup_done 52 53 adr r0, pie_fixup 54 ldr r1, _rel_dyn_start_ofs 55 add r2, r0, r1 /* r2 <- Runtime &__rel_dyn_start */ 56 ldr r1, _rel_dyn_end_ofs 57 add r3, r0, r1 /* r3 <- Runtime &__rel_dyn_end */ 58 59pie_fix_loop: 60 ldr r0, [r2] /* r0 <- Link location */ 61 ldr r1, [r2, #4] /* r1 <- fixup */ 62 cmp r1, #23 /* relative fixup? */ 63 bne pie_skip_reloc 64 65 /* relative fix: increase location by offset */ 66 add r0, r4 67 ldr r1, [r0] 68 add r1, r4 69 str r1, [r0] 70 str r0, [r2] 71 add r2, #8 72pie_skip_reloc: 73 cmp r2, r3 74 blo pie_fix_loop 75pie_fixup_done: 76#endif 77 78#ifdef CONFIG_ARMV7_LPAE 79/* 80 * check for Hypervisor support 81 */ 82 mrc p15, 0, r0, c0, c1, 1 @ read ID_PFR1 83 and r0, r0, #CPUID_ARM_VIRT_MASK @ mask virtualization bits 84 cmp r0, #(1 << CPUID_ARM_VIRT_SHIFT) 85 beq switch_to_hypervisor 86switch_to_hypervisor_ret: 87#endif 88 /* 89 * disable interrupts (FIQ and IRQ), also set the cpu to SVC32 mode, 90 * except if in HYP mode already 91 */ 92 mrs r0, cpsr 93 and r1, r0, #0x1f @ mask mode bits 94 teq r1, #0x1a @ test for HYP mode 95 bicne r0, r0, #0x1f @ clear all mode bits 96 orrne r0, r0, #0x13 @ set SVC mode 97 orr r0, r0, #0xc0 @ disable FIQ and IRQ 98 msr cpsr,r0 99 100/* 101 * Setup vector: 102 * (OMAP4 spl TEXT_BASE is not 32 byte aligned. 103 * Continue to use ROM code vector only in OMAP4 spl) 104 */ 105#if !(defined(CONFIG_OMAP44XX) && defined(CONFIG_SPL_BUILD)) 106 /* Set V=0 in CP15 SCTLR register - for VBAR to point to vector */ 107 mrc p15, 0, r0, c1, c0, 0 @ Read CP15 SCTLR Register 108 bic r0, #CR_V @ V = 0 109 mcr p15, 0, r0, c1, c0, 0 @ Write CP15 SCTLR Register 110 111#ifdef CONFIG_HAS_VBAR 112 /* Set vector address in CP15 VBAR register */ 113 ldr r0, =_start 114 mcr p15, 0, r0, c12, c0, 0 @Set VBAR 115#endif 116#endif 117 118 /* the mask ROM code should have PLL and others stable */ 119#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT) 120#ifdef CONFIG_CPU_V7A 121 bl cpu_init_cp15 122#endif 123#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT_ONLY) 124 bl cpu_init_crit 125#endif 126#endif 127 128 bl _main 129 130/*------------------------------------------------------------------------------*/ 131 132ENTRY(c_runtime_cpu_setup) 133/* 134 * If I-cache is enabled invalidate it 135 */ 136#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF) 137 mcr p15, 0, r0, c7, c5, 0 @ invalidate icache 138 mcr p15, 0, r0, c7, c10, 4 @ DSB 139 mcr p15, 0, r0, c7, c5, 4 @ ISB 140#endif 141 142 bx lr 143 144ENDPROC(c_runtime_cpu_setup) 145 146/************************************************************************* 147 * 148 * void save_boot_params(u32 r0, u32 r1, u32 r2, u32 r3) 149 * __attribute__((weak)); 150 * 151 * Stack pointer is not yet initialized at this moment 152 * Don't save anything to stack even if compiled with -O0 153 * 154 *************************************************************************/ 155ENTRY(save_boot_params) 156 b save_boot_params_ret @ back to my caller 157ENDPROC(save_boot_params) 158 .weak save_boot_params 159 160#ifdef CONFIG_ARMV7_LPAE 161ENTRY(switch_to_hypervisor) 162 b switch_to_hypervisor_ret 163ENDPROC(switch_to_hypervisor) 164 .weak switch_to_hypervisor 165#endif 166 167/************************************************************************* 168 * 169 * cpu_init_cp15 170 * 171 * Setup CP15 registers (cache, MMU, TLBs). The I-cache is turned on unless 172 * CONFIG_SYS_ICACHE_OFF is defined. 173 * 174 *************************************************************************/ 175ENTRY(cpu_init_cp15) 176 /* 177 * Invalidate L1 I/D 178 */ 179 mov r0, #0 @ set up for MCR 180 mcr p15, 0, r0, c8, c7, 0 @ invalidate TLBs 181 mcr p15, 0, r0, c7, c5, 0 @ invalidate icache 182 mcr p15, 0, r0, c7, c5, 6 @ invalidate BP array 183 mcr p15, 0, r0, c7, c10, 4 @ DSB 184 mcr p15, 0, r0, c7, c5, 4 @ ISB 185 186 /* 187 * disable MMU stuff and caches 188 */ 189 mrc p15, 0, r0, c1, c0, 0 190 bic r0, r0, #0x00002000 @ clear bits 13 (--V-) 191 bic r0, r0, #0x00000007 @ clear bits 2:0 (-CAM) 192 orr r0, r0, #0x00000002 @ set bit 1 (--A-) Align 193 orr r0, r0, #0x00000800 @ set bit 11 (Z---) BTB 194#if CONFIG_IS_ENABLED(SYS_ICACHE_OFF) 195 bic r0, r0, #0x00001000 @ clear bit 12 (I) I-cache 196#else 197 orr r0, r0, #0x00001000 @ set bit 12 (I) I-cache 198#endif 199 mcr p15, 0, r0, c1, c0, 0 200 201#ifdef CONFIG_ARM_ERRATA_716044 202 mrc p15, 0, r0, c1, c0, 0 @ read system control register 203 orr r0, r0, #1 << 11 @ set bit #11 204 mcr p15, 0, r0, c1, c0, 0 @ write system control register 205#endif 206 207#if (defined(CONFIG_ARM_ERRATA_742230) || defined(CONFIG_ARM_ERRATA_794072)) 208 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 209 orr r0, r0, #1 << 4 @ set bit #4 210 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 211#endif 212 213#ifdef CONFIG_ARM_ERRATA_743622 214 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 215 orr r0, r0, #1 << 6 @ set bit #6 216 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 217#endif 218 219#ifdef CONFIG_ARM_ERRATA_751472 220 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 221 orr r0, r0, #1 << 11 @ set bit #11 222 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 223#endif 224#ifdef CONFIG_ARM_ERRATA_761320 225 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 226 orr r0, r0, #1 << 21 @ set bit #21 227 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 228#endif 229 230#ifdef CONFIG_ARM_ERRATA_845369 231 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 232 orr r0, r0, #1 << 22 @ set bit #22 233 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 234#endif 235 236 mov r5, lr @ Store my Caller 237 mrc p15, 0, r1, c0, c0, 0 @ r1 has Read Main ID Register (MIDR) 238 mov r3, r1, lsr #20 @ get variant field 239 and r3, r3, #0xf @ r3 has CPU variant 240 and r4, r1, #0xf @ r4 has CPU revision 241 mov r2, r3, lsl #4 @ shift variant field for combined value 242 orr r2, r4, r2 @ r2 has combined CPU variant + revision 243 244/* Early stack for ERRATA that needs into call C code */ 245#if defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_STACK) 246 ldr r0, =(CONFIG_SPL_STACK) 247#else 248 ldr r0, =(CONFIG_SYS_INIT_SP_ADDR) 249#endif 250 bic r0, r0, #7 /* 8-byte alignment for ABI compliance */ 251 mov sp, r0 252 253#ifdef CONFIG_ARM_ERRATA_798870 254 cmp r2, #0x30 @ Applies to lower than R3p0 255 bge skip_errata_798870 @ skip if not affected rev 256 cmp r2, #0x20 @ Applies to including and above R2p0 257 blt skip_errata_798870 @ skip if not affected rev 258 259 mrc p15, 1, r0, c15, c0, 0 @ read l2 aux ctrl reg 260 orr r0, r0, #1 << 7 @ Enable hazard-detect timeout 261 push {r1-r5} @ Save the cpu info registers 262 bl v7_arch_cp15_set_l2aux_ctrl 263 isb @ Recommended ISB after l2actlr update 264 pop {r1-r5} @ Restore the cpu info - fall through 265skip_errata_798870: 266#endif 267 268#ifdef CONFIG_ARM_ERRATA_801819 269 cmp r2, #0x24 @ Applies to lt including R2p4 270 bgt skip_errata_801819 @ skip if not affected rev 271 cmp r2, #0x20 @ Applies to including and above R2p0 272 blt skip_errata_801819 @ skip if not affected rev 273 mrc p15, 0, r0, c0, c0, 6 @ pick up REVIDR reg 274 and r0, r0, #1 << 3 @ check REVIDR[3] 275 cmp r0, #1 << 3 276 beq skip_errata_801819 @ skip erratum if REVIDR[3] is set 277 278 mrc p15, 0, r0, c1, c0, 1 @ read auxilary control register 279 orr r0, r0, #3 << 27 @ Disables streaming. All write-allocate 280 @ lines allocate in the L1 or L2 cache. 281 orr r0, r0, #3 << 25 @ Disables streaming. All write-allocate 282 @ lines allocate in the L1 cache. 283 push {r1-r5} @ Save the cpu info registers 284 bl v7_arch_cp15_set_acr 285 pop {r1-r5} @ Restore the cpu info - fall through 286skip_errata_801819: 287#endif 288 289#ifdef CONFIG_ARM_CORTEX_A15_CVE_2017_5715 290 mrc p15, 0, r0, c1, c0, 1 @ read auxilary control register 291 orr r0, r0, #1 << 0 @ Enable invalidates of BTB 292 push {r1-r5} @ Save the cpu info registers 293 bl v7_arch_cp15_set_acr 294 pop {r1-r5} @ Restore the cpu info - fall through 295#endif 296 297#ifdef CONFIG_ARM_ERRATA_454179 298 mrc p15, 0, r0, c1, c0, 1 @ Read ACR 299 300 cmp r2, #0x21 @ Only on < r2p1 301 orrlt r0, r0, #(0x3 << 6) @ Set DBSM(BIT7) and IBE(BIT6) bits 302 303 push {r1-r5} @ Save the cpu info registers 304 bl v7_arch_cp15_set_acr 305 pop {r1-r5} @ Restore the cpu info - fall through 306#endif 307 308#if defined(CONFIG_ARM_ERRATA_430973) || defined (CONFIG_ARM_CORTEX_A8_CVE_2017_5715) 309 mrc p15, 0, r0, c1, c0, 1 @ Read ACR 310 311#ifdef CONFIG_ARM_CORTEX_A8_CVE_2017_5715 312 orr r0, r0, #(0x1 << 6) @ Set IBE bit always to enable OS WA 313#else 314 cmp r2, #0x21 @ Only on < r2p1 315 orrlt r0, r0, #(0x1 << 6) @ Set IBE bit 316#endif 317 push {r1-r5} @ Save the cpu info registers 318 bl v7_arch_cp15_set_acr 319 pop {r1-r5} @ Restore the cpu info - fall through 320#endif 321 322#ifdef CONFIG_ARM_ERRATA_621766 323 mrc p15, 0, r0, c1, c0, 1 @ Read ACR 324 325 cmp r2, #0x21 @ Only on < r2p1 326 orrlt r0, r0, #(0x1 << 5) @ Set L1NEON bit 327 328 push {r1-r5} @ Save the cpu info registers 329 bl v7_arch_cp15_set_acr 330 pop {r1-r5} @ Restore the cpu info - fall through 331#endif 332 333#ifdef CONFIG_ARM_ERRATA_725233 334 mrc p15, 1, r0, c9, c0, 2 @ Read L2ACR 335 336 cmp r2, #0x21 @ Only on < r2p1 (Cortex A8) 337 orrlt r0, r0, #(0x1 << 27) @ L2 PLD data forwarding disable 338 339 push {r1-r5} @ Save the cpu info registers 340 bl v7_arch_cp15_set_l2aux_ctrl 341 pop {r1-r5} @ Restore the cpu info - fall through 342#endif 343 344#ifdef CONFIG_ARM_ERRATA_852421 345 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 346 orr r0, r0, #1 << 24 @ set bit #24 347 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 348#endif 349 350#ifdef CONFIG_ARM_ERRATA_852423 351 mrc p15, 0, r0, c15, c0, 1 @ read diagnostic register 352 orr r0, r0, #1 << 12 @ set bit #12 353 mcr p15, 0, r0, c15, c0, 1 @ write diagnostic register 354#endif 355 356 mov pc, r5 @ back to my caller 357ENDPROC(cpu_init_cp15) 358 359#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT) && \ 360 !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT_ONLY) 361/************************************************************************* 362 * 363 * CPU_init_critical registers 364 * 365 * setup important registers 366 * setup memory timing 367 * 368 *************************************************************************/ 369ENTRY(cpu_init_crit) 370 /* 371 * Jump to board specific initialization... 372 * The Mask ROM will have already initialized 373 * basic memory. Go here to bump up clock rate and handle 374 * wake up conditions. 375 */ 376 b lowlevel_init @ go setup pll,mux,memory 377ENDPROC(cpu_init_crit) 378#endif 379 380#if CONFIG_POSITION_INDEPENDENT 381_rel_dyn_start_ofs: 382 .word __rel_dyn_start - pie_fixup 383_rel_dyn_end_ofs: 384 .word __rel_dyn_end - pie_fixup 385#endif 386