X-Git-Url: https://git.librecmc.org/?a=blobdiff_plain;f=arch%2Farm%2Fcpu%2Farmv8%2Fstart.S;h=03e744e4a673a27115c29e001c295037d75548d1;hb=6c7010b779ef29a40ed66acc414f65d2f4b75ced;hp=670e323b61b37bc56bce5972fb80f13541263f94;hpb=44faff24f58859bdc1acf28ac739020b5091678a;p=oweals%2Fu-boot.git

diff --git a/arch/arm/cpu/armv8/start.S b/arch/arm/cpu/armv8/start.S
index 670e323b61..03e744e4a6 100644
--- a/arch/arm/cpu/armv8/start.S
+++ b/arch/arm/cpu/armv8/start.S
@@ -19,8 +19,6 @@
 
 .globl	_start
 _start:
-	b	reset
-
 #ifdef CONFIG_ENABLE_ARM_SOC_BOOT0_HOOK
 /*
  * Various SoCs need something special and SoC-specific up front in
@@ -28,7 +26,8 @@ _start:
  * use it here.
  */
 #include <asm/arch/boot0.h>
-ARM_SOC_BOOT0_HOOK
+#else
+	b	reset
 #endif
 
 .align 3
@@ -53,6 +52,37 @@ _bss_end_ofs:
 	.quad	__bss_end - _start
 
 reset:
+	/* Allow the board to save important registers */
+	b	save_boot_params
+.globl	save_boot_params_ret
+save_boot_params_ret:
+
+#if CONFIG_POSITION_INDEPENDENT
+	/*
+	 * Fix .rela.dyn relocations. This allows U-Boot to be loaded to and
+	 * executed at a different address than it was linked at.
+	 */
+pie_fixup:
+	adr	x0, _start		/* x0 <- Runtime value of _start */
+	ldr	x1, _TEXT_BASE		/* x1 <- Linked value of _start */
+	sub	x9, x0, x1		/* x9 <- Run-vs-link offset */
+	adr	x2, __rel_dyn_start	/* x2 <- Runtime &__rel_dyn_start */
+	adr	x3, __rel_dyn_end	/* x3 <- Runtime &__rel_dyn_end */
+pie_fix_loop:
+	ldp	x0, x1, [x2], #16	/* (x0, x1) <- (Link location, fixup) */
+	ldr	x4, [x2], #8		/* x4 <- addend */
+	cmp	w1, #1027		/* relative fixup? */
+	bne	pie_skip_reloc
+	/* relative fix: store addend plus offset at dest location */
+	add	x0, x0, x9
+	add	x4, x4, x9
+	str	x4, [x0]
+pie_skip_reloc:
+	cmp	x2, x3
+	b.lo	pie_fix_loop
+pie_fixup_done:
+#endif
+
 #ifdef CONFIG_SYS_RESET_SCTRL
 	bl reset_sctrl
 #endif
@@ -81,6 +111,20 @@ reset:
 	msr	cpacr_el1, x0		/* Enable FP/SIMD */
 0:
 
+	/*
+	 * Enable SMPEN bit for coherency.
+	 * This register is not architectural but at the moment
+	 * this bit should be set for A53/A57/A72.
+	 */
+#ifdef CONFIG_ARMV8_SET_SMPEN
+	switch_el x1, 3f, 1f, 1f
+3:
+	mrs	x0, S3_1_c15_c2_1	/* cpuectlr_el1 */
+	orr	x0, x0, #0x40
+	msr	S3_1_c15_c2_1, x0
+1:
+#endif
+
 	/* Apply ARM core specific erratas */
 	bl	apply_core_errata
 
@@ -94,7 +138,11 @@ reset:
 	/* Processor specific initialization */
 	bl	lowlevel_init
 
-#ifdef CONFIG_ARMV8_MULTIENTRY
+#if defined(CONFIG_ARMV8_SPIN_TABLE) && !defined(CONFIG_SPL_BUILD)
+	branch_if_master x0, x1, master_cpu
+	b	spin_table_secondary_jump
+	/* never return */
+#elif defined(CONFIG_ARMV8_MULTIENTRY)
 	branch_if_master x0, x1, master_cpu
 
 	/*
@@ -106,10 +154,8 @@ slave_cpu:
 	ldr	x0, [x1]
 	cbz	x0, slave_cpu
 	br	x0			/* branch to the given address */
-master_cpu:
-	/* On the master CPU */
 #endif /* CONFIG_ARMV8_MULTIENTRY */
-
+master_cpu:
 	bl	_main
 
 #ifdef CONFIG_SYS_RESET_SCTRL
@@ -244,9 +290,17 @@ WEAK(lowlevel_init)
 	/*
 	 * All slaves will enter EL2 and optionally EL1.
 	 */
+	adr	x4, lowlevel_in_el2
+	ldr	x5, =ES_TO_AARCH64
 	bl	armv8_switch_to_el2
+
+lowlevel_in_el2:
 #ifdef CONFIG_ARMV8_SWITCH_TO_EL1
+	adr	x4, lowlevel_in_el1
+	ldr	x5, =ES_TO_AARCH64
 	bl	armv8_switch_to_el1
+
+lowlevel_in_el1:
 #endif
 
 #endif /* CONFIG_ARMV8_MULTIENTRY */
@@ -280,3 +334,7 @@ ENTRY(c_runtime_cpu_setup)
 
 	ret
 ENDPROC(c_runtime_cpu_setup)
+
+WEAK(save_boot_params)
+	b	save_boot_params_ret	/* back to my caller */
+ENDPROC(save_boot_params)
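The pie_fix_loop added above walks the .rela.dyn table that the linker emits for a position-independent build: each pass consumes one Elf64_Rela record and, for R_AARCH64_RELATIVE entries (type 1027, i.e. 0x403, which is what the cmp w1, #1027 tests), stores the addend plus the run-vs-link offset at the patched location. Below is a minimal reference sketch of the record layout the loop assumes; the macro names are illustrative and not part of the patch.

/*
 * Illustrative layout of one .rela.dyn entry (Elf64_Rela) as consumed
 * by pie_fix_loop; the #16 and #8 post-increments on x2 step over
 * exactly one 24-byte record per iteration.
 */
#define RELA_R_OFFSET	0	/* link-time address to patch            */
#define RELA_R_INFO	8	/* relocation type in the low 32 bits:   */
				/* 1027 == 0x403 == R_AARCH64_RELATIVE   */
#define RELA_R_ADDEND	16	/* value offset by the run/link delta    */
#define RELA_ENTSIZE	24	/* bytes stepped per loop pass           */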
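The weak save_boot_params added at the end of the patch simply branches back to save_boot_params_ret, so a board that needs the values its boot ROM hands over in x0-x3 can supply a strong definition. A minimal sketch of such an override follows, assuming a hypothetical BOOT_PARAM_SAVE_ADDR scratch location: no stack exists this early in reset, so only registers and a known-good memory region may be touched.

/*
 * Hypothetical board override of the weak save_boot_params hook.
 * BOOT_PARAM_SAVE_ADDR stands in for a board-specific SRAM address;
 * it is not defined by this patch.
 */
ENTRY(save_boot_params)
	ldr	x8, =BOOT_PARAM_SAVE_ADDR	/* assumed scratch area */
	stp	x0, x1, [x8]			/* keep the ROM arguments */
	stp	x2, x3, [x8, #16]
	b	save_boot_params_ret		/* resume the reset path */
ENDPROC(save_boot_params)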