Diffstat (limited to 'src/arch/arm64/armv8')
-rw-r--r--   src/arch/arm64/armv8/cache.c     |  4
-rw-r--r--   src/arch/arm64/armv8/cpu.S       | 13
-rw-r--r--   src/arch/arm64/armv8/exception.c |  8
-rw-r--r--   src/arch/arm64/armv8/mmu.c       | 28
4 files changed, 29 insertions, 24 deletions
diff --git a/src/arch/arm64/armv8/cache.c b/src/arch/arm64/armv8/cache.c
index dbaedec31d..879ef6297b 100644
--- a/src/arch/arm64/armv8/cache.c
+++ b/src/arch/arm64/armv8/cache.c
@@ -148,10 +148,12 @@ void dcache_invalidate_by_mva(void const *addr, size_t len)
  */
 void arch_segment_loaded(uintptr_t start, size_t size, int flags)
 {
-	uint32_t sctlr = raw_read_sctlr_el3();
+	uint32_t sctlr = raw_read_sctlr();
+
 	if (sctlr & SCTLR_C)
 		dcache_clean_by_mva((void *)start, size);
 	else if (sctlr & SCTLR_I)
 		dcache_clean_invalidate_by_mva((void *)start, size);
+
 	icache_invalidate_all();
 }
diff --git a/src/arch/arm64/armv8/cpu.S b/src/arch/arm64/armv8/cpu.S
index 04bf6a7021..a40ee64536 100644
--- a/src/arch/arm64/armv8/cpu.S
+++ b/src/arch/arm64/armv8/cpu.S
@@ -77,10 +77,10 @@ ENDPROC(dcache_clean_invalidate_all)
    memory (e.g. the stack) in between disabling and flushing the cache. */
 ENTRY(mmu_disable)
 	str	x30, [sp, #-0x8]
-	mrs	x0, sctlr_el3
+	mrs	x0, CURRENT_EL(sctlr)
 	mov	x1, #~(SCTLR_C | SCTLR_M)
 	and	x0, x0, x1
-	msr	sctlr_el3, x0
+	msr	CURRENT_EL(sctlr), x0
 	isb
 	bl	dcache_clean_invalidate_all
 	ldr	x30, [sp, #-0x8]
@@ -102,12 +102,11 @@ ENTRY(arm64_init_cpu)
 	/* x22: SCTLR, return address: x23 (callee-saved by subroutine) */
 	mov	x23, x30
 
-	/* TODO: Assert that we always start running at EL3 */
-	mrs	x22, sctlr_el3
+	mrs	x22, CURRENT_EL(sctlr)
 
 	/* Activate ICache already for speed during cache flush below. */
 	orr	x22, x22, #SCTLR_I
-	msr	sctlr_el3, x22
+	msr	CURRENT_EL(sctlr), x22
 	isb
 
 	/* Invalidate dcache */
@@ -116,13 +115,15 @@ ENTRY(arm64_init_cpu)
 	/* Reinitialize SCTLR from scratch to known-good state. This may disable
 	   MMU or DCache. */
 	ldr	w22, =(SCTLR_RES1 | SCTLR_I | SCTLR_SA)
-	msr	sctlr_el3, x22
+	msr	CURRENT_EL(sctlr), x22
 
+#if CONFIG_ARM64_CURRENT_EL == EL3
 	/* Initialize SCR to unmask all interrupts (so that if we get a spurious
 	   IRQ/SError we'll see it when it happens, not hang in BL31). This will
 	   only have an effect after we DAIFClr in exception_init(). */
 	mov	x22, #SCR_RES1 | SCR_IRQ | SCR_FIQ | SCR_EA
 	msr	scr_el3, x22
+#endif
 
 	/* Invalidate icache and TLB for good measure */
 	ic	iallu
diff --git a/src/arch/arm64/armv8/exception.c b/src/arch/arm64/armv8/exception.c
index 8583fd5172..6035d749c7 100644
--- a/src/arch/arm64/armv8/exception.c
+++ b/src/arch/arm64/armv8/exception.c
@@ -51,9 +51,10 @@ static void print_regs(struct exc_state *exc_state)
 	struct regs *regs = &exc_state->regs;
 
 	printk(BIOS_DEBUG, "ELR = 0x%016llx ESR = 0x%08llx\n",
-	       elx->elr, raw_read_esr_el3());
+	       elx->elr, raw_read_esr());
 	printk(BIOS_DEBUG, "FAR = 0x%016llx SPSR = 0x%08llx\n",
-	       raw_read_far_el3(), raw_read_spsr_el3());
+	       raw_read_far(), raw_read_spsr());
+
 	for (i = 0; i < 30; i += 2) {
 		printk(BIOS_DEBUG, "X%02d = 0x%016llx X%02d = 0x%016llx\n",
@@ -173,7 +174,8 @@ static int test_exception_handler(struct exc_state *state, uint64_t vector_id)
 {
 	/* Update instruction pointer to next instruction. */
 	state->elx.elr += sizeof(uint32_t);
-	raw_write_elr_el3(state->elx.elr);
+	raw_write_elr(state->elx.elr);
+
 	return EXC_RET_HANDLED;
 }
diff --git a/src/arch/arm64/armv8/mmu.c b/src/arch/arm64/armv8/mmu.c
index 3cedbcf58f..6105f9a9b2 100644
--- a/src/arch/arm64/armv8/mmu.c
+++ b/src/arch/arm64/armv8/mmu.c
@@ -224,7 +224,7 @@ void mmu_config_range(void *start, size_t size, uint64_t tag)
 
 	/* ARMv8 MMUs snoop L1 data cache, no need to flush it. */
 	dsb();
-	tlbiall_el3();
+	tlbiall();
 	dsb();
 	isb();
 }
@@ -245,15 +245,15 @@ void mmu_init(void)
 	assert((u8 *)root == _ttb);
 
 	/* Initialize TTBR */
-	raw_write_ttbr0_el3((uintptr_t)root);
+	raw_write_ttbr0((uintptr_t)root);
 
 	/* Initialize MAIR indices */
-	raw_write_mair_el3(MAIR_ATTRIBUTES);
+	raw_write_mair(MAIR_ATTRIBUTES);
 
 	/* Initialize TCR flags */
-	raw_write_tcr_el3(TCR_TOSZ | TCR_IRGN0_NM_WBWAC | TCR_ORGN0_NM_WBWAC |
-			  TCR_SH0_IS | TCR_TG0_4KB | TCR_PS_256TB |
-			  TCR_TBI_USED);
+	raw_write_tcr(TCR_TOSZ | TCR_IRGN0_NM_WBWAC | TCR_ORGN0_NM_WBWAC |
+		      TCR_SH0_IS | TCR_TG0_4KB | TCR_PS_256TB |
+		      TCR_TBI_USED);
 }
 
 /* Func : mmu_save_context
@@ -264,10 +264,10 @@ void mmu_save_context(struct mmu_context *mmu_context)
 	assert(mmu_context);
 
 	/* Back-up MAIR_ATTRIBUTES */
-	mmu_context->mair = raw_read_mair_el3();
+	mmu_context->mair = raw_read_mair();
 
 	/* Back-up TCR value */
-	mmu_context->tcr = raw_read_tcr_el3();
+	mmu_context->tcr = raw_read_tcr();
 }
 
 /* Func : mmu_restore_context
@@ -278,13 +278,13 @@ void mmu_restore_context(const struct mmu_context *mmu_context)
 	assert(mmu_context);
 
 	/* Restore TTBR */
-	raw_write_ttbr0_el3((uintptr_t)_ttb);
+	raw_write_ttbr0((uintptr_t)_ttb);
 
 	/* Restore MAIR indices */
-	raw_write_mair_el3(mmu_context->mair);
+	raw_write_mair(mmu_context->mair);
 
 	/* Restore TCR flags */
-	raw_write_tcr_el3(mmu_context->tcr);
+	raw_write_tcr(mmu_context->tcr);
 
 	/* invalidate tlb since ttbr is updated. */
 	tlb_invalidate_all();
@@ -295,8 +295,8 @@ void mmu_enable(void)
 	assert_correct_ttb_mapping(_ttb);
 	assert_correct_ttb_mapping((void *)((uintptr_t)_ettb - 1));
 
-	uint32_t sctlr = raw_read_sctlr_el3();
-	sctlr |= SCTLR_C | SCTLR_M | SCTLR_I;
-	raw_write_sctlr_el3(sctlr);
+	uint32_t sctlr = raw_read_sctlr();
+	raw_write_sctlr(sctlr | SCTLR_C | SCTLR_M | SCTLR_I);
+	isb();
 }
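
Every hunk above follows the same pattern: hard-coded EL3 accessors (raw_read_sctlr_el3(), tlbiall_el3(), mrs x0, sctlr_el3) are replaced by exception-level-agnostic equivalents that resolve to whatever level CONFIG_ARM64_CURRENT_EL selects, and the one register that only exists at EL3 (scr_el3) is fenced off with #if CONFIG_ARM64_CURRENT_EL == EL3. The sketch below shows one way such accessors can be built around a token-pasting CURRENT_EL() macro. It is an illustration only: the helper names (_PASTE_EL, STR) and the fallback definition of CONFIG_ARM64_CURRENT_EL are assumptions for this example, not the actual contents of coreboot's arch/arm64 headers.

/* Illustrative sketch only: generic system-register accessors that dispatch
 * on the configured exception level. Helper names and the fallback below are
 * assumptions for this example, not coreboot's real header definitions. */
#include <stdint.h>

#define EL1 1
#define EL2 2
#define EL3 3

#ifndef CONFIG_ARM64_CURRENT_EL		/* normally provided by Kconfig */
#define CONFIG_ARM64_CURRENT_EL EL3
#endif

/* Paste the EL suffix onto a register name: CURRENT_EL(sctlr) -> sctlr_el3.
 * The extra indirection forces CONFIG_ARM64_CURRENT_EL to expand first. */
#define _PASTE_EL(reg, el)	reg##_el##el
#define _CURRENT_EL(reg, el)	_PASTE_EL(reg, el)
#define CURRENT_EL(reg)		_CURRENT_EL(reg, CONFIG_ARM64_CURRENT_EL)

/* Stringify the pasted name so it can be used inside C inline assembly. */
#define _STR(x)	#x
#define STR(x)	_STR(x)

static inline uint64_t raw_read_sctlr(void)
{
	uint64_t value;
	asm volatile("mrs %0, " STR(CURRENT_EL(sctlr)) : "=r" (value));
	return value;
}

static inline void raw_write_sctlr(uint64_t value)
{
	asm volatile("msr " STR(CURRENT_EL(sctlr)) ", %0" :: "r" (value) : "memory");
}

In preprocessed assembly (.S files) the same CURRENT_EL() macro expands purely textually, which is why cpu.S above can write "mrs x22, CURRENT_EL(sctlr)" directly; only C code needs the stringification step. The #if guard around the SCR write remains because scr_el3 has no equivalent at lower exception levels.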