summaryrefslogtreecommitdiff
path: root/payloads/libpayload/arch
diff options
context:
space:
mode:
authorFurquan Shaikh <furquan@google.com>2014-08-27 12:16:16 -0700
committerPatrick Georgi <pgeorgi@google.com>2015-03-21 13:35:42 +0100
commit635b45d60878887fba7425f61870cf2a9a6f3102 (patch)
tree39eeec5d39550823157390b162bf056e125fbf7e /payloads/libpayload/arch
parent3b1ee0387c70f0b31307f50a5efa5a2b584a3635 (diff)
libpayload arm64: Add library helpers
Add library helpers to access standard arm64 registers. This library also provides functions to directly read/write register based on current el. So, rest of the code doesn't need to keep checking the el and call appropriate function based on that. BUG=chrome-os-partner:31634 BRANCH=None TEST=Libpayload and depthcharge compile successfully for ryu Change-Id: Ibc0ca49f158362d4b7ab2045bf0fbd58ada79360 Signed-off-by: Patrick Georgi <pgeorgi@chromium.org> Original-Commit-Id: 2ca6da580cb51b4c23abdaf04fee2785e5780510 Original-Change-Id: I9b63e04aa26a98bbeb34fdef634776d49454ca8d Original-Signed-off-by: Furquan Shaikh <furquan@google.com> Original-Reviewed-on: https://chromium-review.googlesource.com/214575 Original-Reviewed-by: Aaron Durbin <adurbin@chromium.org> Original-Tested-by: Furquan Shaikh <furquan@chromium.org> Original-Commit-Queue: Furquan Shaikh <furquan@chromium.org> Reviewed-on: http://review.coreboot.org/8784 Tested-by: build bot (Jenkins) Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
Diffstat (limited to 'payloads/libpayload/arch')
-rw-r--r--payloads/libpayload/arch/arm64/Makefile.inc2
-rw-r--r--payloads/libpayload/arch/arm64/cache.c11
-rw-r--r--payloads/libpayload/arch/arm64/lib/Makefile.inc33
-rw-r--r--payloads/libpayload/arch/arm64/lib/cache.c93
-rw-r--r--payloads/libpayload/arch/arm64/lib/clock.c40
-rw-r--r--payloads/libpayload/arch/arm64/lib/pstate.c432
-rw-r--r--payloads/libpayload/arch/arm64/lib/sysctrl.c879
-rw-r--r--payloads/libpayload/arch/arm64/lib/tlb.c83
8 files changed, 1568 insertions, 5 deletions
diff --git a/payloads/libpayload/arch/arm64/Makefile.inc b/payloads/libpayload/arch/arm64/Makefile.inc
index bb09b8f919..1c23a477a7 100644
--- a/payloads/libpayload/arch/arm64/Makefile.inc
+++ b/payloads/libpayload/arch/arm64/Makefile.inc
@@ -30,6 +30,8 @@
CFLAGS += -march=armv8-a
arm64_asm_flags =
+subdirs-y += lib/
+
head.o-y += head.S
libc-y += main.c sysinfo.c
libc-y += timer.c coreboot.c util.S
diff --git a/payloads/libpayload/arch/arm64/cache.c b/payloads/libpayload/arch/arm64/cache.c
index 05d6fb20ca..e89d49e4c8 100644
--- a/payloads/libpayload/arch/arm64/cache.c
+++ b/payloads/libpayload/arch/arm64/cache.c
@@ -34,6 +34,7 @@
#include <stdint.h>
#include <arch/cache.h>
+#include <arch/lib_helpers.h>
void tlb_invalidate_all(void)
{
@@ -60,7 +61,7 @@ unsigned int dcache_line_bytes(void)
if (line_bytes)
return line_bytes;
- ccsidr = read_ccsidr();
+ ccsidr = raw_read_ccsidr_el1();
/* [2:0] - Indicates (Log2(number of words in cache line)) - 2 */
line_bytes = 1 << ((ccsidr & 0x7) + 2); /* words per line */
line_bytes *= sizeof(unsigned int); /* bytes per word */
@@ -126,18 +127,18 @@ void dcache_mmu_disable(void)
uint32_t sctlr;
dcache_clean_invalidate_all();
- sctlr = read_sctlr_el3();
+ sctlr = raw_read_sctlr_el3();
sctlr &= ~(SCTLR_C | SCTLR_M);
- write_sctlr_el3(sctlr);
+ raw_write_sctlr_el3(sctlr);
}
void dcache_mmu_enable(void)
{
uint32_t sctlr;
- sctlr = read_sctlr_el3();
+ sctlr = raw_read_sctlr_el3();
sctlr |= SCTLR_C | SCTLR_M;
- write_sctlr_el3(sctlr);
+ raw_write_sctlr_el3(sctlr);
}
void cache_sync_instructions(void)
diff --git a/payloads/libpayload/arch/arm64/lib/Makefile.inc b/payloads/libpayload/arch/arm64/lib/Makefile.inc
new file mode 100644
index 0000000000..a7b7f576b4
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/Makefile.inc
@@ -0,0 +1,33 @@
+#####################################################################################
+## This file is part of the coreboot project.
+##
+## Copyright 2014 Google Inc.
+##
+## Redistribution and use in source and binary forms, with or without
+## modification, are permitted provided that the following conditions
+## are met:
+## 1. Redistributions of source code must retain the above copyright
+## notice, this list of conditions and the following disclaimer.
+## 2. Redistributions in binary form must reproduce the above copyright
+## notice, this list of conditions and the following disclaimer in the
+## documentation and/or other materials provided with the distribution.
+## 3. The name of the author may not be used to endorse or promote products
+## derived from this software without specific prior written permission.
+##
+## THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+## ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+## FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+## OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+## SUCH DAMAGE.
+##
+#####################################################################################
+
+lib_access = pstate.c sysctrl.c cache.c tlb.c clock.c
+
+libc-y += $(lib_access) \ No newline at end of file
diff --git a/payloads/libpayload/arch/arm64/lib/cache.c b/payloads/libpayload/arch/arm64/lib/cache.c
new file mode 100644
index 0000000000..53dff5759e
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/cache.c
@@ -0,0 +1,93 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * cache.c: Cache Maintenance Instructions
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+void dccisw(uint64_t cisw)
+{
+ __asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) :"memory");
+}
+
+void dccivac(uint64_t civac)
+{
+ __asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) :"memory");
+}
+
+void dccsw(uint64_t csw)
+{
+ __asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) :"memory");
+}
+
+void dccvac(uint64_t cvac)
+{
+ __asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) :"memory");
+}
+
+void dccvau(uint64_t cvau)
+{
+ __asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) :"memory");
+}
+
+void dcisw(uint64_t isw)
+{
+ __asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) :"memory");
+}
+
+void dcivac(uint64_t ivac)
+{
+ __asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) :"memory");
+}
+
+void dczva(uint64_t zva)
+{
+ __asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) :"memory");
+}
+
+void iciallu(void)
+{
+ __asm__ __volatile__("ic iallu\n\t" : : :"memory");
+}
+
+void icialluis(void)
+{
+ __asm__ __volatile__("ic ialluis\n\t" : : :"memory");
+}
+
+void icivau(uint64_t ivau)
+{
+ __asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) :"memory");
+}
+
+
+
diff --git a/payloads/libpayload/arch/arm64/lib/clock.c b/payloads/libpayload/arch/arm64/lib/clock.c
new file mode 100644
index 0000000000..9f06f0828c
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/clock.c
@@ -0,0 +1,40 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * clock.c: Functions for accessing clock and timer related registers
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+void set_cntfrq(uint32_t freq)
+{
+ __asm__ __volatile__("msr cntfrq_el0, %0" :: "r"(freq));
+}
diff --git a/payloads/libpayload/arch/arm64/lib/pstate.c b/payloads/libpayload/arch/arm64/lib/pstate.c
new file mode 100644
index 0000000000..27554f7f1f
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/pstate.c
@@ -0,0 +1,432 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ * pstate.c: This file defines all the library functions for accessing
+ * PSTATE and special purpose registers
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/* CurrentEL */
+uint32_t raw_read_current_el(void)
+{
+ uint32_t current_el;
+
+ __asm__ __volatile__("mrs %0, CurrentEL\n\t" : "=r" (current_el) : : "memory");
+
+ return current_el;
+}
+
+uint32_t get_current_el(void)
+{
+ uint32_t current_el = raw_read_current_el();
+ return ((current_el >> CURRENT_EL_SHIFT) & CURRENT_EL_MASK);
+}
+
+/* DAIF */
+uint32_t raw_read_daif(void)
+{
+ uint32_t daif;
+
+ __asm__ __volatile__("mrs %0, DAIF\n\t" : "=r" (daif) : : "memory");
+
+ return daif;
+}
+
+void raw_write_daif(uint32_t daif)
+{
+ __asm__ __volatile__("msr DAIF, %0\n\t" : : "r" (daif) : "memory");
+}
+
+void enable_debug_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
+}
+
+void enable_serror_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
+}
+
+void enable_irq(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+
+void enable_fiq(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
+}
+
+void disable_debug_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
+}
+
+void disable_serror_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
+}
+
+void disable_irq(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+
+void disable_fiq(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
+}
+
+/* DLR_EL0 */
+uint64_t raw_read_dlr_el0(void)
+{
+ uint64_t dlr_el0;
+
+ __asm__ __volatile__("mrs %0, DLR_EL0\n\t" : "=r" (dlr_el0) : : "memory");
+
+ return dlr_el0;
+}
+void raw_write_dlr_el0(uint64_t dlr_el0)
+{
+ __asm__ __volatile__("msr DLR_EL0, %0\n\t" : : "r" (dlr_el0) : "memory");
+}
+
+/* DSPSR_EL0 */
+uint64_t raw_read_dspsr_el0(void)
+{
+ uint64_t dspsr_el0;
+
+ __asm__ __volatile__("mrs %0, DSPSR_EL0\n\t" : "=r" (dspsr_el0) : : "memory");
+
+ return dspsr_el0;
+}
+void raw_write_dspsr_el0(uint64_t dspsr_el0)
+{
+ __asm__ __volatile__("msr DSPSR_EL0, %0\n\t" : : "r" (dspsr_el0) : "memory");
+}
+
+/* ELR */
+uint64_t raw_read_elr_el1(void)
+{
+ uint64_t elr_el1;
+
+ __asm__ __volatile__("mrs %0, ELR_EL1\n\t" : "=r" (elr_el1) : : "memory");
+
+ return elr_el1;
+}
+
+void raw_write_elr_el1(uint64_t elr_el1)
+{
+ __asm__ __volatile__("msr ELR_EL1, %0\n\t" : : "r" (elr_el1) : "memory");
+}
+
+uint64_t raw_read_elr_el2(void)
+{
+ uint64_t elr_el2;
+
+ __asm__ __volatile__("mrs %0, ELR_EL2\n\t" : "=r" (elr_el2) : : "memory");
+
+ return elr_el2;
+}
+
+void raw_write_elr_el2(uint64_t elr_el2)
+{
+ __asm__ __volatile__("msr ELR_EL2, %0\n\t" : : "r" (elr_el2) : "memory");
+}
+
+uint64_t raw_read_elr_el3(void)
+{
+ uint64_t elr_el3;
+
+ __asm__ __volatile__("mrs %0, ELR_EL3\n\t" : "=r" (elr_el3) : : "memory");
+
+ return elr_el3;
+}
+
+void raw_write_elr_el3(uint64_t elr_el3)
+{
+ __asm__ __volatile__("msr ELR_EL3, %0\n\t" : : "r" (elr_el3) : "memory");
+}
+
+uint64_t raw_read_elr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_elr,elr,uint64_t);
+}
+
+void raw_write_elr_current(uint64_t elr)
+{
+ SWITCH_CASE_WRITE(raw_write_elr,elr);
+}
+
+/* FPCR */
+uint32_t raw_read_fpcr(void)
+{
+ uint32_t fpcr;
+
+ __asm__ __volatile__("mrs %0, FPCR\n\t" : "=r" (fpcr) : : "memory");
+
+ return fpcr;
+}
+
+void raw_write_fpcr(uint32_t fpcr)
+{
+ __asm__ __volatile__("msr FPCR, %0\n\t" : : "r" (fpcr) : "memory");
+}
+
+/* FPSR */
+uint32_t raw_read_fpsr(void)
+{
+ uint32_t fpsr;
+
+ __asm__ __volatile__("mrs %0, FPSR\n\t" : "=r" (fpsr) : : "memory");
+
+ return fpsr;
+}
+
+void raw_write_fpsr(uint32_t fpsr)
+{
+ __asm__ __volatile__("msr FPSR, %0\n\t" : : "r" (fpsr) : "memory");
+}
+
+/* NZCV */
+uint32_t raw_read_nzcv(void)
+{
+ uint32_t nzcv;
+
+ __asm__ __volatile__("mrs %0, NZCV\n\t" : "=r" (nzcv) : : "memory");
+
+ return nzcv;
+}
+
+void raw_write_nzcv(uint32_t nzcv)
+{
+ __asm__ __volatile__("msr NZCV, %0\n\t" : : "r" (nzcv) : "memory");
+}
+
+/* SP */
+uint64_t raw_read_sp_el0(void)
+{
+ uint64_t sp_el0;
+
+ __asm__ __volatile__("mrs %0, SP_EL0\n\t" : "=r" (sp_el0) : : "memory");
+
+ return sp_el0;
+}
+
+void raw_write_sp_el0(uint64_t sp_el0)
+{
+ __asm__ __volatile__("msr SP_EL0, %0\n\t" : : "r" (sp_el0) : "memory");
+}
+
+uint64_t raw_read_sp_el1(void)
+{
+ uint64_t sp_el1;
+
+ __asm__ __volatile__("mrs %0, SP_EL1\n\t" : "=r" (sp_el1) : : "memory");
+
+ return sp_el1;
+}
+
+void raw_write_sp_el1(uint64_t sp_el1)
+{
+ __asm__ __volatile__("msr SP_EL1, %0\n\t" : : "r" (sp_el1) : "memory");
+}
+
+uint64_t raw_read_sp_el2(void)
+{
+ uint64_t sp_el2;
+
+ __asm__ __volatile__("mrs %0, SP_EL2\n\t" : "=r" (sp_el2) : : "memory");
+
+ return sp_el2;
+}
+
+void raw_write_sp_el2(uint64_t sp_el2)
+{
+ __asm__ __volatile__("msr SP_EL2, %0\n\t" : : "r" (sp_el2) : "memory");
+}
+
+/* SPSel */
+uint32_t raw_read_spsel(void)
+{
+ uint32_t spsel;
+
+ __asm__ __volatile__("mrs %0, SPSel\n\t" : "=r" (spsel) : : "memory");
+
+ return spsel;
+}
+
+void raw_write_spsel(uint32_t spsel)
+{
+ __asm__ __volatile__("msr SPSel, %0\n\t" : : "r" (spsel) : "memory");
+}
+
+uint64_t raw_read_sp_el3(void)
+{
+ uint64_t sp_el3;
+ uint32_t spsel;
+
+ spsel = raw_read_spsel();
+ if (!spsel)
+ raw_write_spsel(1);
+
+ __asm__ __volatile__("mov %0, sp\n\t" : "=r" (sp_el3) : : "memory");
+
+ if (!spsel)
+ raw_write_spsel(spsel);
+
+ return sp_el3;
+}
+
+void raw_write_sp_el3(uint64_t sp_el3)
+{
+ uint32_t spsel;
+
+ spsel = raw_read_spsel();
+ if (!spsel)
+ raw_write_spsel(1);
+
+ __asm__ __volatile__("mov sp, %0\n\t" : : "r" (sp_el3) : "memory"); /* sp_el3 is an input: "r", not "=r" */
+
+ if (!spsel)
+ raw_write_spsel(spsel);
+}
+
+/* SPSR */
+uint32_t raw_read_spsr_abt(void)
+{
+ uint32_t spsr_abt;
+
+ __asm__ __volatile__("mrs %0, SPSR_abt\n\t" : "=r" (spsr_abt) : : "memory");
+
+ return spsr_abt;
+}
+
+void raw_write_spsr_abt(uint32_t spsr_abt)
+{
+ __asm__ __volatile__("msr SPSR_abt, %0\n\t" : : "r" (spsr_abt) : "memory");
+}
+
+uint32_t raw_read_spsr_el1(void)
+{
+ uint32_t spsr_el1;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL1\n\t" : "=r" (spsr_el1) : : "memory");
+
+ return spsr_el1;
+}
+
+void raw_write_spsr_el1(uint32_t spsr_el1)
+{
+ __asm__ __volatile__("msr SPSR_EL1, %0\n\t" : : "r" (spsr_el1) : "memory");
+}
+
+uint32_t raw_read_spsr_el2(void)
+{
+ uint32_t spsr_el2;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL2\n\t" : "=r" (spsr_el2) : : "memory");
+
+ return spsr_el2;
+}
+
+void raw_write_spsr_el2(uint32_t spsr_el2)
+{
+ __asm__ __volatile__("msr SPSR_EL2, %0\n\t" : : "r" (spsr_el2) : "memory");
+}
+
+uint32_t raw_read_spsr_el3(void)
+{
+ uint32_t spsr_el3;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL3\n\t" : "=r" (spsr_el3) : : "memory");
+
+ return spsr_el3;
+}
+
+void raw_write_spsr_el3(uint32_t spsr_el3)
+{
+ __asm__ __volatile__("msr SPSR_EL3, %0\n\t" : : "r" (spsr_el3) : "memory");
+}
+
+uint32_t raw_read_spsr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_spsr,spsr,uint32_t);
+}
+
+void raw_write_spsr_current(uint32_t spsr)
+{
+ SWITCH_CASE_WRITE(raw_write_spsr,spsr);
+}
+
+uint32_t raw_read_spsr_fiq(void)
+{
+ uint32_t spsr_fiq;
+
+ __asm__ __volatile__("mrs %0, SPSR_fiq\n\t" : "=r" (spsr_fiq) : : "memory");
+
+ return spsr_fiq;
+}
+
+void raw_write_spsr_fiq(uint32_t spsr_fiq)
+{
+ __asm__ __volatile__("msr SPSR_fiq, %0\n\t" : : "r" (spsr_fiq) : "memory");
+}
+
+uint32_t raw_read_spsr_irq(void)
+{
+ uint32_t spsr_irq;
+
+ __asm__ __volatile__("mrs %0, SPSR_irq\n\t" : "=r" (spsr_irq) : : "memory");
+
+ return spsr_irq;
+}
+
+void raw_write_spsr_irq(uint32_t spsr_irq)
+{
+ __asm__ __volatile__("msr SPSR_irq, %0\n\t" : : "r" (spsr_irq) : "memory");
+}
+
+uint32_t raw_read_spsr_und(void)
+{
+ uint32_t spsr_und;
+
+ __asm__ __volatile__("mrs %0, SPSR_und\n\t" : "=r" (spsr_und) : : "memory");
+
+ return spsr_und;
+}
+
+void raw_write_spsr_und(uint32_t spsr_und)
+{
+ __asm__ __volatile__("msr SPSR_und, %0\n\t" : : "r" (spsr_und) : "memory");
+}
+
diff --git a/payloads/libpayload/arch/arm64/lib/sysctrl.c b/payloads/libpayload/arch/arm64/lib/sysctrl.c
new file mode 100644
index 0000000000..7e06e29658
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/sysctrl.c
@@ -0,0 +1,879 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ * sysctrl.c: This file defines all the library functions for accessing system
+ * control registers in Aarch64
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/* ACTLR */
+uint32_t raw_read_actlr_el1(void)
+{
+ uint32_t actlr_el1;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL1\n\t" : "=r" (actlr_el1) : : "memory");
+
+ return actlr_el1;
+}
+
+void raw_write_actlr_el1(uint32_t actlr_el1)
+{
+ __asm__ __volatile__("msr ACTLR_EL1, %0\n\t" : : "r" (actlr_el1) : "memory");
+}
+
+uint32_t raw_read_actlr_el2(void)
+{
+ uint32_t actlr_el2;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL2\n\t" : "=r" (actlr_el2) : : "memory");
+
+ return actlr_el2;
+}
+
+void raw_write_actlr_el2(uint32_t actlr_el2)
+{
+ __asm__ __volatile__("msr ACTLR_EL2, %0\n\t" : : "r" (actlr_el2) : "memory");
+}
+
+uint32_t raw_read_actlr_el3(void)
+{
+ uint32_t actlr_el3;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL3\n\t" : "=r" (actlr_el3) : : "memory");
+
+ return actlr_el3;
+}
+
+void raw_write_actlr_el3(uint32_t actlr_el3)
+{
+ __asm__ __volatile__("msr ACTLR_EL3, %0\n\t" : : "r" (actlr_el3) : "memory");
+}
+
+uint32_t raw_read_actlr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_actlr,actlr,uint32_t);
+}
+
+void raw_write_actlr_current(uint32_t actlr)
+{
+ SWITCH_CASE_WRITE(raw_write_actlr,actlr);
+}
+
+/* AFSR0 */
+uint32_t raw_read_afsr0_el1(void)
+{
+ uint32_t afsr0_el1;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL1\n\t" : "=r" (afsr0_el1) : : "memory");
+
+ return afsr0_el1;
+}
+
+void raw_write_afsr0_el1(uint32_t afsr0_el1)
+{
+ __asm__ __volatile__("msr AFSR0_EL1, %0\n\t" : : "r" (afsr0_el1) : "memory");
+}
+
+uint32_t raw_read_afsr0_el2(void)
+{
+ uint32_t afsr0_el2;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL2\n\t" : "=r" (afsr0_el2) : : "memory");
+
+ return afsr0_el2;
+}
+
+void raw_write_afsr0_el2(uint32_t afsr0_el2)
+{
+ __asm__ __volatile__("msr AFSR0_EL2, %0\n\t" : : "r" (afsr0_el2) : "memory");
+}
+
+uint32_t raw_read_afsr0_el3(void)
+{
+ uint32_t afsr0_el3;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL3\n\t" : "=r" (afsr0_el3) : : "memory");
+
+ return afsr0_el3;
+}
+
+void raw_write_afsr0_el3(uint32_t afsr0_el3)
+{
+ __asm__ __volatile__("msr AFSR0_EL3, %0\n\t" : : "r" (afsr0_el3) : "memory");
+}
+
+uint32_t raw_read_afsr0_current(void)
+{
+ SWITCH_CASE_READ(raw_read_afsr0,afsr0,uint32_t);
+}
+
+void raw_write_afsr0_current(uint32_t afsr0)
+{
+ SWITCH_CASE_WRITE(raw_write_afsr0,afsr0);
+}
+
+/* AFSR1 */
+uint32_t raw_read_afsr1_el1(void)
+{
+ uint32_t afsr1_el1;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL1\n\t" : "=r" (afsr1_el1) : : "memory");
+
+ return afsr1_el1;
+}
+
+void raw_write_afsr1_el1(uint32_t afsr1_el1)
+{
+ __asm__ __volatile__("msr AFSR1_EL1, %0\n\t" : : "r" (afsr1_el1) : "memory");
+}
+
+uint32_t raw_read_afsr1_el2(void)
+{
+ uint32_t afsr1_el2;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL2\n\t" : "=r" (afsr1_el2) : : "memory");
+
+ return afsr1_el2;
+}
+
+void raw_write_afsr1_el2(uint32_t afsr1_el2)
+{
+ __asm__ __volatile__("msr AFSR1_EL2, %0\n\t" : : "r" (afsr1_el2) : "memory");
+}
+
+uint32_t raw_read_afsr1_el3(void)
+{
+ uint32_t afsr1_el3;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL3\n\t" : "=r" (afsr1_el3) : : "memory");
+
+ return afsr1_el3;
+}
+
+void raw_write_afsr1_el3(uint32_t afsr1_el3)
+{
+ __asm__ __volatile__("msr AFSR1_EL3, %0\n\t" : : "r" (afsr1_el3) : "memory");
+}
+
+uint32_t raw_read_afsr1_current(void)
+{
+ SWITCH_CASE_READ(raw_read_afsr1,afsr1,uint32_t);
+}
+
+void raw_write_afsr1_current(uint32_t afsr1)
+{
+ SWITCH_CASE_WRITE(raw_write_afsr1,afsr1);
+}
+
+/* AIDR */
+uint32_t raw_read_aidr_el1(void)
+{
+ uint32_t aidr_el1;
+
+ __asm__ __volatile__("mrs %0, AIDR_EL1\n\t" : "=r" (aidr_el1) : : "memory");
+
+ return aidr_el1;
+}
+
+/* AMAIR */
+uint64_t raw_read_amair_el1(void)
+{
+ uint64_t amair_el1;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL1\n\t" : "=r" (amair_el1) : : "memory");
+
+ return amair_el1;
+}
+
+void raw_write_amair_el1(uint64_t amair_el1)
+{
+ __asm__ __volatile__("msr AMAIR_EL1, %0\n\t" : : "r" (amair_el1) : "memory");
+}
+
+uint64_t raw_read_amair_el2(void)
+{
+ uint64_t amair_el2;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL2\n\t" : "=r" (amair_el2) : : "memory");
+
+ return amair_el2;
+}
+
+void raw_write_amair_el2(uint64_t amair_el2)
+{
+ __asm__ __volatile__("msr AMAIR_EL2, %0\n\t" : : "r" (amair_el2) : "memory");
+}
+
+uint64_t raw_read_amair_el3(void)
+{
+ uint64_t amair_el3;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL3\n\t" : "=r" (amair_el3) : : "memory");
+
+ return amair_el3;
+}
+
+void raw_write_amair_el3(uint64_t amair_el3)
+{
+ __asm__ __volatile__("msr AMAIR_EL3, %0\n\t" : : "r" (amair_el3) : "memory");
+}
+
+uint64_t raw_read_amair_current(void)
+{
+ SWITCH_CASE_READ(raw_read_amair,amair,uint64_t);
+}
+
+void raw_write_amair_current(uint64_t amair)
+{
+ SWITCH_CASE_WRITE(raw_write_amair,amair);
+}
+
+/* CCSIDR */
+uint32_t raw_read_ccsidr_el1(void)
+{
+ uint32_t ccsidr_el1;
+
+ __asm__ __volatile__("mrs %0, CCSIDR_EL1\n\t" : "=r" (ccsidr_el1) : : "memory");
+
+ return ccsidr_el1;
+}
+
+/* CLIDR */
+uint32_t raw_read_clidr_el1(void)
+{
+ uint32_t clidr_el1;
+
+ __asm__ __volatile__("mrs %0, CLIDR_EL1\n\t" : "=r" (clidr_el1) : : "memory");
+
+ return clidr_el1;
+}
+
+/* CPACR */
+uint32_t raw_read_cpacr_el1(void)
+{
+ uint32_t cpacr_el1;
+
+ __asm__ __volatile__("mrs %0, CPACR_EL1\n\t" : "=r" (cpacr_el1) : : "memory");
+
+ return cpacr_el1;
+}
+
+void raw_write_cpacr_el1(uint32_t cpacr_el1)
+{
+ __asm__ __volatile__("msr CPACR_EL1, %0\n\t" : : "r" (cpacr_el1) : "memory");
+}
+
+/* CPTR */
+uint32_t raw_read_cptr_el2(void)
+{
+ uint32_t cptr_el2;
+
+ __asm__ __volatile__("mrs %0, CPTR_EL2\n\t" : "=r" (cptr_el2) : : "memory");
+
+ return cptr_el2;
+}
+
+void raw_write_cptr_el2(uint32_t cptr_el2)
+{
+ __asm__ __volatile__("msr CPTR_EL2, %0\n\t" : : "r" (cptr_el2) : "memory");
+}
+
+uint32_t raw_read_cptr_el3(void)
+{
+ uint32_t cptr_el3;
+
+ __asm__ __volatile__("mrs %0, CPTR_EL3\n\t" : "=r" (cptr_el3) : : "memory");
+
+ return cptr_el3;
+}
+
+void raw_write_cptr_el3(uint32_t cptr_el3)
+{
+ __asm__ __volatile__("msr CPTR_EL3, %0\n\t" : : "r" (cptr_el3) : "memory");
+}
+
+/* CSSELR */
+uint32_t raw_read_csselr_el1(void)
+{
+ uint32_t csselr_el1;
+
+ __asm__ __volatile__("mrs %0, CSSELR_EL1\n\t" : "=r" (csselr_el1) : : "memory");
+
+ return csselr_el1;
+}
+
+void raw_write_csselr_el1(uint32_t csselr_el1)
+{
+ __asm__ __volatile__("msr CSSELR_EL1, %0\n\t" : : "r" (csselr_el1) : "memory");
+}
+
+/* CTR */
+uint32_t raw_read_ctr_el0(void)
+{
+ uint32_t ctr_el0;
+
+ __asm__ __volatile__("mrs %0, CTR_EL0\n\t" : "=r" (ctr_el0) : : "memory");
+
+ return ctr_el0;
+}
+
+/* ESR */
+uint32_t raw_read_esr_el1(void)
+{
+ uint32_t esr_el1;
+
+ __asm__ __volatile__("mrs %0, ESR_EL1\n\t" : "=r" (esr_el1) : : "memory");
+
+ return esr_el1;
+}
+
+void raw_write_esr_el1(uint32_t esr_el1)
+{
+ __asm__ __volatile__("msr ESR_EL1, %0\n\t" : : "r" (esr_el1) : "memory");
+}
+
+uint32_t raw_read_esr_el2(void)
+{
+ uint32_t esr_el2;
+
+ __asm__ __volatile__("mrs %0, ESR_EL2\n\t" : "=r" (esr_el2) : : "memory");
+
+ return esr_el2;
+}
+
+void raw_write_esr_el2(uint32_t esr_el2)
+{
+ __asm__ __volatile__("msr ESR_EL2, %0\n\t" : : "r" (esr_el2) : "memory");
+}
+
+uint32_t raw_read_esr_el3(void)
+{
+ uint32_t esr_el3;
+
+ __asm__ __volatile__("mrs %0, ESR_EL3\n\t" : "=r" (esr_el3) : : "memory");
+
+ return esr_el3;
+}
+
+void raw_write_esr_el3(uint32_t esr_el3)
+{
+ __asm__ __volatile__("msr ESR_EL3, %0\n\t" : : "r" (esr_el3) : "memory");
+}
+
+uint32_t raw_read_esr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_esr,esr,uint32_t);
+}
+
+void raw_write_esr_current(uint32_t esr)
+{
+ SWITCH_CASE_WRITE(raw_write_esr,esr);
+}
+
+/* FAR */
+uint64_t raw_read_far_el1(void)
+{
+ uint64_t far_el1;
+
+ __asm__ __volatile__("mrs %0, FAR_EL1\n\t" : "=r" (far_el1) : : "memory");
+
+ return far_el1;
+}
+
+void raw_write_far_el1(uint64_t far_el1)
+{
+ __asm__ __volatile__("msr FAR_EL1, %0\n\t" : : "r" (far_el1) : "memory");
+}
+
+uint64_t raw_read_far_el2(void)
+{
+ uint64_t far_el2;
+
+ __asm__ __volatile__("mrs %0, FAR_EL2\n\t" : "=r" (far_el2) : : "memory");
+
+ return far_el2;
+}
+
+void raw_write_far_el2(uint64_t far_el2)
+{
+ __asm__ __volatile__("msr FAR_EL2, %0\n\t" : : "r" (far_el2) : "memory");
+}
+
+uint64_t raw_read_far_el3(void)
+{
+ uint64_t far_el3;
+
+ __asm__ __volatile__("mrs %0, FAR_EL3\n\t" : "=r" (far_el3) : : "memory");
+
+ return far_el3;
+}
+
+void raw_write_far_el3(uint64_t far_el3)
+{
+ __asm__ __volatile__("msr FAR_EL3, %0\n\t" : : "r" (far_el3) : "memory");
+}
+
+uint64_t raw_read_far_current(void)
+{
+ SWITCH_CASE_READ(raw_read_far,far,uint64_t);
+}
+
+void raw_write_far_current(uint64_t far)
+{
+ SWITCH_CASE_WRITE(raw_write_far,far);
+}
+
+/* HCR */
+uint64_t raw_read_hcr_el2(void)
+{
+ uint64_t hcr_el2;
+
+ __asm__ __volatile__("mrs %0, HCR_EL2\n\t" : "=r" (hcr_el2) : : "memory");
+
+ return hcr_el2;
+}
+
+void raw_write_hcr_el2(uint64_t hcr_el2)
+{
+ __asm__ __volatile__("msr HCR_EL2, %0\n\t" : : "r" (hcr_el2) : "memory");
+}
+
+/* AA64PFR0 */
+uint64_t raw_read_aa64pfr0_el1(void)
+{
+ uint64_t aa64pfr0_el1;
+
+ __asm__ __volatile__("mrs %0, ID_AA64PFR0_EL1\n\t" : "=r" (aa64pfr0_el1) : : "memory");
+
+ return aa64pfr0_el1;
+}
+
+/*
+ * MAIR: Memory Attribute Indirection Register (64-bit).  Holds the
+ * eight memory-attribute encodings referenced by page-table AttrIndx
+ * fields.  Raw accessors per EL, plus *_current dispatch variants.
+ */
+uint64_t raw_read_mair_el1(void)
+{
+	uint64_t mair_el1;
+
+	__asm__ __volatile__("mrs %0, MAIR_EL1\n\t" : "=r" (mair_el1) : : "memory");
+
+	return mair_el1;
+}
+
+void raw_write_mair_el1(uint64_t mair_el1)
+{
+	__asm__ __volatile__("msr MAIR_EL1, %0\n\t" : : "r" (mair_el1) : "memory");
+}
+
+uint64_t raw_read_mair_el2(void)
+{
+	uint64_t mair_el2;
+
+	__asm__ __volatile__("mrs %0, MAIR_EL2\n\t" : "=r" (mair_el2) : : "memory");
+
+	return mair_el2;
+}
+
+void raw_write_mair_el2(uint64_t mair_el2)
+{
+	__asm__ __volatile__("msr MAIR_EL2, %0\n\t" : : "r" (mair_el2) : "memory");
+}
+
+uint64_t raw_read_mair_el3(void)
+{
+	uint64_t mair_el3;
+
+	__asm__ __volatile__("mrs %0, MAIR_EL3\n\t" : "=r" (mair_el3) : : "memory");
+
+	return mair_el3;
+}
+
+void raw_write_mair_el3(uint64_t mair_el3)
+{
+	__asm__ __volatile__("msr MAIR_EL3, %0\n\t" : : "r" (mair_el3) : "memory");
+}
+
+/* Read/write MAIR of whichever EL we are currently executing at. */
+uint64_t raw_read_mair_current(void)
+{
+	SWITCH_CASE_READ(raw_read_mair,mair,uint64_t);
+}
+
+void raw_write_mair_current(uint64_t mair)
+{
+	SWITCH_CASE_WRITE(raw_write_mair,mair);
+}
+
+/*
+ * MPIDR_EL1: Multiprocessor Affinity Register.  Read-only core
+ * identification (affinity levels), so only a read accessor exists.
+ */
+uint64_t raw_read_mpidr_el1(void)
+{
+	uint64_t mpidr_el1;
+
+	__asm__ __volatile__("mrs %0, MPIDR_EL1\n\t" : "=r" (mpidr_el1) : : "memory");
+
+	return mpidr_el1;
+}
+
+/*
+ * RMR: Reset Management Register (architecturally 32-bit).
+ *
+ * Fix: the MRS/MSR instructions only accept 64-bit X registers as the
+ * general-purpose operand.  A uint32_t asm operand is allocated a
+ * 32-bit W register, which the assembler rejects, so the asm operands
+ * below are widened to uint64_t locals.  The public uint32_t interface
+ * is unchanged; the upper 32 bits of the transfer are RES0.
+ */
+uint32_t raw_read_rmr_el1(void)
+{
+	uint64_t rmr_el1;
+
+	__asm__ __volatile__("mrs %0, RMR_EL1\n\t" : "=r" (rmr_el1) : : "memory");
+
+	return (uint32_t)rmr_el1;
+}
+
+void raw_write_rmr_el1(uint32_t rmr_el1)
+{
+	uint64_t reg = rmr_el1;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr RMR_EL1, %0\n\t" : : "r" (reg) : "memory");
+}
+
+uint32_t raw_read_rmr_el2(void)
+{
+	uint64_t rmr_el2;
+
+	__asm__ __volatile__("mrs %0, RMR_EL2\n\t" : "=r" (rmr_el2) : : "memory");
+
+	return (uint32_t)rmr_el2;
+}
+
+void raw_write_rmr_el2(uint32_t rmr_el2)
+{
+	uint64_t reg = rmr_el2;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr RMR_EL2, %0\n\t" : : "r" (reg) : "memory");
+}
+
+uint32_t raw_read_rmr_el3(void)
+{
+	uint64_t rmr_el3;
+
+	__asm__ __volatile__("mrs %0, RMR_EL3\n\t" : "=r" (rmr_el3) : : "memory");
+
+	return (uint32_t)rmr_el3;
+}
+
+void raw_write_rmr_el3(uint32_t rmr_el3)
+{
+	uint64_t reg = rmr_el3;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr RMR_EL3, %0\n\t" : : "r" (reg) : "memory");
+}
+
+/* Read/write RMR of whichever EL we are currently executing at.
+ * NOTE(review): RMR_ELx is only implemented at the highest implemented
+ * EL -- confirm callers only use the variant for that level. */
+uint32_t raw_read_rmr_current(void)
+{
+	SWITCH_CASE_READ(raw_read_rmr,rmr,uint32_t);
+}
+
+void raw_write_rmr_current(uint32_t rmr)
+{
+	SWITCH_CASE_WRITE(raw_write_rmr,rmr);
+}
+
+/*
+ * RVBAR: Reset Vector Base Address Register (64-bit).  Raw accessors
+ * per EL plus *_current dispatch variants.
+ * NOTE(review): RVBAR_ELx is architecturally present only at the
+ * highest implemented EL; accessing the others may be undefined --
+ * confirm intended usage at the call sites.
+ */
+uint64_t raw_read_rvbar_el1(void)
+{
+	uint64_t rvbar_el1;
+
+	__asm__ __volatile__("mrs %0, RVBAR_EL1\n\t" : "=r" (rvbar_el1) : : "memory");
+
+	return rvbar_el1;
+}
+
+void raw_write_rvbar_el1(uint64_t rvbar_el1)
+{
+	__asm__ __volatile__("msr RVBAR_EL1, %0\n\t" : : "r" (rvbar_el1) : "memory");
+}
+
+uint64_t raw_read_rvbar_el2(void)
+{
+	uint64_t rvbar_el2;
+
+	__asm__ __volatile__("mrs %0, RVBAR_EL2\n\t" : "=r" (rvbar_el2) : : "memory");
+
+	return rvbar_el2;
+}
+
+void raw_write_rvbar_el2(uint64_t rvbar_el2)
+{
+	__asm__ __volatile__("msr RVBAR_EL2, %0\n\t" : : "r" (rvbar_el2) : "memory");
+}
+
+uint64_t raw_read_rvbar_el3(void)
+{
+	uint64_t rvbar_el3;
+
+	__asm__ __volatile__("mrs %0, RVBAR_EL3\n\t" : "=r" (rvbar_el3) : : "memory");
+
+	return rvbar_el3;
+}
+
+void raw_write_rvbar_el3(uint64_t rvbar_el3)
+{
+	__asm__ __volatile__("msr RVBAR_EL3, %0\n\t" : : "r" (rvbar_el3) : "memory");
+}
+
+/* Read/write RVBAR of whichever EL we are currently executing at. */
+uint64_t raw_read_rvbar_current(void)
+{
+	SWITCH_CASE_READ(raw_read_rvbar,rvbar,uint64_t);
+}
+
+void raw_write_rvbar_current(uint64_t rvbar)
+{
+	SWITCH_CASE_WRITE(raw_write_rvbar,rvbar);
+}
+
+/*
+ * SCR: Secure Configuration Register (architecturally 32-bit),
+ * EL3-only, hence no other-EL or *_current variants.
+ *
+ * Fix: MRS/MSR only accept 64-bit X registers; a uint32_t asm operand
+ * is allocated a W register, which the assembler rejects.  The asm
+ * operands are widened to uint64_t locals while the public uint32_t
+ * interface is kept unchanged (upper 32 bits are RES0).
+ */
+uint32_t raw_read_scr_el3(void)
+{
+	uint64_t scr_el3;
+
+	__asm__ __volatile__("mrs %0, SCR_EL3\n\t" : "=r" (scr_el3) : : "memory");
+
+	return (uint32_t)scr_el3;
+}
+
+void raw_write_scr_el3(uint32_t scr_el3)
+{
+	uint64_t reg = scr_el3;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr SCR_EL3, %0\n\t" : : "r" (reg) : "memory");
+}
+
+/*
+ * SCTLR: System Control Register (architecturally 32-bit; controls
+ * MMU/cache enables among others).
+ *
+ * Fix: MRS/MSR only accept 64-bit X registers; a uint32_t asm operand
+ * is allocated a W register, which the assembler rejects.  The asm
+ * operands are widened to uint64_t locals while the public uint32_t
+ * interface is kept unchanged (upper 32 bits are RES0).
+ */
+uint32_t raw_read_sctlr_el1(void)
+{
+	uint64_t sctlr_el1;
+
+	__asm__ __volatile__("mrs %0, SCTLR_EL1\n\t" : "=r" (sctlr_el1) : : "memory");
+
+	return (uint32_t)sctlr_el1;
+}
+
+void raw_write_sctlr_el1(uint32_t sctlr_el1)
+{
+	uint64_t reg = sctlr_el1;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr SCTLR_EL1, %0\n\t" : : "r" (reg) : "memory");
+}
+
+uint32_t raw_read_sctlr_el2(void)
+{
+	uint64_t sctlr_el2;
+
+	__asm__ __volatile__("mrs %0, SCTLR_EL2\n\t" : "=r" (sctlr_el2) : : "memory");
+
+	return (uint32_t)sctlr_el2;
+}
+
+void raw_write_sctlr_el2(uint32_t sctlr_el2)
+{
+	uint64_t reg = sctlr_el2;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr SCTLR_EL2, %0\n\t" : : "r" (reg) : "memory");
+}
+
+uint32_t raw_read_sctlr_el3(void)
+{
+	uint64_t sctlr_el3;
+
+	__asm__ __volatile__("mrs %0, SCTLR_EL3\n\t" : "=r" (sctlr_el3) : : "memory");
+
+	return (uint32_t)sctlr_el3;
+}
+
+void raw_write_sctlr_el3(uint32_t sctlr_el3)
+{
+	uint64_t reg = sctlr_el3;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr SCTLR_EL3, %0\n\t" : : "r" (reg) : "memory");
+}
+
+/* Read/write SCTLR of whichever EL we are currently executing at. */
+uint32_t raw_read_sctlr_current(void)
+{
+	SWITCH_CASE_READ(raw_read_sctlr,sctlr,uint32_t);
+}
+
+void raw_write_sctlr_current(uint32_t sctlr)
+{
+	SWITCH_CASE_WRITE(raw_write_sctlr,sctlr);
+}
+
+/*
+ * TCR: Translation Control Register.  TCR_EL1 is 64-bit; TCR_EL2 and
+ * TCR_EL3 are architecturally 32-bit, hence the differing C types.
+ *
+ * Fix (EL2/EL3 variants): MRS/MSR only accept 64-bit X registers; a
+ * uint32_t asm operand is allocated a W register, which the assembler
+ * rejects.  Those asm operands are widened to uint64_t locals while
+ * the public uint32_t interfaces stay unchanged (upper bits RES0).
+ */
+uint64_t raw_read_tcr_el1(void)
+{
+	uint64_t tcr_el1;
+
+	__asm__ __volatile__("mrs %0, TCR_EL1\n\t" : "=r" (tcr_el1) : : "memory");
+
+	return tcr_el1;
+}
+
+void raw_write_tcr_el1(uint64_t tcr_el1)
+{
+	__asm__ __volatile__("msr TCR_EL1, %0\n\t" : : "r" (tcr_el1) : "memory");
+}
+
+uint32_t raw_read_tcr_el2(void)
+{
+	uint64_t tcr_el2;
+
+	__asm__ __volatile__("mrs %0, TCR_EL2\n\t" : "=r" (tcr_el2) : : "memory");
+
+	return (uint32_t)tcr_el2;
+}
+
+void raw_write_tcr_el2(uint32_t tcr_el2)
+{
+	uint64_t reg = tcr_el2;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr TCR_EL2, %0\n\t" : : "r" (reg) : "memory");
+}
+
+uint32_t raw_read_tcr_el3(void)
+{
+	uint64_t tcr_el3;
+
+	__asm__ __volatile__("mrs %0, TCR_EL3\n\t" : "=r" (tcr_el3) : : "memory");
+
+	return (uint32_t)tcr_el3;
+}
+
+void raw_write_tcr_el3(uint32_t tcr_el3)
+{
+	uint64_t reg = tcr_el3;	/* widen: MSR needs an X register */
+
+	__asm__ __volatile__("msr TCR_EL3, %0\n\t" : : "r" (reg) : "memory");
+}
+
+/*
+ * TTBR0: Translation Table Base Register 0 (64-bit).  Base address of
+ * the lower-VA-range translation tables.  Raw accessors per EL plus
+ * *_current dispatch variants.
+ */
+uint64_t raw_read_ttbr0_el1(void)
+{
+	uint64_t ttbr0_el1;
+
+	__asm__ __volatile__("mrs %0, TTBR0_EL1\n\t" : "=r" (ttbr0_el1) : : "memory");
+
+	return ttbr0_el1;
+}
+
+void raw_write_ttbr0_el1(uint64_t ttbr0_el1)
+{
+	__asm__ __volatile__("msr TTBR0_EL1, %0\n\t" : : "r" (ttbr0_el1) : "memory");
+}
+
+uint64_t raw_read_ttbr0_el2(void)
+{
+	uint64_t ttbr0_el2;
+
+	__asm__ __volatile__("mrs %0, TTBR0_EL2\n\t" : "=r" (ttbr0_el2) : : "memory");
+
+	return ttbr0_el2;
+}
+
+void raw_write_ttbr0_el2(uint64_t ttbr0_el2)
+{
+	__asm__ __volatile__("msr TTBR0_EL2, %0\n\t" : : "r" (ttbr0_el2) : "memory");
+}
+
+uint64_t raw_read_ttbr0_el3(void)
+{
+	uint64_t ttbr0_el3;
+
+	__asm__ __volatile__("mrs %0, TTBR0_EL3\n\t" : "=r" (ttbr0_el3) : : "memory");
+
+	return ttbr0_el3;
+}
+
+void raw_write_ttbr0_el3(uint64_t ttbr0_el3)
+{
+	__asm__ __volatile__("msr TTBR0_EL3, %0\n\t" : : "r" (ttbr0_el3) : "memory");
+}
+
+/* Read/write TTBR0 of whichever EL we are currently executing at. */
+uint64_t raw_read_ttbr0_current(void)
+{
+	SWITCH_CASE_READ(raw_read_ttbr0,ttbr0,uint64_t);
+}
+
+void raw_write_ttbr0_current(uint64_t ttbr0)
+{
+	SWITCH_CASE_WRITE(raw_write_ttbr0,ttbr0);
+}
+
+/*
+ * TTBR1: Translation Table Base Register 1 (64-bit).  Base address of
+ * the upper-VA-range translation tables; exists only at EL1, hence no
+ * EL2/EL3 or *_current variants.
+ */
+uint64_t raw_read_ttbr1_el1(void)
+{
+	uint64_t ttbr1_el1;
+
+	__asm__ __volatile__("mrs %0, TTBR1_EL1\n\t" : "=r" (ttbr1_el1) : : "memory");
+
+	return ttbr1_el1;
+}
+
+void raw_write_ttbr1_el1(uint64_t ttbr1_el1)
+{
+	__asm__ __volatile__("msr TTBR1_EL1, %0\n\t" : : "r" (ttbr1_el1) : "memory");
+}
+
+/*
+ * VBAR: Vector Base Address Register (64-bit).  Base of the exception
+ * vector table for each EL.  Raw accessors per EL plus *_current
+ * dispatch variants.
+ */
+uint64_t raw_read_vbar_el1(void)
+{
+	uint64_t vbar_el1;
+
+	__asm__ __volatile__("mrs %0, VBAR_EL1\n\t" : "=r" (vbar_el1) : : "memory");
+
+	return vbar_el1;
+}
+
+void raw_write_vbar_el1(uint64_t vbar_el1)
+{
+	__asm__ __volatile__("msr VBAR_EL1, %0\n\t" : : "r" (vbar_el1) : "memory");
+}
+
+uint64_t raw_read_vbar_el2(void)
+{
+	uint64_t vbar_el2;
+
+	__asm__ __volatile__("mrs %0, VBAR_EL2\n\t" : "=r" (vbar_el2) : : "memory");
+
+	return vbar_el2;
+}
+
+void raw_write_vbar_el2(uint64_t vbar_el2)
+{
+	__asm__ __volatile__("msr VBAR_EL2, %0\n\t" : : "r" (vbar_el2) : "memory");
+}
+
+uint64_t raw_read_vbar_el3(void)
+{
+	uint64_t vbar_el3;
+
+	__asm__ __volatile__("mrs %0, VBAR_EL3\n\t" : "=r" (vbar_el3) : : "memory");
+
+	return vbar_el3;
+}
+
+void raw_write_vbar_el3(uint64_t vbar_el3)
+{
+	__asm__ __volatile__("msr VBAR_EL3, %0\n\t" : : "r" (vbar_el3) : "memory");
+}
+
+/* Read/write VBAR of whichever EL we are currently executing at. */
+uint64_t raw_read_vbar_current(void)
+{
+	SWITCH_CASE_READ(raw_read_vbar,vbar,uint64_t);
+}
+
+void raw_write_vbar_current(uint64_t vbar)
+{
+	SWITCH_CASE_WRITE(raw_write_vbar,vbar);
+}
diff --git a/payloads/libpayload/arch/arm64/lib/tlb.c b/payloads/libpayload/arch/arm64/lib/tlb.c
new file mode 100644
index 0000000000..d5afc1796b
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/tlb.c
@@ -0,0 +1,83 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * tlb.c: System instructions for TLB maintenance.
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/*
+ * TLBIALL: invalidate all TLB entries for the given translation regime.
+ * NOTE(review): "tlbi alle1"/"tlbi alle2"/"tlbi alle3" are accessible
+ * only from an EL at or above the one they target; in particular ALLE1
+ * is EL2/EL3-only, so tlbiall_el1() executed at EL1 (e.g. via
+ * tlbiall_current()) would be undefined -- confirm callers' ELs
+ * (VMALLE1 is the EL1-accessible form).
+ */
+void tlbiall_el1(void)
+{
+	__asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
+}
+
+void tlbiall_el2(void)
+{
+	__asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
+}
+
+void tlbiall_el3(void)
+{
+	__asm__ __volatile__("tlbi alle3\n\t" : : : "memory");
+}
+
+/* Invalidate all TLB entries for whichever EL we are executing at,
+ * dispatched via SWITCH_CASE_TLBI from arch/lib_helpers.h. */
+void tlbiall_current(void)
+{
+	SWITCH_CASE_TLBI(tlbiall);
+}
+
+/*
+ * TLBIALLIS: as TLBIALL, but broadcast to the Inner Shareable domain
+ * (all cores in the inner-shareable coherency domain invalidate).
+ * NOTE(review): same EL-accessibility caveat as TLBIALL above; ALLE1IS
+ * is not accessible from EL1 -- confirm callers' ELs.
+ */
+void tlbiallis_el1(void)
+{
+	__asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
+}
+
+void tlbiallis_el2(void)
+{
+	__asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
+}
+
+void tlbiallis_el3(void)
+{
+	__asm__ __volatile__("tlbi alle3is\n\t" : : : "memory");
+}
+
+/* Inner-shareable invalidate-all for the currently executing EL. */
+void tlbiallis_current(void)
+{
+	SWITCH_CASE_TLBI(tlbiallis);
+}
+
+/*
+ * TLBIVAA: invalidate TLB entries for a virtual address, all ASIDs,
+ * EL1 translation regime.
+ * NOTE(review): the TLBI VAAE1 operand encodes VA[55:12] in Xt[43:0];
+ * this passes `va` through unshifted, so callers appear expected to
+ * supply the already-encoded value (va >> 12) -- TODO confirm the
+ * intended contract at the call sites.
+ */
+void tlbivaa_el1(uint64_t va)
+{
+	__asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
+}