path: root/payloads
author     Furquan Shaikh <furquan@google.com>    2014-08-27 12:16:16 -0700
committer  Patrick Georgi <pgeorgi@google.com>    2015-03-21 13:35:42 +0100
commit     635b45d60878887fba7425f61870cf2a9a6f3102 (patch)
tree       39eeec5d39550823157390b162bf056e125fbf7e /payloads
parent     3b1ee0387c70f0b31307f50a5efa5a2b584a3635 (diff)
libpayload arm64: Add library helpers
Add library helpers to access standard arm64 registers. The library also provides functions that directly read/write a register based on the current EL, so the rest of the code doesn't need to keep checking the EL and calling the EL-specific function itself.

BUG=chrome-os-partner:31634
BRANCH=None
TEST=Libpayload and depthcharge compile successfully for ryu

Change-Id: Ibc0ca49f158362d4b7ab2045bf0fbd58ada79360
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: 2ca6da580cb51b4c23abdaf04fee2785e5780510
Original-Change-Id: I9b63e04aa26a98bbeb34fdef634776d49454ca8d
Original-Signed-off-by: Furquan Shaikh <furquan@google.com>
Original-Reviewed-on: https://chromium-review.googlesource.com/214575
Original-Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Original-Tested-by: Furquan Shaikh <furquan@chromium.org>
Original-Commit-Queue: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/8784
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
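For illustration only (not part of the commit): a minimal sketch of how a payload might use these helpers after this change lands. It assumes libpayload's printf() from <libpayload.h>; everything else (get_current_el, raw_read_sctlr_current, tlbiall_current, dsb/isb) is introduced by the diff below.

    #include <libpayload.h>
    #include <stdint.h>
    #include <arch/lib_helpers.h>

    /* Sketch: report the MMU state without open-coding the EL check. */
    static void report_mmu_state(void)
    {
            uint32_t el = get_current_el();            /* returns 1, 2 or 3 */
            uint32_t sctlr = raw_read_sctlr_current(); /* reads SCTLR_ELx for that EL */

            /* SCTLR bit 0 (M) is the MMU enable bit on ARMv8-A. */
            printf("MMU %sabled at EL%u\n", (sctlr & 1) ? "en" : "dis", el);

            tlbiall_current();  /* invalidate all TLB entries for the current EL */
            dsb();
            isb();
    }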
Diffstat (limited to 'payloads')
-rw-r--r--  payloads/libpayload/arch/arm64/Makefile.inc            2
-rw-r--r--  payloads/libpayload/arch/arm64/cache.c                 11
-rw-r--r--  payloads/libpayload/arch/arm64/lib/Makefile.inc        33
-rw-r--r--  payloads/libpayload/arch/arm64/lib/cache.c             93
-rw-r--r--  payloads/libpayload/arch/arm64/lib/clock.c             40
-rw-r--r--  payloads/libpayload/arch/arm64/lib/pstate.c            432
-rw-r--r--  payloads/libpayload/arch/arm64/lib/sysctrl.c           879
-rw-r--r--  payloads/libpayload/arch/arm64/lib/tlb.c               83
-rw-r--r--  payloads/libpayload/include/arm64/arch/cache.h         126
-rw-r--r--  payloads/libpayload/include/arm64/arch/io.h            1
-rw-r--r--  payloads/libpayload/include/arm64/arch/lib_helpers.h   319
11 files changed, 1888 insertions(+), 131 deletions(-)
diff --git a/payloads/libpayload/arch/arm64/Makefile.inc b/payloads/libpayload/arch/arm64/Makefile.inc
index bb09b8f919..1c23a477a7 100644
--- a/payloads/libpayload/arch/arm64/Makefile.inc
+++ b/payloads/libpayload/arch/arm64/Makefile.inc
@@ -30,6 +30,8 @@
CFLAGS += -march=armv8-a
arm64_asm_flags =
+subdirs-y += lib/
+
head.o-y += head.S
libc-y += main.c sysinfo.c
libc-y += timer.c coreboot.c util.S
diff --git a/payloads/libpayload/arch/arm64/cache.c b/payloads/libpayload/arch/arm64/cache.c
index 05d6fb20ca..e89d49e4c8 100644
--- a/payloads/libpayload/arch/arm64/cache.c
+++ b/payloads/libpayload/arch/arm64/cache.c
@@ -34,6 +34,7 @@
#include <stdint.h>
#include <arch/cache.h>
+#include <arch/lib_helpers.h>
void tlb_invalidate_all(void)
{
@@ -60,7 +61,7 @@ unsigned int dcache_line_bytes(void)
if (line_bytes)
return line_bytes;
- ccsidr = read_ccsidr();
+ ccsidr = raw_read_ccsidr_el1();
/* [2:0] - Indicates (Log2(number of words in cache line)) - 2 */
line_bytes = 1 << ((ccsidr & 0x7) + 2); /* words per line */
line_bytes *= sizeof(unsigned int); /* bytes per word */
@@ -126,18 +127,18 @@ void dcache_mmu_disable(void)
uint32_t sctlr;
dcache_clean_invalidate_all();
- sctlr = read_sctlr_el3();
+ sctlr = raw_read_sctlr_el3();
sctlr &= ~(SCTLR_C | SCTLR_M);
- write_sctlr_el3(sctlr);
+ raw_write_sctlr_el3(sctlr);
}
void dcache_mmu_enable(void)
{
uint32_t sctlr;
- sctlr = read_sctlr_el3();
+ sctlr = raw_read_sctlr_el3();
sctlr |= SCTLR_C | SCTLR_M;
- write_sctlr_el3(sctlr);
+ raw_write_sctlr_el3(sctlr);
}
void cache_sync_instructions(void)
diff --git a/payloads/libpayload/arch/arm64/lib/Makefile.inc b/payloads/libpayload/arch/arm64/lib/Makefile.inc
new file mode 100644
index 0000000000..a7b7f576b4
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/Makefile.inc
@@ -0,0 +1,33 @@
+#####################################################################################
+## This file is part of the coreboot project.
+##
+## Copyright 2014 Google Inc.
+##
+## Redistribution and use in source and binary forms, with or without
+## modification, are permitted provided that the following conditions
+## are met:
+## 1. Redistributions of source code must retain the above copyright
+## notice, this list of conditions and the following disclaimer.
+## 2. Redistributions in binary form must reproduce the above copyright
+## notice, this list of conditions and the following disclaimer in the
+## documentation and/or other materials provided with the distribution.
+## 3. The name of the author may not be used to endorse or promote products
+## derived from this software without specific prior written permission.
+##
+## THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+## ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+## FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+## OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+## SUCH DAMAGE.
+##
+#####################################################################################
+
+lib_access = pstate.c sysctrl.c cache.c tlb.c clock.c
+
+libc-y += $(lib_access)
\ No newline at end of file
diff --git a/payloads/libpayload/arch/arm64/lib/cache.c b/payloads/libpayload/arch/arm64/lib/cache.c
new file mode 100644
index 0000000000..53dff5759e
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/cache.c
@@ -0,0 +1,93 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * cache.c: Cache Maintenance Instructions
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+void dccisw(uint64_t cisw)
+{
+ __asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) :"memory");
+}
+
+void dccivac(uint64_t civac)
+{
+ __asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) :"memory");
+}
+
+void dccsw(uint64_t csw)
+{
+ __asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) :"memory");
+}
+
+void dccvac(uint64_t cvac)
+{
+ __asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) :"memory");
+}
+
+void dccvau(uint64_t cvau)
+{
+ __asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) :"memory");
+}
+
+void dcisw(uint64_t isw)
+{
+ __asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) :"memory");
+}
+
+void dcivac(uint64_t ivac)
+{
+ __asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) :"memory");
+}
+
+void dczva(uint64_t zva)
+{
+ __asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) :"memory");
+}
+
+void iciallu(void)
+{
+ __asm__ __volatile__("ic iallu\n\t" : : :"memory");
+}
+
+void icialluis(void)
+{
+ __asm__ __volatile__("ic ialluis\n\t" : : :"memory");
+}
+
+void icivau(uint64_t ivau)
+{
+ __asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) :"memory");
+}
+
+
+
diff --git a/payloads/libpayload/arch/arm64/lib/clock.c b/payloads/libpayload/arch/arm64/lib/clock.c
new file mode 100644
index 0000000000..9f06f0828c
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/clock.c
@@ -0,0 +1,40 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * clock.c: Functions for accessing clock and timer related registers
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+void set_cntfrq(uint32_t freq)
+{
+ __asm__ __volatile__("msr cntfrq_el0, %0" :: "r"(freq));
+}
diff --git a/payloads/libpayload/arch/arm64/lib/pstate.c b/payloads/libpayload/arch/arm64/lib/pstate.c
new file mode 100644
index 0000000000..27554f7f1f
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/pstate.c
@@ -0,0 +1,432 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ * pstate.c: This file defines all the library functions for accessing
+ * PSTATE and special purpose registers
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/* CurrentEL */
+uint32_t raw_read_current_el(void)
+{
+ uint32_t current_el;
+
+ __asm__ __volatile__("mrs %0, CurrentEL\n\t" : "=r" (current_el) : : "memory");
+
+ return current_el;
+}
+
+uint32_t get_current_el(void)
+{
+ uint32_t current_el = raw_read_current_el();
+ return ((current_el >> CURRENT_EL_SHIFT) & CURRENT_EL_MASK);
+}
+
+/* DAIF */
+uint32_t raw_read_daif(void)
+{
+ uint32_t daif;
+
+ __asm__ __volatile__("mrs %0, DAIF\n\t" : "=r" (daif) : : "memory");
+
+ return daif;
+}
+
+void raw_write_daif(uint32_t daif)
+{
+ __asm__ __volatile__("msr DAIF, %0\n\t" : : "r" (daif) : "memory");
+}
+
+void enable_debug_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
+}
+
+void enable_serror_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
+}
+
+void enable_irq(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+
+void enable_fiq(void)
+{
+ __asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
+}
+
+void disable_debug_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
+}
+
+void disable_serror_exceptions(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
+}
+
+void disable_irq(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+
+void disable_fiq(void)
+{
+ __asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
+}
+
+/* DLR_EL0 */
+uint64_t raw_read_dlr_el0(void)
+{
+ uint64_t dlr_el0;
+
+ __asm__ __volatile__("mrs %0, DLR_EL0\n\t" : "=r" (dlr_el0) : : "memory");
+
+ return dlr_el0;
+}
+void raw_write_dlr_el0(uint64_t dlr_el0)
+{
+ __asm__ __volatile__("msr DLR_EL0, %0\n\t" : : "r" (dlr_el0) : "memory");
+}
+
+/* DSPSR_EL0 */
+uint64_t raw_read_dspsr_el0(void)
+{
+ uint64_t dspsr_el0;
+
+ __asm__ __volatile__("mrs %0, DSPSR_EL0\n\t" : "=r" (dspsr_el0) : : "memory");
+
+ return dspsr_el0;
+}
+void raw_write_dspsr_el0(uint64_t dspsr_el0)
+{
+ __asm__ __volatile__("msr DSPSR_EL0, %0\n\t" : : "r" (dspsr_el0) : "memory");
+}
+
+/* ELR */
+uint64_t raw_read_elr_el1(void)
+{
+ uint64_t elr_el1;
+
+ __asm__ __volatile__("mrs %0, ELR_EL1\n\t" : "=r" (elr_el1) : : "memory");
+
+ return elr_el1;
+}
+
+void raw_write_elr_el1(uint64_t elr_el1)
+{
+ __asm__ __volatile__("msr ELR_EL1, %0\n\t" : : "r" (elr_el1) : "memory");
+}
+
+uint64_t raw_read_elr_el2(void)
+{
+ uint64_t elr_el2;
+
+ __asm__ __volatile__("mrs %0, ELR_EL2\n\t" : "=r" (elr_el2) : : "memory");
+
+ return elr_el2;
+}
+
+void raw_write_elr_el2(uint64_t elr_el2)
+{
+ __asm__ __volatile__("msr ELR_EL2, %0\n\t" : : "r" (elr_el2) : "memory");
+}
+
+uint64_t raw_read_elr_el3(void)
+{
+ uint64_t elr_el3;
+
+ __asm__ __volatile__("mrs %0, ELR_EL3\n\t" : "=r" (elr_el3) : : "memory");
+
+ return elr_el3;
+}
+
+void raw_write_elr_el3(uint64_t elr_el3)
+{
+ __asm__ __volatile__("msr ELR_EL3, %0\n\t" : : "r" (elr_el3) : "memory");
+}
+
+uint64_t raw_read_elr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_elr,elr,uint64_t);
+}
+
+void raw_write_elr_current(uint64_t elr)
+{
+ SWITCH_CASE_WRITE(raw_write_elr,elr);
+}
+
+/* FPCR */
+uint32_t raw_read_fpcr(void)
+{
+ uint32_t fpcr;
+
+ __asm__ __volatile__("mrs %0, FPCR\n\t" : "=r" (fpcr) : : "memory");
+
+ return fpcr;
+}
+
+void raw_write_fpcr(uint32_t fpcr)
+{
+ __asm__ __volatile__("msr FPCR, %0\n\t" : : "r" (fpcr) : "memory");
+}
+
+/* FPSR */
+uint32_t raw_read_fpsr(void)
+{
+ uint32_t fpsr;
+
+ __asm__ __volatile__("mrs %0, FPSR\n\t" : "=r" (fpsr) : : "memory");
+
+ return fpsr;
+}
+
+void raw_write_fpsr(uint32_t fpsr)
+{
+ __asm__ __volatile__("msr FPSR, %0\n\t" : : "r" (fpsr) : "memory");
+}
+
+/* NZCV */
+uint32_t raw_read_nzcv(void)
+{
+ uint32_t nzcv;
+
+ __asm__ __volatile__("mrs %0, NZCV\n\t" : "=r" (nzcv) : : "memory");
+
+ return nzcv;
+}
+
+void raw_write_nzcv(uint32_t nzcv)
+{
+ __asm__ __volatile__("msr NZCV, %0\n\t" : : "r" (nzcv) : "memory");
+}
+
+/* SP */
+uint64_t raw_read_sp_el0(void)
+{
+ uint64_t sp_el0;
+
+ __asm__ __volatile__("mrs %0, SP_EL0\n\t" : "=r" (sp_el0) : : "memory");
+
+ return sp_el0;
+}
+
+void raw_write_sp_el0(uint64_t sp_el0)
+{
+ __asm__ __volatile__("msr SP_EL0, %0\n\t" : : "r" (sp_el0) : "memory");
+}
+
+uint64_t raw_read_sp_el1(void)
+{
+ uint64_t sp_el1;
+
+ __asm__ __volatile__("mrs %0, SP_EL1\n\t" : "=r" (sp_el1) : : "memory");
+
+ return sp_el1;
+}
+
+void raw_write_sp_el1(uint64_t sp_el1)
+{
+ __asm__ __volatile__("msr SP_EL1, %0\n\t" : : "r" (sp_el1) : "memory");
+}
+
+uint64_t raw_read_sp_el2(void)
+{
+ uint64_t sp_el2;
+
+ __asm__ __volatile__("mrs %0, SP_EL2\n\t" : "=r" (sp_el2) : : "memory");
+
+ return sp_el2;
+}
+
+void raw_write_sp_el2(uint64_t sp_el2)
+{
+ __asm__ __volatile__("msr SP_EL2, %0\n\t" : : "r" (sp_el2) : "memory");
+}
+
+/* SPSel */
+uint32_t raw_read_spsel(void)
+{
+ uint32_t spsel;
+
+ __asm__ __volatile__("mrs %0, SPSel\n\t" : "=r" (spsel) : : "memory");
+
+ return spsel;
+}
+
+void raw_write_spsel(uint32_t spsel)
+{
+ __asm__ __volatile__("msr SPSel, %0\n\t" : : "r" (spsel) : "memory");
+}
+
+uint64_t raw_read_sp_el3(void)
+{
+ uint64_t sp_el3;
+ uint32_t spsel;
+
+ spsel = raw_read_spsel();
+ if (!spsel)
+ raw_write_spsel(1);
+
+ __asm__ __volatile__("mov %0, sp\n\t" : "=r" (sp_el3) : : "memory");
+
+ if (!spsel)
+ raw_write_spsel(spsel);
+
+ return sp_el3;
+}
+
+void raw_write_sp_el3(uint64_t sp_el3)
+{
+ uint32_t spsel;
+
+ spsel = raw_read_spsel();
+ if (!spsel)
+ raw_write_spsel(1);
+
+ __asm__ __volatile__("mov sp, %0\n\t" : "=r" (sp_el3) : : "memory");
+
+ if (!spsel)
+ raw_write_spsel(spsel);
+}
+
+/* SPSR */
+uint32_t raw_read_spsr_abt(void)
+{
+ uint32_t spsr_abt;
+
+ __asm__ __volatile__("mrs %0, SPSR_abt\n\t" : "=r" (spsr_abt) : : "memory");
+
+ return spsr_abt;
+}
+
+void raw_write_spsr_abt(uint32_t spsr_abt)
+{
+ __asm__ __volatile__("msr SPSR_abt, %0\n\t" : : "r" (spsr_abt) : "memory");
+}
+
+uint32_t raw_read_spsr_el1(void)
+{
+ uint32_t spsr_el1;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL1\n\t" : "=r" (spsr_el1) : : "memory");
+
+ return spsr_el1;
+}
+
+void raw_write_spsr_el1(uint32_t spsr_el1)
+{
+ __asm__ __volatile__("msr SPSR_EL1, %0\n\t" : : "r" (spsr_el1) : "memory");
+}
+
+uint32_t raw_read_spsr_el2(void)
+{
+ uint32_t spsr_el2;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL2\n\t" : "=r" (spsr_el2) : : "memory");
+
+ return spsr_el2;
+}
+
+void raw_write_spsr_el2(uint32_t spsr_el2)
+{
+ __asm__ __volatile__("msr SPSR_EL2, %0\n\t" : : "r" (spsr_el2) : "memory");
+}
+
+uint32_t raw_read_spsr_el3(void)
+{
+ uint32_t spsr_el3;
+
+ __asm__ __volatile__("mrs %0, SPSR_EL3\n\t" : "=r" (spsr_el3) : : "memory");
+
+ return spsr_el3;
+}
+
+void raw_write_spsr_el3(uint32_t spsr_el3)
+{
+ __asm__ __volatile__("msr SPSR_EL3, %0\n\t" : : "r" (spsr_el3) : "memory");
+}
+
+uint32_t raw_read_spsr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_spsr,spsr,uint32_t);
+}
+
+void raw_write_spsr_current(uint32_t spsr)
+{
+ SWITCH_CASE_WRITE(raw_write_spsr,spsr);
+}
+
+uint32_t raw_read_spsr_fiq(void)
+{
+ uint32_t spsr_fiq;
+
+ __asm__ __volatile__("mrs %0, SPSR_fiq\n\t" : "=r" (spsr_fiq) : : "memory");
+
+ return spsr_fiq;
+}
+
+void raw_write_spsr_fiq(uint32_t spsr_fiq)
+{
+ __asm__ __volatile__("msr SPSR_fiq, %0\n\t" : : "r" (spsr_fiq) : "memory");
+}
+
+uint32_t raw_read_spsr_irq(void)
+{
+ uint32_t spsr_irq;
+
+ __asm__ __volatile__("mrs %0, SPSR_irq\n\t" : "=r" (spsr_irq) : : "memory");
+
+ return spsr_irq;
+}
+
+void raw_write_spsr_irq(uint32_t spsr_irq)
+{
+ __asm__ __volatile__("msr SPSR_irq, %0\n\t" : : "r" (spsr_irq) : "memory");
+}
+
+uint32_t raw_read_spsr_und(void)
+{
+ uint32_t spsr_und;
+
+ __asm__ __volatile__("mrs %0, SPSR_und\n\t" : "=r" (spsr_und) : : "memory");
+
+ return spsr_und;
+}
+
+void raw_write_spsr_und(uint32_t spsr_und)
+{
+ __asm__ __volatile__("msr SPSR_und, %0\n\t" : : "r" (spsr_und) : "memory");
+}
+
diff --git a/payloads/libpayload/arch/arm64/lib/sysctrl.c b/payloads/libpayload/arch/arm64/lib/sysctrl.c
new file mode 100644
index 0000000000..7e06e29658
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/sysctrl.c
@@ -0,0 +1,879 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ * sysctrl.c: This file defines all the library functions for accessing system
+ * control registers in AArch64
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/* ACTLR */
+uint32_t raw_read_actlr_el1(void)
+{
+ uint32_t actlr_el1;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL1\n\t" : "=r" (actlr_el1) : : "memory");
+
+ return actlr_el1;
+}
+
+void raw_write_actlr_el1(uint32_t actlr_el1)
+{
+ __asm__ __volatile__("msr ACTLR_EL1, %0\n\t" : : "r" (actlr_el1) : "memory");
+}
+
+uint32_t raw_read_actlr_el2(void)
+{
+ uint32_t actlr_el2;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL2\n\t" : "=r" (actlr_el2) : : "memory");
+
+ return actlr_el2;
+}
+
+void raw_write_actlr_el2(uint32_t actlr_el2)
+{
+ __asm__ __volatile__("msr ACTLR_EL2, %0\n\t" : : "r" (actlr_el2) : "memory");
+}
+
+uint32_t raw_read_actlr_el3(void)
+{
+ uint32_t actlr_el3;
+
+ __asm__ __volatile__("mrs %0, ACTLR_EL3\n\t" : "=r" (actlr_el3) : : "memory");
+
+ return actlr_el3;
+}
+
+void raw_write_actlr_el3(uint32_t actlr_el3)
+{
+ __asm__ __volatile__("msr ACTLR_EL3, %0\n\t" : : "r" (actlr_el3) : "memory");
+}
+
+uint32_t raw_read_actlr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_actlr,actlr,uint32_t);
+}
+
+void raw_write_actlr_current(uint32_t actlr)
+{
+ SWITCH_CASE_WRITE(raw_write_actlr,actlr);
+}
+
+/* AFSR0 */
+uint32_t raw_read_afsr0_el1(void)
+{
+ uint32_t afsr0_el1;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL1\n\t" : "=r" (afsr0_el1) : : "memory");
+
+ return afsr0_el1;
+}
+
+void raw_write_afsr0_el1(uint32_t afsr0_el1)
+{
+ __asm__ __volatile__("msr AFSR0_EL1, %0\n\t" : : "r" (afsr0_el1) : "memory");
+}
+
+uint32_t raw_read_afsr0_el2(void)
+{
+ uint32_t afsr0_el2;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL2\n\t" : "=r" (afsr0_el2) : : "memory");
+
+ return afsr0_el2;
+}
+
+void raw_write_afsr0_el2(uint32_t afsr0_el2)
+{
+ __asm__ __volatile__("msr AFSR0_EL2, %0\n\t" : : "r" (afsr0_el2) : "memory");
+}
+
+uint32_t raw_read_afsr0_el3(void)
+{
+ uint32_t afsr0_el3;
+
+ __asm__ __volatile__("mrs %0, AFSR0_EL3\n\t" : "=r" (afsr0_el3) : : "memory");
+
+ return afsr0_el3;
+}
+
+void raw_write_afsr0_el3(uint32_t afsr0_el3)
+{
+ __asm__ __volatile__("msr AFSR0_EL3, %0\n\t" : : "r" (afsr0_el3) : "memory");
+}
+
+uint32_t raw_read_afsr0_current(void)
+{
+ SWITCH_CASE_READ(raw_read_afsr0,afsr0,uint32_t);
+}
+
+void raw_write_afsr0_current(uint32_t afsr0)
+{
+ SWITCH_CASE_WRITE(raw_write_afsr0,afsr0);
+}
+
+/* AFSR1 */
+uint32_t raw_read_afsr1_el1(void)
+{
+ uint32_t afsr1_el1;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL1\n\t" : "=r" (afsr1_el1) : : "memory");
+
+ return afsr1_el1;
+}
+
+void raw_write_afsr1_el1(uint32_t afsr1_el1)
+{
+ __asm__ __volatile__("msr AFSR1_EL1, %0\n\t" : : "r" (afsr1_el1) : "memory");
+}
+
+uint32_t raw_read_afsr1_el2(void)
+{
+ uint32_t afsr1_el2;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL2\n\t" : "=r" (afsr1_el2) : : "memory");
+
+ return afsr1_el2;
+}
+
+void raw_write_afsr1_el2(uint32_t afsr1_el2)
+{
+ __asm__ __volatile__("msr AFSR1_EL2, %0\n\t" : : "r" (afsr1_el2) : "memory");
+}
+
+uint32_t raw_read_afsr1_el3(void)
+{
+ uint32_t afsr1_el3;
+
+ __asm__ __volatile__("mrs %0, AFSR1_EL3\n\t" : "=r" (afsr1_el3) : : "memory");
+
+ return afsr1_el3;
+}
+
+void raw_write_afsr1_el3(uint32_t afsr1_el3)
+{
+ __asm__ __volatile__("msr AFSR1_EL3, %0\n\t" : : "r" (afsr1_el3) : "memory");
+}
+
+uint32_t raw_read_afsr1_current(void)
+{
+ SWITCH_CASE_READ(raw_read_afsr1,afsr1,uint32_t);
+}
+
+void raw_write_afsr1_current(uint32_t afsr1)
+{
+ SWITCH_CASE_WRITE(raw_write_afsr1,afsr1);
+}
+
+/* AIDR */
+uint32_t raw_read_aidr_el1(void)
+{
+ uint32_t aidr_el1;
+
+ __asm__ __volatile__("mrs %0, AIDR_EL1\n\t" : "=r" (aidr_el1) : : "memory");
+
+ return aidr_el1;
+}
+
+/* AMAIR */
+uint64_t raw_read_amair_el1(void)
+{
+ uint64_t amair_el1;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL1\n\t" : "=r" (amair_el1) : : "memory");
+
+ return amair_el1;
+}
+
+void raw_write_amair_el1(uint64_t amair_el1)
+{
+ __asm__ __volatile__("msr AMAIR_EL1, %0\n\t" : : "r" (amair_el1) : "memory");
+}
+
+uint64_t raw_read_amair_el2(void)
+{
+ uint64_t amair_el2;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL2\n\t" : "=r" (amair_el2) : : "memory");
+
+ return amair_el2;
+}
+
+void raw_write_amair_el2(uint64_t amair_el2)
+{
+ __asm__ __volatile__("msr AMAIR_EL2, %0\n\t" : : "r" (amair_el2) : "memory");
+}
+
+uint64_t raw_read_amair_el3(void)
+{
+ uint64_t amair_el3;
+
+ __asm__ __volatile__("mrs %0, AMAIR_EL3\n\t" : "=r" (amair_el3) : : "memory");
+
+ return amair_el3;
+}
+
+void raw_write_amair_el3(uint64_t amair_el3)
+{
+ __asm__ __volatile__("msr AMAIR_EL3, %0\n\t" : : "r" (amair_el3) : "memory");
+}
+
+uint64_t raw_read_amair_current(void)
+{
+ SWITCH_CASE_READ(raw_read_amair,amair,uint64_t);
+}
+
+void raw_write_amair_current(uint64_t amair)
+{
+ SWITCH_CASE_WRITE(raw_write_amair,amair);
+}
+
+/* CCSIDR */
+uint32_t raw_read_ccsidr_el1(void)
+{
+ uint32_t ccsidr_el1;
+
+ __asm__ __volatile__("mrs %0, CCSIDR_EL1\n\t" : "=r" (ccsidr_el1) : : "memory");
+
+ return ccsidr_el1;
+}
+
+/* CLIDR */
+uint32_t raw_read_clidr_el1(void)
+{
+ uint32_t clidr_el1;
+
+ __asm__ __volatile__("mrs %0, CLIDR_EL1\n\t" : "=r" (clidr_el1) : : "memory");
+
+ return clidr_el1;
+}
+
+/* CPACR */
+uint32_t raw_read_cpacr_el1(void)
+{
+ uint32_t cpacr_el1;
+
+ __asm__ __volatile__("mrs %0, CPACR_EL1\n\t" : "=r" (cpacr_el1) : : "memory");
+
+ return cpacr_el1;
+}
+
+void raw_write_cpacr_el1(uint32_t cpacr_el1)
+{
+ __asm__ __volatile__("msr CPACR_EL1, %0\n\t" : : "r" (cpacr_el1) : "memory");
+}
+
+/* CPTR */
+uint32_t raw_read_cptr_el2(void)
+{
+ uint32_t cptr_el2;
+
+ __asm__ __volatile__("mrs %0, CPTR_EL2\n\t" : "=r" (cptr_el2) : : "memory");
+
+ return cptr_el2;
+}
+
+void raw_write_cptr_el2(uint32_t cptr_el2)
+{
+ __asm__ __volatile__("msr CPTR_EL2, %0\n\t" : : "r" (cptr_el2) : "memory");
+}
+
+uint32_t raw_read_cptr_el3(void)
+{
+ uint32_t cptr_el3;
+
+ __asm__ __volatile__("mrs %0, CPTR_EL3\n\t" : "=r" (cptr_el3) : : "memory");
+
+ return cptr_el3;
+}
+
+void raw_write_cptr_el3(uint32_t cptr_el3)
+{
+ __asm__ __volatile__("msr CPTR_EL3, %0\n\t" : : "r" (cptr_el3) : "memory");
+}
+
+/* CSSELR */
+uint32_t raw_read_csselr_el1(void)
+{
+ uint32_t csselr_el1;
+
+ __asm__ __volatile__("mrs %0, CSSELR_EL1\n\t" : "=r" (csselr_el1) : : "memory");
+
+ return csselr_el1;
+}
+
+void raw_write_csselr_el1(uint32_t csselr_el1)
+{
+ __asm__ __volatile__("msr CSSELR_EL1, %0\n\t" : : "r" (csselr_el1) : "memory");
+}
+
+/* CTR */
+uint32_t raw_read_ctr_el0(void)
+{
+ uint32_t ctr_el0;
+
+ __asm__ __volatile__("mrs %0, CTR_EL0\n\t" : "=r" (ctr_el0) : : "memory");
+
+ return ctr_el0;
+}
+
+/* ESR */
+uint32_t raw_read_esr_el1(void)
+{
+ uint32_t esr_el1;
+
+ __asm__ __volatile__("mrs %0, ESR_EL1\n\t" : "=r" (esr_el1) : : "memory");
+
+ return esr_el1;
+}
+
+void raw_write_esr_el1(uint32_t esr_el1)
+{
+ __asm__ __volatile__("msr ESR_EL1, %0\n\t" : : "r" (esr_el1) : "memory");
+}
+
+uint32_t raw_read_esr_el2(void)
+{
+ uint32_t esr_el2;
+
+ __asm__ __volatile__("mrs %0, ESR_EL2\n\t" : "=r" (esr_el2) : : "memory");
+
+ return esr_el2;
+}
+
+void raw_write_esr_el2(uint32_t esr_el2)
+{
+ __asm__ __volatile__("msr ESR_EL2, %0\n\t" : : "r" (esr_el2) : "memory");
+}
+
+uint32_t raw_read_esr_el3(void)
+{
+ uint32_t esr_el3;
+
+ __asm__ __volatile__("mrs %0, ESR_EL3\n\t" : "=r" (esr_el3) : : "memory");
+
+ return esr_el3;
+}
+
+void raw_write_esr_el3(uint32_t esr_el3)
+{
+ __asm__ __volatile__("msr ESR_EL3, %0\n\t" : : "r" (esr_el3) : "memory");
+}
+
+uint32_t raw_read_esr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_esr,esr,uint32_t);
+}
+
+void raw_write_esr_current(uint32_t esr)
+{
+ SWITCH_CASE_WRITE(raw_write_esr,esr);
+}
+
+/* FAR */
+uint64_t raw_read_far_el1(void)
+{
+ uint64_t far_el1;
+
+ __asm__ __volatile__("mrs %0, FAR_EL1\n\t" : "=r" (far_el1) : : "memory");
+
+ return far_el1;
+}
+
+void raw_write_far_el1(uint64_t far_el1)
+{
+ __asm__ __volatile__("msr FAR_EL1, %0\n\t" : : "r" (far_el1) : "memory");
+}
+
+uint64_t raw_read_far_el2(void)
+{
+ uint64_t far_el2;
+
+ __asm__ __volatile__("mrs %0, FAR_EL2\n\t" : "=r" (far_el2) : : "memory");
+
+ return far_el2;
+}
+
+void raw_write_far_el2(uint64_t far_el2)
+{
+ __asm__ __volatile__("msr FAR_EL2, %0\n\t" : : "r" (far_el2) : "memory");
+}
+
+uint64_t raw_read_far_el3(void)
+{
+ uint64_t far_el3;
+
+ __asm__ __volatile__("mrs %0, FAR_EL3\n\t" : "=r" (far_el3) : : "memory");
+
+ return far_el3;
+}
+
+void raw_write_far_el3(uint64_t far_el3)
+{
+ __asm__ __volatile__("msr FAR_EL3, %0\n\t" : : "r" (far_el3) : "memory");
+}
+
+uint64_t raw_read_far_current(void)
+{
+ SWITCH_CASE_READ(raw_read_far,far,uint64_t);
+}
+
+void raw_write_far_current(uint64_t far)
+{
+ SWITCH_CASE_WRITE(raw_write_far,far);
+}
+
+/* HCR */
+uint64_t raw_read_hcr_el2(void)
+{
+ uint64_t hcr_el2;
+
+ __asm__ __volatile__("mrs %0, HCR_EL2\n\t" : "=r" (hcr_el2) : : "memory");
+
+ return hcr_el2;
+}
+
+void raw_write_hcr_el2(uint64_t hcr_el2)
+{
+ __asm__ __volatile__("msr HCR_EL2, %0\n\t" : : "r" (hcr_el2) : "memory");
+}
+
+/* AA64PFR0 */
+uint64_t raw_read_aa64pfr0_el1(void)
+{
+ uint64_t aa64pfr0_el1;
+
+ __asm__ __volatile__("mrs %0, ID_AA64PFR0_EL1\n\t" : "=r" (aa64pfr0_el1) : : "memory");
+
+ return aa64pfr0_el1;
+}
+
+/* MAIR */
+uint64_t raw_read_mair_el1(void)
+{
+ uint64_t mair_el1;
+
+ __asm__ __volatile__("mrs %0, MAIR_EL1\n\t" : "=r" (mair_el1) : : "memory");
+
+ return mair_el1;
+}
+
+void raw_write_mair_el1(uint64_t mair_el1)
+{
+ __asm__ __volatile__("msr MAIR_EL1, %0\n\t" : : "r" (mair_el1) : "memory");
+}
+
+uint64_t raw_read_mair_el2(void)
+{
+ uint64_t mair_el2;
+
+ __asm__ __volatile__("mrs %0, MAIR_EL2\n\t" : "=r" (mair_el2) : : "memory");
+
+ return mair_el2;
+}
+
+void raw_write_mair_el2(uint64_t mair_el2)
+{
+ __asm__ __volatile__("msr MAIR_EL2, %0\n\t" : : "r" (mair_el2) : "memory");
+}
+
+uint64_t raw_read_mair_el3(void)
+{
+ uint64_t mair_el3;
+
+ __asm__ __volatile__("mrs %0, MAIR_EL3\n\t" : "=r" (mair_el3) : : "memory");
+
+ return mair_el3;
+}
+
+void raw_write_mair_el3(uint64_t mair_el3)
+{
+ __asm__ __volatile__("msr MAIR_EL3, %0\n\t" : : "r" (mair_el3) : "memory");
+}
+
+uint64_t raw_read_mair_current(void)
+{
+ SWITCH_CASE_READ(raw_read_mair,mair,uint64_t);
+}
+
+void raw_write_mair_current(uint64_t mair)
+{
+ SWITCH_CASE_WRITE(raw_write_mair,mair);
+}
+
+/* MPIDR */
+uint64_t raw_read_mpidr_el1(void)
+{
+ uint64_t mpidr_el1;
+
+ __asm__ __volatile__("mrs %0, MPIDR_EL1\n\t" : "=r" (mpidr_el1) : : "memory");
+
+ return mpidr_el1;
+}
+
+/* RMR */
+uint32_t raw_read_rmr_el1(void)
+{
+ uint32_t rmr_el1;
+
+ __asm__ __volatile__("mrs %0, RMR_EL1\n\t" : "=r" (rmr_el1) : : "memory");
+
+ return rmr_el1;
+}
+
+void raw_write_rmr_el1(uint32_t rmr_el1)
+{
+ __asm__ __volatile__("msr RMR_EL1, %0\n\t" : : "r" (rmr_el1) : "memory");
+}
+
+uint32_t raw_read_rmr_el2(void)
+{
+ uint32_t rmr_el2;
+
+ __asm__ __volatile__("mrs %0, RMR_EL2\n\t" : "=r" (rmr_el2) : : "memory");
+
+ return rmr_el2;
+}
+
+void raw_write_rmr_el2(uint32_t rmr_el2)
+{
+ __asm__ __volatile__("msr RMR_EL2, %0\n\t" : : "r" (rmr_el2) : "memory");
+}
+
+uint32_t raw_read_rmr_el3(void)
+{
+ uint32_t rmr_el3;
+
+ __asm__ __volatile__("mrs %0, RMR_EL3\n\t" : "=r" (rmr_el3) : : "memory");
+
+ return rmr_el3;
+}
+
+void raw_write_rmr_el3(uint32_t rmr_el3)
+{
+ __asm__ __volatile__("msr RMR_EL3, %0\n\t" : : "r" (rmr_el3) : "memory");
+}
+
+uint32_t raw_read_rmr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_rmr,rmr,uint32_t);
+}
+
+void raw_write_rmr_current(uint32_t rmr)
+{
+ SWITCH_CASE_WRITE(raw_write_rmr,rmr);
+}
+
+/* RVBAR */
+uint64_t raw_read_rvbar_el1(void)
+{
+ uint64_t rvbar_el1;
+
+ __asm__ __volatile__("mrs %0, RVBAR_EL1\n\t" : "=r" (rvbar_el1) : : "memory");
+
+ return rvbar_el1;
+}
+
+void raw_write_rvbar_el1(uint64_t rvbar_el1)
+{
+ __asm__ __volatile__("msr RVBAR_EL1, %0\n\t" : : "r" (rvbar_el1) : "memory");
+}
+
+uint64_t raw_read_rvbar_el2(void)
+{
+ uint64_t rvbar_el2;
+
+ __asm__ __volatile__("mrs %0, RVBAR_EL2\n\t" : "=r" (rvbar_el2) : : "memory");
+
+ return rvbar_el2;
+}
+
+void raw_write_rvbar_el2(uint64_t rvbar_el2)
+{
+ __asm__ __volatile__("msr RVBAR_EL2, %0\n\t" : : "r" (rvbar_el2) : "memory");
+}
+
+uint64_t raw_read_rvbar_el3(void)
+{
+ uint64_t rvbar_el3;
+
+ __asm__ __volatile__("mrs %0, RVBAR_EL3\n\t" : "=r" (rvbar_el3) : : "memory");
+
+ return rvbar_el3;
+}
+
+void raw_write_rvbar_el3(uint64_t rvbar_el3)
+{
+ __asm__ __volatile__("msr RVBAR_EL3, %0\n\t" : : "r" (rvbar_el3) : "memory");
+}
+
+uint64_t raw_read_rvbar_current(void)
+{
+ SWITCH_CASE_READ(raw_read_rvbar,rvbar,uint64_t);
+}
+
+void raw_write_rvbar_current(uint64_t rvbar)
+{
+ SWITCH_CASE_WRITE(raw_write_rvbar,rvbar);
+}
+
+/* SCR */
+uint32_t raw_read_scr_el3(void)
+{
+ uint32_t scr_el3;
+
+ __asm__ __volatile__("mrs %0, SCR_EL3\n\t" : "=r" (scr_el3) : : "memory");
+
+ return scr_el3;
+}
+
+void raw_write_scr_el3(uint32_t scr_el3)
+{
+ __asm__ __volatile__("msr SCR_EL3, %0\n\t" : : "r" (scr_el3) : "memory");
+}
+
+/* SCTLR */
+uint32_t raw_read_sctlr_el1(void)
+{
+ uint32_t sctlr_el1;
+
+ __asm__ __volatile__("mrs %0, SCTLR_EL1\n\t" : "=r" (sctlr_el1) : : "memory");
+
+ return sctlr_el1;
+}
+
+void raw_write_sctlr_el1(uint32_t sctlr_el1)
+{
+ __asm__ __volatile__("msr SCTLR_EL1, %0\n\t" : : "r" (sctlr_el1) : "memory");
+}
+
+uint32_t raw_read_sctlr_el2(void)
+{
+ uint32_t sctlr_el2;
+
+ __asm__ __volatile__("mrs %0, SCTLR_EL2\n\t" : "=r" (sctlr_el2) : : "memory");
+
+ return sctlr_el2;
+}
+
+void raw_write_sctlr_el2(uint32_t sctlr_el2)
+{
+ __asm__ __volatile__("msr SCTLR_EL2, %0\n\t" : : "r" (sctlr_el2) : "memory");
+}
+
+uint32_t raw_read_sctlr_el3(void)
+{
+ uint32_t sctlr_el3;
+
+ __asm__ __volatile__("mrs %0, SCTLR_EL3\n\t" : "=r" (sctlr_el3) : : "memory");
+
+ return sctlr_el3;
+}
+
+void raw_write_sctlr_el3(uint32_t sctlr_el3)
+{
+ __asm__ __volatile__("msr SCTLR_EL3, %0\n\t" : : "r" (sctlr_el3) : "memory");
+}
+
+uint32_t raw_read_sctlr_current(void)
+{
+ SWITCH_CASE_READ(raw_read_sctlr,sctlr,uint32_t);
+}
+
+void raw_write_sctlr_current(uint32_t sctlr)
+{
+ SWITCH_CASE_WRITE(raw_write_sctlr,sctlr);
+}
+
+/* TCR */
+uint64_t raw_read_tcr_el1(void)
+{
+ uint64_t tcr_el1;
+
+ __asm__ __volatile__("mrs %0, TCR_EL1\n\t" : "=r" (tcr_el1) : : "memory");
+
+ return tcr_el1;
+}
+
+void raw_write_tcr_el1(uint64_t tcr_el1)
+{
+ __asm__ __volatile__("msr TCR_EL1, %0\n\t" : : "r" (tcr_el1) : "memory");
+}
+
+uint32_t raw_read_tcr_el2(void)
+{
+ uint32_t tcr_el2;
+
+ __asm__ __volatile__("mrs %0, TCR_EL2\n\t" : "=r" (tcr_el2) : : "memory");
+
+ return tcr_el2;
+}
+
+void raw_write_tcr_el2(uint32_t tcr_el2)
+{
+ __asm__ __volatile__("msr TCR_EL2, %0\n\t" : : "r" (tcr_el2) : "memory");
+}
+
+uint32_t raw_read_tcr_el3(void)
+{
+ uint32_t tcr_el3;
+
+ __asm__ __volatile__("mrs %0, TCR_EL3\n\t" : "=r" (tcr_el3) : : "memory");
+
+ return tcr_el3;
+}
+
+void raw_write_tcr_el3(uint32_t tcr_el3)
+{
+ __asm__ __volatile__("msr TCR_EL3, %0\n\t" : : "r" (tcr_el3) : "memory");
+}
+
+/* TTBR0 */
+uint64_t raw_read_ttbr0_el1(void)
+{
+ uint64_t ttbr0_el1;
+
+ __asm__ __volatile__("mrs %0, TTBR0_EL1\n\t" : "=r" (ttbr0_el1) : : "memory");
+
+ return ttbr0_el1;
+}
+
+void raw_write_ttbr0_el1(uint64_t ttbr0_el1)
+{
+ __asm__ __volatile__("msr TTBR0_EL1, %0\n\t" : : "r" (ttbr0_el1) : "memory");
+}
+
+uint64_t raw_read_ttbr0_el2(void)
+{
+ uint64_t ttbr0_el2;
+
+ __asm__ __volatile__("mrs %0, TTBR0_EL2\n\t" : "=r" (ttbr0_el2) : : "memory");
+
+ return ttbr0_el2;
+}
+
+void raw_write_ttbr0_el2(uint64_t ttbr0_el2)
+{
+ __asm__ __volatile__("msr TTBR0_EL2, %0\n\t" : : "r" (ttbr0_el2) : "memory");
+}
+
+uint64_t raw_read_ttbr0_el3(void)
+{
+ uint64_t ttbr0_el3;
+
+ __asm__ __volatile__("mrs %0, TTBR0_EL3\n\t" : "=r" (ttbr0_el3) : : "memory");
+
+ return ttbr0_el3;
+}
+
+void raw_write_ttbr0_el3(uint64_t ttbr0_el3)
+{
+ __asm__ __volatile__("msr TTBR0_EL3, %0\n\t" : : "r" (ttbr0_el3) : "memory");
+}
+
+uint64_t raw_read_ttbr0_current(void)
+{
+ SWITCH_CASE_READ(raw_read_ttbr0,ttbr0,uint64_t);
+}
+
+void raw_write_ttbr0_current(uint64_t ttbr0)
+{
+ SWITCH_CASE_WRITE(raw_write_ttbr0,ttbr0);
+}
+
+/* TTBR1 */
+uint64_t raw_read_ttbr1_el1(void)
+{
+ uint64_t ttbr1_el1;
+
+ __asm__ __volatile__("mrs %0, TTBR1_EL1\n\t" : "=r" (ttbr1_el1) : : "memory");
+
+ return ttbr1_el1;
+}
+
+void raw_write_ttbr1_el1(uint64_t ttbr1_el1)
+{
+ __asm__ __volatile__("msr TTBR1_EL1, %0\n\t" : : "r" (ttbr1_el1) : "memory");
+}
+
+/* VBAR */
+uint64_t raw_read_vbar_el1(void)
+{
+ uint64_t vbar_el1;
+
+ __asm__ __volatile__("mrs %0, VBAR_EL1\n\t" : "=r" (vbar_el1) : : "memory");
+
+ return vbar_el1;
+}
+
+void raw_write_vbar_el1(uint64_t vbar_el1)
+{
+ __asm__ __volatile__("msr VBAR_EL1, %0\n\t" : : "r" (vbar_el1) : "memory");
+}
+
+uint64_t raw_read_vbar_el2(void)
+{
+ uint64_t vbar_el2;
+
+ __asm__ __volatile__("mrs %0, VBAR_EL2\n\t" : "=r" (vbar_el2) : : "memory");
+
+ return vbar_el2;
+}
+
+void raw_write_vbar_el2(uint64_t vbar_el2)
+{
+ __asm__ __volatile__("msr VBAR_EL2, %0\n\t" : : "r" (vbar_el2) : "memory");
+}
+
+uint64_t raw_read_vbar_el3(void)
+{
+ uint64_t vbar_el3;
+
+ __asm__ __volatile__("mrs %0, VBAR_EL3\n\t" : "=r" (vbar_el3) : : "memory");
+
+ return vbar_el3;
+}
+
+void raw_write_vbar_el3(uint64_t vbar_el3)
+{
+ __asm__ __volatile__("msr VBAR_EL3, %0\n\t" : : "r" (vbar_el3) : "memory");
+}
+
+uint64_t raw_read_vbar_current(void)
+{
+ SWITCH_CASE_READ(raw_read_vbar,vbar,uint64_t);
+}
+
+void raw_write_vbar_current(uint64_t vbar)
+{
+ SWITCH_CASE_WRITE(raw_write_vbar,vbar);
+}
diff --git a/payloads/libpayload/arch/arm64/lib/tlb.c b/payloads/libpayload/arch/arm64/lib/tlb.c
new file mode 100644
index 0000000000..d5afc1796b
--- /dev/null
+++ b/payloads/libpayload/arch/arm64/lib/tlb.c
@@ -0,0 +1,83 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * tlb.c: System instructions for TLB maintenance.
+ * Reference: ARM Architecture Reference Manual, ARMv8-A edition
+ */
+
+#include <stdint.h>
+
+#include <arch/lib_helpers.h>
+
+/* TLBIALL */
+void tlbiall_el1(void)
+{
+ __asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
+}
+
+void tlbiall_el2(void)
+{
+ __asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
+}
+
+void tlbiall_el3(void)
+{
+ __asm__ __volatile__("tlbi alle3\n\t" : : : "memory");
+}
+
+void tlbiall_current(void)
+{
+ SWITCH_CASE_TLBI(tlbiall);
+}
+
+/* TLBIALLIS */
+void tlbiallis_el1(void)
+{
+ __asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
+}
+
+void tlbiallis_el2(void)
+{
+ __asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
+}
+
+void tlbiallis_el3(void)
+{
+ __asm__ __volatile__("tlbi alle3is\n\t" : : : "memory");
+}
+
+void tlbiallis_current(void)
+{
+ SWITCH_CASE_TLBI(tlbiallis);
+}
+
+/* TLBIVAA */
+void tlbivaa_el1(uint64_t va)
+{
+ __asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
+}
diff --git a/payloads/libpayload/include/arm64/arch/cache.h b/payloads/libpayload/include/arm64/arch/cache.h
index f03d09b9f9..5a0b3b03ba 100644
--- a/payloads/libpayload/include/arm64/arch/cache.h
+++ b/payloads/libpayload/include/arm64/arch/cache.h
@@ -68,132 +68,6 @@
/* Bit 31 is reserved */
/*
- * Sync primitives
- */
-/* data memory barrier */
-#define dmb_opt(opt) asm volatile ("dmb " #opt : : : "memory")
-/* data sync barrier */
-#define dsb_opt(opt) asm volatile ("dsb " #opt : : : "memory")
-/* instruction sync barrier */
-#define isb_opt(opt) asm volatile ("isb " #opt : : : "memory")
-
-#define dmb() dmb_opt(sy)
-#define dsb() dsb_opt(sy)
-#define isb() isb_opt()
-
-/*
- * Low-level TLB maintenance operations
- */
-
-/* invalidate entire unified TLB */
-static inline void tlbiall_el3(void)
-{
- asm volatile ("tlbi alle3" : : : "memory");
-}
-
-/* invalidate unified TLB by VA, all ASID */
-static inline void tlbivaa(unsigned long va)
-{
- asm volatile ("tlbi vaae1, %0" : : "r" (va) : "memory");
-}
-
-/*
- * Low-level cache maintenance operations
- */
-
-/* data cache clean and invalidate by VA to PoC */
-static inline void dccivac(unsigned long va)
-{
- asm volatile ("dc civac, %0" : : "r" (va) : "memory");
-}
-
-/* data cache invalidate by set/way */
-static inline void dccisw(uint32_t val)
-{
- asm volatile ("dc cisw, %0" : : "r" (val) : "memory");
-}
-
-/* data cache clean by VA to PoC */
-static inline void dccvac(unsigned long va)
-{
- asm volatile ("dc cvac, %0" : : "r" (va) : "memory");
-}
-
-/* data cache clean by set/way */
-static inline void dccsw(uint32_t val)
-{
- asm volatile ("dc csw, %0" : : "r" (val) : "memory");
-}
-
-/* data cache invalidate by VA to PoC */
-static inline void dcivac(unsigned long va)
-{
- asm volatile ("dc ivac, %0" : : "r" (va) : "memory");
-}
-
-/* data cache invalidate by set/way */
-static inline void dcisw(uint32_t val)
-{
- asm volatile ("dc isw, %0" : : "r" (val) : "memory");
-}
-
-/* instruction cache invalidate all by PoU */
-static inline void iciallu(void)
-{
- asm volatile ("ic iallu" : : "r" (0));
-}
-
-/* read cache level ID register (CLIDR) */
-static inline uint32_t read_clidr(void)
-{
- uint32_t val = 0;
- asm volatile ("mrs %0, clidr_el1" : "=r" (val));
- return val;
-}
-
-/* read cache size ID register register (CCSIDR) */
-static inline uint32_t read_ccsidr(void)
-{
- uint32_t val = 0;
- asm volatile ("mrs %0, ccsidr_el1" : "=r" (val));
- return val;
-}
-
-/* read cache size selection register (CSSELR) */
-static inline uint32_t read_csselr(void)
-{
- uint32_t val = 0;
- asm volatile ("mrs %0, csselr_el1" : "=r" (val));
- return val;
-}
-
-/* write to cache size selection register (CSSELR) */
-static inline void write_csselr(uint32_t val)
-{
- /*
- * Bits [3:1] - Cache level + 1 (0b000 = L1, 0b110 = L7, 0b111 is rsvd)
- * Bit 0 - 0 = data or unified cache, 1 = instruction cache
- */
- asm volatile ("msr csselr_el1, %0" : : "r" (val));
- isb(); /* ISB to sync the change to CCSIDR */
-}
-
-/* read system control register (SCTLR) */
-static inline uint32_t read_sctlr_el3(void)
-{
- uint32_t val;
- asm volatile ("mrs %0, sctlr_el3" : "=r" (val));
- return val;
-}
-
-/* write system control register (SCTLR) */
-static inline void write_sctlr_el3(uint32_t val)
-{
- asm volatile ("msr sctlr_el3, %0" : : "r" (val) : "cc");
- isb();
-}
-
-/*
* Cache maintenance API
*/
diff --git a/payloads/libpayload/include/arm64/arch/io.h b/payloads/libpayload/include/arm64/arch/io.h
index df3a0d51ae..f8be1aec95 100644
--- a/payloads/libpayload/include/arm64/arch/io.h
+++ b/payloads/libpayload/include/arm64/arch/io.h
@@ -33,6 +33,7 @@
#include <stdint.h>
#include <arch/cache.h>
+#include <arch/lib_helpers.h>
static inline uint8_t readb(volatile const void *_a)
{
diff --git a/payloads/libpayload/include/arm64/arch/lib_helpers.h b/payloads/libpayload/include/arm64/arch/lib_helpers.h
new file mode 100644
index 0000000000..f8d95468e1
--- /dev/null
+++ b/payloads/libpayload/include/arm64/arch/lib_helpers.h
@@ -0,0 +1,319 @@
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * lib_helpers.h: All library function prototypes and macros are defined in this
+ * file.
+ */
+
+#ifndef __ARCH_LIB_HELPERS_H__
+#define __ARCH_LIB_HELPERS_H__
+
+#define EL0 0
+#define EL1 1
+#define EL2 2
+#define EL3 3
+
+#define CURRENT_EL_MASK 0x3
+#define CURRENT_EL_SHIFT 2
+
+#define DAIF_DBG_BIT (1<<3)
+#define DAIF_ABT_BIT (1<<2)
+#define DAIF_IRQ_BIT (1<<1)
+#define DAIF_FIQ_BIT (1<<0)
+
+#define SWITCH_CASE_READ(func,var,type) do { \
+ type var = -1; \
+ uint8_t current_el = get_current_el(); \
+ switch(current_el) { \
+ case EL1: \
+ var = func##_el1(); \
+ break; \
+ case EL2: \
+ var = func##_el2(); \
+ break; \
+ case EL3: \
+ var = func##_el3(); \
+ break; \
+ } \
+ return var; \
+ } while(0)
+
+#define SWITCH_CASE_WRITE(func,var) do { \
+ uint8_t current_el = get_current_el(); \
+ switch(current_el) { \
+ case EL1: \
+ func##_el1(var); \
+ break; \
+ case EL2: \
+ func##_el2(var); \
+ break; \
+ case EL3: \
+ func##_el3(var); \
+ break; \
+ } \
+ } while(0)
+
+#define SWITCH_CASE_TLBI(func) do { \
+ uint8_t current_el = get_current_el(); \
+ switch(current_el) { \
+ case EL1: \
+ func##_el1(); \
+ break; \
+ case EL2: \
+ func##_el2(); \
+ break; \
+ case EL3: \
+ func##_el3(); \
+ break; \
+ } \
+ } while(0)
+
+/* PSTATE and special purpose register access functions */
+uint32_t raw_read_current_el(void);
+uint32_t get_current_el(void);
+uint32_t raw_read_daif(void);
+void raw_write_daif(uint32_t daif);
+void enable_debug_exceptions(void);
+void enable_serror_exceptions(void);
+void enable_irq(void);
+void enable_fiq(void);
+void disable_debug_exceptions(void);
+void disable_serror_exceptions(void);
+void disable_irq(void);
+void disable_fiq(void);
+uint64_t raw_read_dlr_el0(void);
+void raw_write_dlr_el0(uint64_t dlr_el0);
+uint64_t raw_read_dspsr_el0(void);
+void raw_write_dspsr_el0(uint64_t dspsr_el0);
+uint64_t raw_read_elr_el1(void);
+void raw_write_elr_el1(uint64_t elr_el1);
+uint64_t raw_read_elr_el2(void);
+void raw_write_elr_el2(uint64_t elr_el2);
+uint64_t raw_read_elr_el3(void);
+void raw_write_elr_el3(uint64_t elr_el3);
+uint64_t raw_read_elr_current(void);
+void raw_write_elr_current(uint64_t elr);
+uint32_t raw_read_fpcr(void);
+void raw_write_fpcr(uint32_t fpcr);
+uint32_t raw_read_fpsr(void);
+void raw_write_fpsr(uint32_t fpsr);
+uint32_t raw_read_nzcv(void);
+void raw_write_nzcv(uint32_t nzcv);
+uint64_t raw_read_sp_el0(void);
+void raw_write_sp_el0(uint64_t sp_el0);
+uint64_t raw_read_sp_el1(void);
+void raw_write_sp_el1(uint64_t sp_el1);
+uint64_t raw_read_sp_el2(void);
+void raw_write_sp_el2(uint64_t sp_el2);
+uint32_t raw_read_spsel(void);
+void raw_write_spsel(uint32_t spsel);
+uint64_t raw_read_sp_el3(void);
+void raw_write_sp_el3(uint64_t sp_el3);
+uint32_t raw_read_spsr_abt(void);
+void raw_write_spsr_abt(uint32_t spsr_abt);
+uint32_t raw_read_spsr_el1(void);
+void raw_write_spsr_el1(uint32_t spsr_el1);
+uint32_t raw_read_spsr_el2(void);
+void raw_write_spsr_el2(uint32_t spsr_el2);
+uint32_t raw_read_spsr_el3(void);
+void raw_write_spsr_el3(uint32_t spsr_el3);
+uint32_t raw_read_spsr_current(void);
+void raw_write_spsr_current(uint32_t spsr);
+uint32_t raw_read_spsr_fiq(void);
+void raw_write_spsr_fiq(uint32_t spsr_fiq);
+uint32_t raw_read_spsr_irq(void);
+void raw_write_spsr_irq(uint32_t spsr_irq);
+uint32_t raw_read_spsr_und(void);
+void raw_write_spsr_und(uint32_t spsr_und);
+
+/* System control register access */
+uint32_t raw_read_actlr_el1(void);
+void raw_write_actlr_el1(uint32_t actlr_el1);
+uint32_t raw_read_actlr_el2(void);
+void raw_write_actlr_el2(uint32_t actlr_el2);
+uint32_t raw_read_actlr_el3(void);
+void raw_write_actlr_el3(uint32_t actlr_el3);
+uint32_t raw_read_actlr_current(void);
+void raw_write_actlr_current(uint32_t actlr);
+uint32_t raw_read_afsr0_el1(void);
+void raw_write_afsr0_el1(uint32_t afsr0_el1);
+uint32_t raw_read_afsr0_el2(void);
+void raw_write_afsr0_el2(uint32_t afsr0_el2);
+uint32_t raw_read_afsr0_el3(void);
+void raw_write_afsr0_el3(uint32_t afsr0_el3);
+uint32_t raw_read_afsr0_current(void);
+void raw_write_afsr0_current(uint32_t afsr0);
+uint32_t raw_read_afsr1_el1(void);
+void raw_write_afsr1_el1(uint32_t afsr1_el1);
+uint32_t raw_read_afsr1_el2(void);
+void raw_write_afsr1_el2(uint32_t afsr1_el2);
+uint32_t raw_read_afsr1_el3(void);
+void raw_write_afsr1_el3(uint32_t afsr1_el3);
+uint32_t raw_read_afsr1_current(void);
+void raw_write_afsr1_current(uint32_t afsr1);
+uint32_t raw_read_aidr_el1(void);
+uint64_t raw_read_amair_el1(void);
+void raw_write_amair_el1(uint64_t amair_el1);
+uint64_t raw_read_amair_el2(void);
+void raw_write_amair_el2(uint64_t amair_el2);
+uint64_t raw_read_amair_el3(void);
+void raw_write_amair_el3(uint64_t amair_el3);
+uint64_t raw_read_amair_current(void);
+void raw_write_amair_current(uint64_t amair);
+uint32_t raw_read_ccsidr_el1(void);
+uint32_t raw_read_clidr_el1(void);
+uint32_t raw_read_cpacr_el1(void);
+void raw_write_cpacr_el1(uint32_t cpacr_el1);
+uint32_t raw_read_cptr_el2(void);
+void raw_write_cptr_el2(uint32_t cptr_el2);
+uint32_t raw_read_cptr_el3(void);
+void raw_write_cptr_el3(uint32_t cptr_el3);
+uint32_t raw_read_cptr_current(void);
+void raw_write_cptr_current(uint32_t cptr);
+uint32_t raw_read_csselr_el1(void);
+void raw_write_csselr_el1(uint32_t csselr_el1);
+uint32_t raw_read_ctr_el0(void);
+uint32_t raw_read_esr_el1(void);
+void raw_write_esr_el1(uint32_t esr_el1);
+uint32_t raw_read_esr_el2(void);
+void raw_write_esr_el2(uint32_t esr_el2);
+uint32_t raw_read_esr_el3(void);
+void raw_write_esr_el3(uint32_t esr_el3);
+uint32_t raw_read_esr_current(void);
+void raw_write_esr_current(uint32_t esr);
+uint64_t raw_read_far_el1(void);
+void raw_write_far_el1(uint64_t far_el1);
+uint64_t raw_read_far_el2(void);
+void raw_write_far_el2(uint64_t far_el2);
+uint64_t raw_read_far_el3(void);
+void raw_write_far_el3(uint64_t far_el3);
+uint64_t raw_read_far_current(void);
+void raw_write_far_current(uint64_t far);
+uint64_t raw_read_hcr_el2(void);
+void raw_write_hcr_el2(uint64_t hcr_el2);
+uint64_t raw_read_aa64pfr0_el1(void);
+uint64_t raw_read_mair_el1(void);
+void raw_write_mair_el1(uint64_t mair_el1);
+uint64_t raw_read_mair_el2(void);
+void raw_write_mair_el2(uint64_t mair_el2);
+uint64_t raw_read_mair_el3(void);
+void raw_write_mair_el3(uint64_t mair_el3);
+uint64_t raw_read_mair_current(void);
+void raw_write_mair_current(uint64_t mair);
+uint64_t raw_read_mpidr_el1(void);
+uint32_t raw_read_rmr_el1(void);
+void raw_write_rmr_el1(uint32_t rmr_el1);
+uint32_t raw_read_rmr_el2(void);
+void raw_write_rmr_el2(uint32_t rmr_el2);
+uint32_t raw_read_rmr_el3(void);
+void raw_write_rmr_el3(uint32_t rmr_el3);
+uint32_t raw_read_rmr_current(void);
+void raw_write_rmr_current(uint32_t rmr);
+uint64_t raw_read_rvbar_el1(void);
+void raw_write_rvbar_el1(uint64_t rvbar_el1);
+uint64_t raw_read_rvbar_el2(void);
+void raw_write_rvbar_el2(uint64_t rvbar_el2);
+uint64_t raw_read_rvbar_el3(void);
+void raw_write_rvbar_el3(uint64_t rvbar_el3);
+uint64_t raw_read_rvbar_current(void);
+void raw_write_rvbar_current(uint64_t rvbar);
+uint32_t raw_read_scr_el3(void);
+void raw_write_scr_el3(uint32_t scr_el3);
+uint32_t raw_read_sctlr_el1(void);
+void raw_write_sctlr_el1(uint32_t sctlr_el1);
+uint32_t raw_read_sctlr_el2(void);
+void raw_write_sctlr_el2(uint32_t sctlr_el2);
+uint32_t raw_read_sctlr_el3(void);
+void raw_write_sctlr_el3(uint32_t sctlr_el3);
+uint32_t raw_read_sctlr_current(void);
+void raw_write_sctlr_current(uint32_t sctlr);
+uint64_t raw_read_tcr_el1(void);
+void raw_write_tcr_el1(uint64_t tcr_el1);
+uint32_t raw_read_tcr_el2(void);
+void raw_write_tcr_el2(uint32_t tcr_el2);
+uint32_t raw_read_tcr_el3(void);
+void raw_write_tcr_el3(uint32_t tcr_el3);
+uint64_t raw_read_ttbr0_el1(void);
+void raw_write_ttbr0_el1(uint64_t ttbr0_el1);
+uint64_t raw_read_ttbr0_el2(void);
+void raw_write_ttbr0_el2(uint64_t ttbr0_el2);
+uint64_t raw_read_ttbr0_el3(void);
+void raw_write_ttbr0_el3(uint64_t ttbr0_el3);
+uint64_t raw_read_ttbr0_current(void);
+void raw_write_ttbr0_current(uint64_t ttbr0);
+uint64_t raw_read_ttbr1_el1(void);
+void raw_write_ttbr1_el1(uint64_t ttbr1_el1);
+uint64_t raw_read_vbar_el1(void);
+void raw_write_vbar_el1(uint64_t vbar_el1);
+uint64_t raw_read_vbar_el2(void);
+void raw_write_vbar_el2(uint64_t vbar_el2);
+uint64_t raw_read_vbar_el3(void);
+void raw_write_vbar_el3(uint64_t vbar_el3);
+uint64_t raw_read_vbar_current(void);
+void raw_write_vbar_current(uint64_t vbar);
+
+/* Cache maintenance system instructions */
+void dccisw(uint64_t cisw);
+void dccivac(uint64_t civac);
+void dccsw(uint64_t csw);
+void dccvac(uint64_t cvac);
+void dccvau(uint64_t cvau);
+void dcisw(uint64_t isw);
+void dcivac(uint64_t ivac);
+void dczva(uint64_t zva);
+void iciallu(void);
+void icialluis(void);
+void icivau(uint64_t ivau);
+
+/* TLB maintenance instructions */
+void tlbiall_el1(void);
+void tlbiall_el2(void);
+void tlbiall_el3(void);
+void tlbiall_current(void);
+void tlbiallis_el1(void);
+void tlbiallis_el2(void);
+void tlbiallis_el3(void);
+void tlbiallis_current(void);
+void tlbivaa_el1(uint64_t va);
+
+/* Memory barrier */
+/* data memory barrier */
+#define dmb_opt(opt) asm volatile ("dmb " #opt : : : "memory")
+/* data sync barrier */
+#define dsb_opt(opt) asm volatile ("dsb " #opt : : : "memory")
+/* instruction sync barrier */
+#define isb_opt(opt) asm volatile ("isb " #opt : : : "memory")
+
+#define dmb() dmb_opt(sy)
+#define dsb() dsb_opt(sy)
+#define isb() isb_opt()
+
+/* Clock */
+void set_cntfrq(uint32_t freq);
+
+#endif //__ARCH_LIB_HELPERS_H__
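
As a closing illustration (not part of the commit): the *_current accessors defined above expand via SWITCH_CASE_READ/SWITCH_CASE_WRITE into a plain switch on get_current_el(), which is what spares callers from testing the exception level themselves. For example, raw_read_sctlr_current() behaves roughly like this hand-expanded sketch:

    /* Hand expansion of SWITCH_CASE_READ(raw_read_sctlr, sctlr, uint32_t). */
    uint32_t raw_read_sctlr_current(void)
    {
            uint32_t sctlr = -1;
            switch (get_current_el()) {
            case EL1:
                    sctlr = raw_read_sctlr_el1();
                    break;
            case EL2:
                    sctlr = raw_read_sctlr_el2();
                    break;
            case EL3:
                    sctlr = raw_read_sctlr_el3();
                    break;
            }
            return sctlr;
    }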