Diffstat (limited to 'lib/arch')
-rw-r--r--  lib/arch/aarch64/cache_helpers.S    233
-rw-r--r--  lib/arch/aarch64/misc_helpers.S     274
-rw-r--r--  lib/arch/aarch64/sysreg_helpers.S  1154
-rw-r--r--  lib/arch/aarch64/tlb_helpers.S      111
4 files changed, 1772 insertions(+), 0 deletions(-)
diff --git a/lib/arch/aarch64/cache_helpers.S b/lib/arch/aarch64/cache_helpers.S
new file mode 100644
index 00000000..b8a56083
--- /dev/null
+++ b/lib/arch/aarch64/cache_helpers.S
@@ -0,0 +1,233 @@
+/*
+ * Copyright (c) 2013, ARM Limited. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * Neither the name of ARM nor the names of its contributors may be used
+ * to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <arch_helpers.h>
+#include <asm_macros.S>
+
+ .globl dcisw
+ .globl dccisw
+ .globl dccsw
+ .globl dccvac
+ .globl dcivac
+ .globl dccivac
+ .globl dccvau
+ .globl dczva
+ .globl flush_dcache_range
+ .globl inv_dcache_range
+ .globl dcsw_op_louis
+ .globl dcsw_op_all
+
+ .section .text, "ax"; .align 3
+
+dcisw:; .type dcisw, %function
+ dc isw, x0
+ dsb sy
+ isb
+ ret
+
+
+dccisw:; .type dccisw, %function
+ dc cisw, x0
+ dsb sy
+ isb
+ ret
+
+
+dccsw:; .type dccsw, %function
+ dc csw, x0
+ dsb sy
+ isb
+ ret
+
+
+dccvac:; .type dccvac, %function
+ dc cvac, x0
+ dsb sy
+ isb
+ ret
+
+
+dcivac:; .type dcivac, %function
+ dc ivac, x0
+ dsb sy
+ isb
+ ret
+
+
+dccivac:; .type dccivac, %function
+ dc civac, x0
+ dsb sy
+ isb
+ ret
+
+
+dccvau:; .type dccvau, %function
+ dc cvau, x0
+ dsb sy
+ isb
+ ret
+
+
+dczva:; .type dczva, %function
+ dc zva, x0
+ dsb sy
+ isb
+ ret
+
+
+	/* ------------------------------------------
+	 * Clean and invalidate the data cache for
+	 * the range [addr, addr + size).
+	 * 'x0' = addr, 'x1' = size
+	 * ------------------------------------------
+	 */
+flush_dcache_range:; .type flush_dcache_range, %function
+ dcache_line_size x2, x3
+ add x1, x0, x1
+ sub x3, x2, #1
+ bic x0, x0, x3
+flush_loop:
+ dc civac, x0
+ add x0, x0, x2
+ cmp x0, x1
+ b.lo flush_loop
+ dsb sy
+ ret
+
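+	/*
+	 * Illustrative caller sketch (hypothetical, not part of this
+	 * patch): cleaning and invalidating a 256-byte buffer at a
+	 * label `buf' before handing it to a non-coherent device:
+	 *
+	 *	ldr	x0, =buf		// base address
+	 *	mov	x1, #256		// size in bytes
+	 *	bl	flush_dcache_range
+	 */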
+
+	/* ------------------------------------------
+	 * Invalidate the data cache for the range
+	 * [addr, addr + size). Partial lines at
+	 * either end are invalidated too, so any
+	 * dirty data they hold is discarded.
+	 * 'x0' = addr, 'x1' = size
+	 * ------------------------------------------
+	 */
+inv_dcache_range:; .type inv_dcache_range, %function
+ dcache_line_size x2, x3
+ add x1, x0, x1
+ sub x3, x2, #1
+ bic x0, x0, x3
+inv_loop:
+ dc ivac, x0
+ add x0, x0, x2
+ cmp x0, x1
+ b.lo inv_loop
+ dsb sy
+ ret
+
+
+	/* ---------------------------------------------------------------
+	 * Data cache operations by set/way, up to the level specified.
+	 *
+	 * Call this function with the clidr in x0, the starting cache
+	 * level in x10, the last cache level in x3 and the address of
+	 * the cache maintenance routine (dcisw/dccisw/dccsw) in x14.
+	 * ---------------------------------------------------------------
+	 */
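+	/*
+	 * For reference (ARMv8 ARM, DC by set/way), the operand
+	 * assembled below is laid out as:
+	 *	[31:32-A]	way number, A = ceil(log2(ways))
+	 *	[B-1:L]		set number, L = log2(line length in bytes)
+	 *	[3:1]		cache level - 1
+	 */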
+dcsw_op:; .type dcsw_op, %function
+all_start_at_level:
+	add	x2, x10, x10, lsr #1	// work out 3x current cache level
+	lsr	x1, x0, x2		// extract cache type bits from clidr
+	and	x1, x1, #7		// mask the bits for the current cache only
+	cmp	x1, #2			// see what cache we have at this level
+	b.lt	skip			// skip if no cache, or just i-cache
+	msr	csselr_el1, x10		// select current cache level in csselr
+	isb				// isb to sync the new csselr & ccsidr
+	mrs	x1, ccsidr_el1		// read the new ccsidr
+	and	x2, x1, #7		// extract the length of the cache lines
+	add	x2, x2, #4		// add 4 (line length offset)
+	mov	x4, #0x3ff
+	and	x4, x4, x1, lsr #3	// extract the maximum way number (ways - 1)
+	clz	w5, w4			// find bit position of way size increment
+	mov	x7, #0x7fff
+	and	x7, x7, x1, lsr #13	// extract the maximum set number (sets - 1)
+loop2:
+ mov x9, x4 // create working copy of max way size
+loop3:
+ lsl x6, x9, x5
+ orr x11, x10, x6 // factor way and cache number into x11
+ lsl x6, x7, x2
+ orr x11, x11, x6 // factor index number into x11
+ mov x12, x0
+ mov x13, x30 // lr
+ mov x0, x11
+ blr x14
+ mov x0, x12
+ mov x30, x13 // lr
+ subs x9, x9, #1 // decrement the way
+ b.ge loop3
+ subs x7, x7, #1 // decrement the index
+ b.ge loop2
+skip:
+ add x10, x10, #2 // increment cache number
+ cmp x3, x10
+ b.gt all_start_at_level
+finished:
+	mov	x10, #0			// switch back to cache level 0
+ msr csselr_el1, x10 // select current cache level in csselr
+ dsb sy
+ isb
+ ret
+
+
+do_dcsw_op:; .type do_dcsw_op, %function
+ cbz x3, exit
+ cmp x0, #DCISW
+ b.eq dc_isw
+ cmp x0, #DCCISW
+ b.eq dc_cisw
+ cmp x0, #DCCSW
+ b.eq dc_csw
+dc_isw:				// also the fallthrough default
+ mov x0, x9
+ adr x14, dcisw
+ b dcsw_op
+dc_cisw:
+ mov x0, x9
+ adr x14, dccisw
+ b dcsw_op
+dc_csw:
+ mov x0, x9
+ adr x14, dccsw
+ b dcsw_op
+exit:
+ ret
+
+
+dcsw_op_louis:; .type dcsw_op_louis, %function
+ dsb sy
+ setup_dcsw_op_args x10, x3, x9, #LOUIS_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
+ b do_dcsw_op
+
+
+dcsw_op_all:; .type dcsw_op_all, %function
+ dsb sy
+ setup_dcsw_op_args x10, x3, x9, #LOC_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
+ b do_dcsw_op
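+
+
+	/*
+	 * Illustrative call sequence (hypothetical, not part of this
+	 * patch): to clean and invalidate all data cache levels up to
+	 * the point of coherency:
+	 *
+	 *	mov	x0, #DCCISW
+	 *	bl	dcsw_op_all
+	 */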
diff --git a/lib/arch/aarch64/misc_helpers.S b/lib/arch/aarch64/misc_helpers.S
new file mode 100644
index 00000000..8c1f7400
--- /dev/null
+++ b/lib/arch/aarch64/misc_helpers.S
@@ -0,0 +1,274 @@
+/*
+ * Copyright (c) 2013, ARM Limited. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * Neither the name of ARM nor the names of its contributors may be used
+ * to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <arch_helpers.h>
+
+ .globl enable_irq
+ .globl disable_irq
+
+ .globl enable_fiq
+ .globl disable_fiq
+
+ .globl enable_serror
+ .globl disable_serror
+
+ .globl read_daif
+ .globl write_daif
+
+ .globl read_spsr
+ .globl read_spsr_el1
+ .globl read_spsr_el2
+ .globl read_spsr_el3
+
+ .globl write_spsr
+ .globl write_spsr_el1
+ .globl write_spsr_el2
+ .globl write_spsr_el3
+
+ .globl read_elr
+ .globl read_elr_el1
+ .globl read_elr_el2
+ .globl read_elr_el3
+
+ .globl write_elr
+ .globl write_elr_el1
+ .globl write_elr_el2
+ .globl write_elr_el3
+
+ .globl get_afflvl_shift
+ .globl mpidr_mask_lower_afflvls
+ .globl dsb
+ .globl isb
+ .globl sev
+ .globl wfe
+ .globl wfi
+ .globl eret
+ .globl smc
+
+
+ .section .text, "ax"
+
+get_afflvl_shift:; .type get_afflvl_shift, %function
+	cmp	x0, #3			// affinity level 3 sits above a gap
+	cinc	x0, x0, eq		// in mpidr, so bump the level by one
+	mov	x1, #MPIDR_AFFLVL_SHIFT
+	lsl	x0, x0, x1		// convert the level to its bit offset
+	ret
+
+mpidr_mask_lower_afflvls:; .type mpidr_mask_lower_afflvls, %function
+	cmp	x1, #3			// same gap handling as above
+	cinc	x1, x1, eq
+	mov	x2, #MPIDR_AFFLVL_SHIFT
+	lsl	x2, x1, x2
+	lsr	x0, x0, x2		// shift out, then shift back in, to
+	lsl	x0, x0, x2		// zero the affinity fields below 'x1'
+	ret
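+
+	/*
+	 * Worked example (hypothetical values, and assuming
+	 * MPIDR_AFFLVL_SHIFT is 3, i.e. one byte per affinity field):
+	 * with x0 = 0x0102 and x1 = 1 the shift works out to 8, so the
+	 * function returns 0x0100 - affinity level 0 has been masked
+	 * off.
+	 */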
+
+ /* -----------------------------------------------------
+ * Asynchronous exception manipulation accessors
+ * -----------------------------------------------------
+ */
+enable_irq:; .type enable_irq, %function
+ msr daifclr, #DAIF_IRQ_BIT
+ ret
+
+
+enable_fiq:; .type enable_fiq, %function
+ msr daifclr, #DAIF_FIQ_BIT
+ ret
+
+
+enable_serror:; .type enable_serror, %function
+ msr daifclr, #DAIF_ABT_BIT
+ ret
+
+
+disable_irq:; .type disable_irq, %function
+ msr daifset, #DAIF_IRQ_BIT
+ ret
+
+
+disable_fiq:; .type disable_fiq, %function
+ msr daifset, #DAIF_FIQ_BIT
+ ret
+
+
+disable_serror:; .type disable_serror, %function
+ msr daifset, #DAIF_ABT_BIT
+ ret
+
+
+read_daif:; .type read_daif, %function
+ mrs x0, daif
+ ret
+
+
+write_daif:; .type write_daif, %function
+ msr daif, x0
+ ret
+
+
+read_spsr:; .type read_spsr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_spsr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_spsr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_spsr_el3
+
+
+read_spsr_el1:; .type read_spsr_el1, %function
+ mrs x0, spsr_el1
+ ret
+
+
+read_spsr_el2:; .type read_spsr_el2, %function
+ mrs x0, spsr_el2
+ ret
+
+
+read_spsr_el3:; .type read_spsr_el3, %function
+ mrs x0, spsr_el3
+ ret
+
+
+write_spsr:; .type write_spsr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_spsr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_spsr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_spsr_el3
+
+
+write_spsr_el1:; .type write_spsr_el1, %function
+ msr spsr_el1, x0
+ isb
+ ret
+
+
+write_spsr_el2:; .type write_spsr_el2, %function
+ msr spsr_el2, x0
+ isb
+ ret
+
+
+write_spsr_el3:; .type write_spsr_el3, %function
+ msr spsr_el3, x0
+ isb
+ ret
+
+
+read_elr:; .type read_elr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_elr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_elr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_elr_el3
+
+
+read_elr_el1:; .type read_elr_el1, %function
+ mrs x0, elr_el1
+ ret
+
+
+read_elr_el2:; .type read_elr_el2, %function
+ mrs x0, elr_el2
+ ret
+
+
+read_elr_el3:; .type read_elr_el3, %function
+ mrs x0, elr_el3
+ ret
+
+
+write_elr:; .type write_elr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_elr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_elr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_elr_el3
+
+
+write_elr_el1:; .type write_elr_el1, %function
+ msr elr_el1, x0
+ isb
+ ret
+
+
+write_elr_el2:; .type write_elr_el2, %function
+ msr elr_el2, x0
+ isb
+ ret
+
+
+write_elr_el3:; .type write_elr_el3, %function
+ msr elr_el3, x0
+ isb
+ ret
+
+
+dsb:; .type dsb, %function
+ dsb sy
+ ret
+
+
+isb:; .type isb, %function
+ isb
+ ret
+
+
+sev:; .type sev, %function
+ sev
+ ret
+
+
+wfe:; .type wfe, %function
+ wfe
+ ret
+
+
+wfi:; .type wfi, %function
+ wfi
+ ret
+
+
+eret:; .type eret, %function
+ eret
+
+
+smc:; .type smc, %function
+ smc #0
diff --git a/lib/arch/aarch64/sysreg_helpers.S b/lib/arch/aarch64/sysreg_helpers.S
new file mode 100644
index 00000000..e68192ff
--- /dev/null
+++ b/lib/arch/aarch64/sysreg_helpers.S
@@ -0,0 +1,1154 @@
+/*
+ * Copyright (c) 2013, ARM Limited. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * Neither the name of ARM nor the names of its contributors may be used
+ * to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <arch_helpers.h>
+
+ .globl read_vbar
+ .globl read_vbar_el1
+ .globl read_vbar_el2
+ .globl read_vbar_el3
+ .globl write_vbar
+ .globl write_vbar_el1
+ .globl write_vbar_el2
+ .globl write_vbar_el3
+
+ .globl read_sctlr
+ .globl read_sctlr_el1
+ .globl read_sctlr_el2
+ .globl read_sctlr_el3
+ .globl write_sctlr
+ .globl write_sctlr_el1
+ .globl write_sctlr_el2
+ .globl write_sctlr_el3
+
+ .globl read_actlr
+ .globl read_actlr_el1
+ .globl read_actlr_el2
+ .globl read_actlr_el3
+ .globl write_actlr
+ .globl write_actlr_el1
+ .globl write_actlr_el2
+ .globl write_actlr_el3
+
+ .globl read_esr
+ .globl read_esr_el1
+ .globl read_esr_el2
+ .globl read_esr_el3
+ .globl write_esr
+ .globl write_esr_el1
+ .globl write_esr_el2
+ .globl write_esr_el3
+
+ .globl read_afsr0
+ .globl read_afsr0_el1
+ .globl read_afsr0_el2
+ .globl read_afsr0_el3
+ .globl write_afsr0
+ .globl write_afsr0_el1
+ .globl write_afsr0_el2
+ .globl write_afsr0_el3
+
+ .globl read_afsr1
+ .globl read_afsr1_el1
+ .globl read_afsr1_el2
+ .globl read_afsr1_el3
+ .globl write_afsr1
+ .globl write_afsr1_el1
+ .globl write_afsr1_el2
+ .globl write_afsr1_el3
+
+ .globl read_far
+ .globl read_far_el1
+ .globl read_far_el2
+ .globl read_far_el3
+ .globl write_far
+ .globl write_far_el1
+ .globl write_far_el2
+ .globl write_far_el3
+
+ .globl read_mair
+ .globl read_mair_el1
+ .globl read_mair_el2
+ .globl read_mair_el3
+ .globl write_mair
+ .globl write_mair_el1
+ .globl write_mair_el2
+ .globl write_mair_el3
+
+ .globl read_amair
+ .globl read_amair_el1
+ .globl read_amair_el2
+ .globl read_amair_el3
+ .globl write_amair
+ .globl write_amair_el1
+ .globl write_amair_el2
+ .globl write_amair_el3
+
+ .globl read_rvbar
+ .globl read_rvbar_el1
+ .globl read_rvbar_el2
+ .globl read_rvbar_el3
+
+ .globl read_rmr
+ .globl read_rmr_el1
+ .globl read_rmr_el2
+ .globl read_rmr_el3
+ .globl write_rmr
+ .globl write_rmr_el1
+ .globl write_rmr_el2
+ .globl write_rmr_el3
+
+ .globl read_tcr
+ .globl read_tcr_el1
+ .globl read_tcr_el2
+ .globl read_tcr_el3
+ .globl write_tcr
+ .globl write_tcr_el1
+ .globl write_tcr_el2
+ .globl write_tcr_el3
+
+ .globl read_cptr
+ .globl read_cptr_el2
+ .globl read_cptr_el3
+ .globl write_cptr
+ .globl write_cptr_el2
+ .globl write_cptr_el3
+
+ .globl read_ttbr0
+ .globl read_ttbr0_el1
+ .globl read_ttbr0_el2
+ .globl read_ttbr0_el3
+ .globl write_ttbr0
+ .globl write_ttbr0_el1
+ .globl write_ttbr0_el2
+ .globl write_ttbr0_el3
+
+ .globl read_ttbr1
+ .globl read_ttbr1_el1
+ .globl read_ttbr1_el2
+ .globl write_ttbr1
+ .globl write_ttbr1_el1
+ .globl write_ttbr1_el2
+
+ .globl read_cpacr
+ .globl write_cpacr
+
+ .globl read_cntfrq
+ .globl write_cntfrq
+
+ .globl read_cpuectlr
+ .globl write_cpuectlr
+
+ .globl read_cnthctl_el2
+ .globl write_cnthctl_el2
+
+ .globl read_cntfrq_el0
+ .globl write_cntfrq_el0
+
+ .globl read_scr
+ .globl write_scr
+
+ .globl read_hcr
+ .globl write_hcr
+
+ .globl read_midr
+ .globl read_mpidr
+
+ .globl read_current_el
+ .globl read_id_pfr1_el1
+ .globl read_id_aa64pfr0_el1
+
+#if SUPPORT_VFP
+ .globl enable_vfp
+ .globl read_fpexc
+ .globl write_fpexc
+#endif
+
+
+ .section .text, "ax"
+
+read_current_el:; .type read_current_el, %function
+ mrs x0, CurrentEl
+ ret
+
+
+read_id_pfr1_el1:; .type read_id_pfr1_el1, %function
+ mrs x0, id_pfr1_el1
+ ret
+
+
+read_id_aa64pfr0_el1:; .type read_id_aa64pfr0_el1, %function
+ mrs x0, id_aa64pfr0_el1
+ ret
+
+
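+	/*
+	 * Note on the dispatchers below: CurrentEL reports the current
+	 * exception level in bits [3:2], which is what the
+	 * (MODE_ELx << MODE_EL_SHIFT) comparisons assume. None of the
+	 * dispatchers handles EL0; a call from EL0 simply falls
+	 * through.
+	 */
+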
+ /* -----------------------------------------------------
+ * VBAR accessors
+ * -----------------------------------------------------
+ */
+read_vbar:; .type read_vbar, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_vbar_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_vbar_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_vbar_el3
+
+
+read_vbar_el1:; .type read_vbar_el1, %function
+ mrs x0, vbar_el1
+ ret
+
+
+read_vbar_el2:; .type read_vbar_el2, %function
+ mrs x0, vbar_el2
+ ret
+
+
+read_vbar_el3:; .type read_vbar_el3, %function
+ mrs x0, vbar_el3
+ ret
+
+
+write_vbar:; .type write_vbar, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_vbar_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_vbar_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_vbar_el3
+
+
+write_vbar_el1:; .type write_vbar_el1, %function
+ msr vbar_el1, x0
+ isb
+ ret
+
+
+write_vbar_el2:; .type write_vbar_el2, %function
+ msr vbar_el2, x0
+ isb
+ ret
+
+
+write_vbar_el3:; .type write_vbar_el3, %function
+ msr vbar_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * AFSR0 accessors
+ * -----------------------------------------------------
+ */
+read_afsr0:; .type read_afsr0, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_afsr0_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_afsr0_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_afsr0_el3
+
+
+read_afsr0_el1:; .type read_afsr0_el1, %function
+ mrs x0, afsr0_el1
+ ret
+
+
+read_afsr0_el2:; .type read_afsr0_el2, %function
+ mrs x0, afsr0_el2
+ ret
+
+
+read_afsr0_el3:; .type read_afsr0_el3, %function
+ mrs x0, afsr0_el3
+ ret
+
+
+write_afsr0:; .type write_afsr0, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_afsr0_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_afsr0_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_afsr0_el3
+
+
+write_afsr0_el1:; .type write_afsr0_el1, %function
+ msr afsr0_el1, x0
+ isb
+ ret
+
+
+write_afsr0_el2:; .type write_afsr0_el2, %function
+ msr afsr0_el2, x0
+ isb
+ ret
+
+
+write_afsr0_el3:; .type write_afsr0_el3, %function
+ msr afsr0_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * FAR accessors
+ * -----------------------------------------------------
+ */
+read_far:; .type read_far, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_far_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_far_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_far_el3
+
+
+read_far_el1:; .type read_far_el1, %function
+ mrs x0, far_el1
+ ret
+
+
+read_far_el2:; .type read_far_el2, %function
+ mrs x0, far_el2
+ ret
+
+
+read_far_el3:; .type read_far_el3, %function
+ mrs x0, far_el3
+ ret
+
+
+write_far:; .type write_far, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_far_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_far_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_far_el3
+
+
+write_far_el1:; .type write_far_el1, %function
+ msr far_el1, x0
+ isb
+ ret
+
+
+write_far_el2:; .type write_far_el2, %function
+ msr far_el2, x0
+ isb
+ ret
+
+
+write_far_el3:; .type write_far_el3, %function
+ msr far_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * MAIR accessors
+ * -----------------------------------------------------
+ */
+read_mair:; .type read_mair, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_mair_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_mair_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_mair_el3
+
+
+read_mair_el1:; .type read_mair_el1, %function
+ mrs x0, mair_el1
+ ret
+
+
+read_mair_el2:; .type read_mair_el2, %function
+ mrs x0, mair_el2
+ ret
+
+
+read_mair_el3:; .type read_mair_el3, %function
+ mrs x0, mair_el3
+ ret
+
+
+write_mair:; .type write_mair, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_mair_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_mair_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_mair_el3
+
+
+write_mair_el1:; .type write_mair_el1, %function
+ msr mair_el1, x0
+ isb
+ ret
+
+
+write_mair_el2:; .type write_mair_el2, %function
+ msr mair_el2, x0
+ isb
+ ret
+
+
+write_mair_el3:; .type write_mair_el3, %function
+ msr mair_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * AMAIR accessors
+ * -----------------------------------------------------
+ */
+read_amair:; .type read_amair, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_amair_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_amair_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_amair_el3
+
+
+read_amair_el1:; .type read_amair_el1, %function
+ mrs x0, amair_el1
+ ret
+
+
+read_amair_el2:; .type read_amair_el2, %function
+ mrs x0, amair_el2
+ ret
+
+
+read_amair_el3:; .type read_amair_el3, %function
+ mrs x0, amair_el3
+ ret
+
+
+write_amair:; .type write_amair, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_amair_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_amair_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_amair_el3
+
+
+write_amair_el1:; .type write_amair_el1, %function
+ msr amair_el1, x0
+ isb
+ ret
+
+
+write_amair_el2:; .type write_amair_el2, %function
+ msr amair_el2, x0
+ isb
+ ret
+
+
+write_amair_el3:; .type write_amair_el3, %function
+ msr amair_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * RVBAR accessors
+ * -----------------------------------------------------
+ */
+read_rvbar:; .type read_rvbar, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_rvbar_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_rvbar_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_rvbar_el3
+
+
+read_rvbar_el1:; .type read_rvbar_el1, %function
+ mrs x0, rvbar_el1
+ ret
+
+
+read_rvbar_el2:; .type read_rvbar_el2, %function
+ mrs x0, rvbar_el2
+ ret
+
+
+read_rvbar_el3:; .type read_rvbar_el3, %function
+ mrs x0, rvbar_el3
+ ret
+
+
+ /* -----------------------------------------------------
+ * RMR accessors
+ * -----------------------------------------------------
+ */
+read_rmr:; .type read_rmr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_rmr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_rmr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_rmr_el3
+
+
+read_rmr_el1:; .type read_rmr_el1, %function
+ mrs x0, rmr_el1
+ ret
+
+
+read_rmr_el2:; .type read_rmr_el2, %function
+ mrs x0, rmr_el2
+ ret
+
+
+read_rmr_el3:; .type read_rmr_el3, %function
+ mrs x0, rmr_el3
+ ret
+
+
+write_rmr:; .type write_rmr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_rmr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_rmr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_rmr_el3
+
+
+write_rmr_el1:; .type write_rmr_el1, %function
+ msr rmr_el1, x0
+ isb
+ ret
+
+
+write_rmr_el2:; .type write_rmr_el2, %function
+ msr rmr_el2, x0
+ isb
+ ret
+
+
+write_rmr_el3:; .type write_rmr_el3, %function
+ msr rmr_el3, x0
+ isb
+ ret
+
+
+	/* -----------------------------------------------------
+	 * AFSR1 accessors
+	 * -----------------------------------------------------
+	 */
+read_afsr1:; .type read_afsr1, %function
+	mrs	x0, CurrentEl
+	cmp	x0, #(MODE_EL1 << MODE_EL_SHIFT)
+	b.eq	read_afsr1_el1
+	cmp	x0, #(MODE_EL2 << MODE_EL_SHIFT)
+	b.eq	read_afsr1_el2
+	cmp	x0, #(MODE_EL3 << MODE_EL_SHIFT)
+	b.eq	read_afsr1_el3
+
+
+read_afsr1_el1:; .type read_afsr1_el1, %function
+ mrs x0, afsr1_el1
+ ret
+
+
+read_afsr1_el2:; .type read_afsr1_el2, %function
+ mrs x0, afsr1_el2
+ ret
+
+
+read_afsr1_el3:; .type read_afsr1_el3, %function
+ mrs x0, afsr1_el3
+ ret
+
+
+write_afsr1:; .type write_afsr1, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_afsr1_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_afsr1_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_afsr1_el3
+
+
+write_afsr1_el1:; .type write_afsr1_el1, %function
+ msr afsr1_el1, x0
+ isb
+ ret
+
+
+write_afsr1_el2:; .type write_afsr1_el2, %function
+ msr afsr1_el2, x0
+ isb
+ ret
+
+
+write_afsr1_el3:; .type write_afsr1_el3, %function
+ msr afsr1_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * SCTLR accessors
+ * -----------------------------------------------------
+ */
+read_sctlr:; .type read_sctlr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_sctlr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_sctlr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_sctlr_el3
+
+
+read_sctlr_el1:; .type read_sctlr_el1, %function
+ mrs x0, sctlr_el1
+ ret
+
+
+read_sctlr_el2:; .type read_sctlr_el2, %function
+ mrs x0, sctlr_el2
+ ret
+
+
+read_sctlr_el3:; .type read_sctlr_el3, %function
+ mrs x0, sctlr_el3
+ ret
+
+
+write_sctlr:; .type write_sctlr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_sctlr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_sctlr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_sctlr_el3
+
+
+write_sctlr_el1:; .type write_sctlr_el1, %function
+ msr sctlr_el1, x0
+ dsb sy
+ isb
+ ret
+
+
+write_sctlr_el2:; .type write_sctlr_el2, %function
+ msr sctlr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+write_sctlr_el3:; .type write_sctlr_el3, %function
+ msr sctlr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * ACTLR accessors
+ * -----------------------------------------------------
+ */
+read_actlr:; .type read_actlr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_actlr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_actlr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_actlr_el3
+
+
+read_actlr_el1:; .type read_actlr_el1, %function
+ mrs x0, actlr_el1
+ ret
+
+
+read_actlr_el2:; .type read_actlr_el2, %function
+ mrs x0, actlr_el2
+ ret
+
+
+read_actlr_el3:; .type read_actlr_el3, %function
+ mrs x0, actlr_el3
+ ret
+
+
+write_actlr:; .type write_actlr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_actlr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_actlr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_actlr_el3
+
+
+write_actlr_el1:; .type write_actlr_el1, %function
+ msr actlr_el1, x0
+ dsb sy
+ isb
+ ret
+
+
+write_actlr_el2:; .type write_actlr_el2, %function
+ msr actlr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+write_actlr_el3:; .type write_actlr_el3, %function
+ msr actlr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * ESR accessors
+ * -----------------------------------------------------
+ */
+read_esr:; .type read_esr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_esr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_esr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_esr_el3
+
+
+read_esr_el1:; .type read_esr_el1, %function
+ mrs x0, esr_el1
+ ret
+
+
+read_esr_el2:; .type read_esr_el2, %function
+ mrs x0, esr_el2
+ ret
+
+
+read_esr_el3:; .type read_esr_el3, %function
+ mrs x0, esr_el3
+ ret
+
+
+write_esr:; .type write_esr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_esr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_esr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_esr_el3
+
+
+write_esr_el1:; .type write_esr_el1, %function
+ msr esr_el1, x0
+ dsb sy
+ isb
+ ret
+
+
+write_esr_el2:; .type write_esr_el2, %function
+ msr esr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+write_esr_el3:; .type write_esr_el3, %function
+ msr esr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * TCR accessors
+ * -----------------------------------------------------
+ */
+read_tcr:; .type read_tcr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_tcr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_tcr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_tcr_el3
+
+
+read_tcr_el1:; .type read_tcr_el1, %function
+ mrs x0, tcr_el1
+ ret
+
+
+read_tcr_el2:; .type read_tcr_el2, %function
+ mrs x0, tcr_el2
+ ret
+
+
+read_tcr_el3:; .type read_tcr_el3, %function
+ mrs x0, tcr_el3
+ ret
+
+
+write_tcr:; .type write_tcr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_tcr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_tcr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_tcr_el3
+
+
+write_tcr_el1:; .type write_tcr_el1, %function
+ msr tcr_el1, x0
+ dsb sy
+ isb
+ ret
+
+
+write_tcr_el2:; .type write_tcr_el2, %function
+ msr tcr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+write_tcr_el3:; .type write_tcr_el3, %function
+ msr tcr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * CPTR accessors
+ * -----------------------------------------------------
+ */
+read_cptr:; .type read_cptr, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_cptr_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_cptr_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_cptr_el3
+
+
+read_cptr_el1:; .type read_cptr_el1, %function
+	b	read_cptr_el1		// cptr_el1 does not exist; spin to flag misuse
+
+
+read_cptr_el2:; .type read_cptr_el2, %function
+ mrs x0, cptr_el2
+ ret
+
+
+read_cptr_el3:; .type read_cptr_el3, %function
+ mrs x0, cptr_el3
+ ret
+
+
+write_cptr:; .type write_cptr, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_cptr_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_cptr_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_cptr_el3
+
+
+write_cptr_el1:; .type write_cptr_el1, %function
+	b	write_cptr_el1		// cptr_el1 does not exist; spin to flag misuse
+
+
+write_cptr_el2:; .type write_cptr_el2, %function
+ msr cptr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+write_cptr_el3:; .type write_cptr_el3, %function
+ msr cptr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * TTBR0 accessors
+ * -----------------------------------------------------
+ */
+read_ttbr0:; .type read_ttbr0, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_ttbr0_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_ttbr0_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_ttbr0_el3
+
+
+read_ttbr0_el1:; .type read_ttbr0_el1, %function
+ mrs x0, ttbr0_el1
+ ret
+
+
+read_ttbr0_el2:; .type read_ttbr0_el2, %function
+ mrs x0, ttbr0_el2
+ ret
+
+
+read_ttbr0_el3:; .type read_ttbr0_el3, %function
+ mrs x0, ttbr0_el3
+ ret
+
+
+write_ttbr0:; .type write_ttbr0, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_ttbr0_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_ttbr0_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_ttbr0_el3
+
+
+write_ttbr0_el1:; .type write_ttbr0_el1, %function
+ msr ttbr0_el1, x0
+ isb
+ ret
+
+
+write_ttbr0_el2:; .type write_ttbr0_el2, %function
+ msr ttbr0_el2, x0
+ isb
+ ret
+
+
+write_ttbr0_el3:; .type write_ttbr0_el3, %function
+ msr ttbr0_el3, x0
+ isb
+ ret
+
+
+ /* -----------------------------------------------------
+ * TTBR1 accessors
+ * -----------------------------------------------------
+ */
+read_ttbr1:; .type read_ttbr1, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq read_ttbr1_el1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq read_ttbr1_el2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq read_ttbr1_el3
+
+
+read_ttbr1_el1:; .type read_ttbr1_el1, %function
+ mrs x0, ttbr1_el1
+ ret
+
+
+read_ttbr1_el2:; .type read_ttbr1_el2, %function
+	b	read_ttbr1_el2		// ttbr1_el2 does not exist; spin to flag misuse
+
+
+read_ttbr1_el3:; .type read_ttbr1_el3, %function
+	b	read_ttbr1_el3		// ttbr1_el3 does not exist; spin to flag misuse
+
+
+write_ttbr1:; .type write_ttbr1, %function
+ mrs x1, CurrentEl
+ cmp x1, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq write_ttbr1_el1
+ cmp x1, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq write_ttbr1_el2
+ cmp x1, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq write_ttbr1_el3
+
+
+write_ttbr1_el1:; .type write_ttbr1_el1, %function
+ msr ttbr1_el1, x0
+ isb
+ ret
+
+
+write_ttbr1_el2:; .type write_ttbr1_el2, %function
+	b	write_ttbr1_el2		// ttbr1_el2 does not exist; spin to flag misuse
+
+
+write_ttbr1_el3:; .type write_ttbr1_el3, %function
+	b	write_ttbr1_el3		// ttbr1_el3 does not exist; spin to flag misuse
+
+
+read_hcr:; .type read_hcr, %function
+ mrs x0, hcr_el2
+ ret
+
+
+write_hcr:; .type write_hcr, %function
+ msr hcr_el2, x0
+ dsb sy
+ isb
+ ret
+
+
+read_cpacr:; .type read_cpacr, %function
+ mrs x0, cpacr_el1
+ ret
+
+
+write_cpacr:; .type write_cpacr, %function
+ msr cpacr_el1, x0
+ ret
+
+
+read_cntfrq_el0:; .type read_cntfrq_el0, %function
+ mrs x0, cntfrq_el0
+ ret
+
+
+write_cntfrq_el0:; .type write_cntfrq_el0, %function
+ msr cntfrq_el0, x0
+ ret
+
+
+read_cpuectlr:; .type read_cpuectlr, %function
+ mrs x0, CPUECTLR_EL1
+ ret
+
+
+write_cpuectlr:; .type write_cpuectlr, %function
+ msr CPUECTLR_EL1, x0
+ dsb sy
+ isb
+ ret
+
+
+read_cnthctl_el2:; .type read_cnthctl_el2, %function
+ mrs x0, cnthctl_el2
+ ret
+
+
+write_cnthctl_el2:; .type write_cnthctl_el2, %function
+ msr cnthctl_el2, x0
+ ret
+
+
+read_cntfrq:; .type read_cntfrq, %function
+ mrs x0, cntfrq_el0
+ ret
+
+
+write_cntfrq:; .type write_cntfrq, %function
+ msr cntfrq_el0, x0
+ ret
+
+
+write_scr:; .type write_scr, %function
+ msr scr_el3, x0
+ dsb sy
+ isb
+ ret
+
+
+read_scr:; .type read_scr, %function
+ mrs x0, scr_el3
+ ret
+
+
+read_midr:; .type read_midr, %function
+ mrs x0, midr_el1
+ ret
+
+
+read_mpidr:; .type read_mpidr, %function
+ mrs x0, mpidr_el1
+ ret
+
+
+#if SUPPORT_VFP
+enable_vfp:; .type enable_vfp, %function
+	mrs	x0, cpacr_el1
+	orr	x0, x0, #CPACR_VFP_BITS		// set the FP/SIMD enable bits
+	msr	cpacr_el1, x0
+	mrs	x0, cptr_el3
+	mov	x1, #AARCH64_CPTR_TFP
+	bic	x0, x0, x1			// clear the EL3 FP/SIMD trap bit
+	msr	cptr_el3, x0
+	ret
+
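+	/*
+	 * Illustrative usage (hypothetical, not part of this patch): a
+	 * single `bl enable_vfp' from EL3 init code removes both the
+	 * EL3 trap (cptr_el3.tfp) and the EL1 access restriction
+	 * (cpacr_el1) on FP/SIMD instructions.
+	 */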
+
+	// int read_fpexc(void)
+read_fpexc:; .type read_fpexc, %function
+	b	read_fpexc		// fpexc is AArch32-only; spin to flag misuse
+
+
+	// void write_fpexc(int fpexc)
+write_fpexc:; .type write_fpexc, %function
+	b	write_fpexc		// fpexc is AArch32-only; spin to flag misuse
+
+#endif
diff --git a/lib/arch/aarch64/tlb_helpers.S b/lib/arch/aarch64/tlb_helpers.S
new file mode 100644
index 00000000..8377f2c6
--- /dev/null
+++ b/lib/arch/aarch64/tlb_helpers.S
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2013, ARM Limited. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * Neither the name of ARM nor the names of its contributors may be used
+ * to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <arch_helpers.h>
+
+ .globl tlbiall
+ .globl tlbiallis
+ .globl tlbialle1
+ .globl tlbialle1is
+ .globl tlbialle2
+ .globl tlbialle2is
+ .globl tlbialle3
+ .globl tlbialle3is
+ .globl tlbivmalle1
+
+
+ .section .text, "ax"
+
+tlbiall:; .type tlbiall, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq tlbialle1
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq tlbialle2
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq tlbialle3
+
+
+tlbiallis:; .type tlbiallis, %function
+ mrs x0, CurrentEl
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq tlbialle1is
+ cmp x0, #(MODE_EL2 << MODE_EL_SHIFT)
+ b.eq tlbialle2is
+ cmp x0, #(MODE_EL3 << MODE_EL_SHIFT)
+ b.eq tlbialle3is
+
+
+tlbialle1:; .type tlbialle1, %function
+ tlbi alle1
+ dsb sy
+ isb
+ ret
+
+
+tlbialle1is:; .type tlbialle1is, %function
+ tlbi alle1is
+ dsb sy
+ isb
+ ret
+
+
+tlbialle2:; .type tlbialle2, %function
+ tlbi alle2
+ dsb sy
+ isb
+ ret
+
+
+tlbialle2is:; .type tlbialle2is, %function
+ tlbi alle2is
+ dsb sy
+ isb
+ ret
+
+
+tlbialle3:; .type tlbialle3, %function
+ tlbi alle3
+ dsb sy
+ isb
+ ret
+
+
+tlbialle3is:; .type tlbialle3is, %function
+ tlbi alle3is
+ dsb sy
+ isb
+ ret
+
+
+tlbivmalle1:; .type tlbivmalle1, %function
+ tlbi vmalle1
+ dsb sy
+ isb
+ ret
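+
+
+	/*
+	 * Illustrative usage (hypothetical, not part of this patch):
+	 * after updating a translation table descriptor at [x0], make
+	 * the write visible to the table walker before invalidating:
+	 *
+	 *	str	x1, [x0]	// new descriptor
+	 *	dsb	ishst		// complete the table update
+	 *	bl	tlbivmalle1
+	 */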