aarch64: userspace: Add support for page table swapping
Introduce the routines needed to correctly map the user thread stack,
and the functions used to swap the page tables on context switch.
Signed-off-by: Carlo Caione <ccaione@baylibre.com>
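
For context, here is an illustrative, self-contained C model (not part of the
patch) of the decision the new z_arm64_swap_ptables() routine below makes
whenever a thread is switched in: if the incoming thread's page tables are not
the ones currently live in TTBR0_EL1, TTBR0 is pointed at them; if they are
already live, only stale TLB entries need to be flushed, since the tables
themselves may just have been modified (e.g. to map the thread stack). The
struct layout, the global and the helper names used here are simplified
stand-ins for the Zephyr-internal definitions in arm_mmu.c.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct arm_mmu_ptables {
	uint64_t *base_xlat_table;	/* root translation table of the domain */
};

/* Models the translation table root currently programmed in TTBR0_EL1 */
static uint64_t *active_xlat_table;

static bool is_ptable_active(struct arm_mmu_ptables *ptables)
{
	return ptables->base_xlat_table == active_xlat_table;
}

/* Models z_arm64_set_ttbr0(): MMU/caches off, TLBI, write TTBR0, restore */
static void set_ttbr0(uintptr_t ttbr0)
{
	active_xlat_table = (uint64_t *)ttbr0;
	printf("TTBR0_EL1 <- %p\n", (void *)ttbr0);
}

/* Models invalidate_tlb_all() ("tlbi vmalle1") */
static void invalidate_tlb_all(void)
{
	printf("TLB invalidated, TTBR0 unchanged\n");
}

/* The per-switch decision: switch address spaces or just flush the TLB */
static void swap_ptables(struct arm_mmu_ptables *incoming)
{
	if (!is_ptable_active(incoming)) {
		/* Different address space: make its tables live */
		set_ttbr0((uintptr_t)incoming->base_xlat_table);
	} else {
		/* Same tables already live: mappings may have changed */
		invalidate_tlb_all();
	}
}

int main(void)
{
	static uint64_t table_a[512], table_b[512];
	struct arm_mmu_ptables dom_a = { table_a }, dom_b = { table_b };

	swap_ptables(&dom_a);	/* different tables: TTBR0 switch */
	swap_ptables(&dom_a);	/* same tables: TLB flush only */
	swap_ptables(&dom_b);	/* different tables again: TTBR0 switch */

	return 0;
}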
diff --git a/arch/arm/core/aarch64/mmu/CMakeLists.txt b/arch/arm/core/aarch64/mmu/CMakeLists.txt
index 5356b07..72a75fa 100644
--- a/arch/arm/core/aarch64/mmu/CMakeLists.txt
+++ b/arch/arm/core/aarch64/mmu/CMakeLists.txt
@@ -2,4 +2,4 @@
zephyr_library()
-zephyr_library_sources(arm_mmu.c)
+zephyr_library_sources(arm_mmu.c low_level.S)
diff --git a/arch/arm/core/aarch64/mmu/arm_mmu.c b/arch/arm/core/aarch64/mmu/arm_mmu.c
index a39f2d8..f2858df 100644
--- a/arch/arm/core/aarch64/mmu/arm_mmu.c
+++ b/arch/arm/core/aarch64/mmu/arm_mmu.c
@@ -10,6 +10,7 @@
#include <device.h>
#include <init.h>
#include <kernel.h>
+#include <kernel_arch_func.h>
#include <kernel_arch_interface.h>
#include <kernel_internal.h>
#include <logging/log.h>
@@ -993,6 +994,10 @@
reset_map(old_ptables, __func__, thread->stack_info.start,
thread->stack_info.size);
}
+
+ if (thread == _current && !is_ptable_active(domain_ptables)) {
+ z_arm64_swap_ptables(thread);
+ }
}
void arch_mem_domain_thread_remove(struct k_thread *thread)
@@ -1015,4 +1020,31 @@
thread->stack_info.size);
}
+void z_arm64_swap_ptables(struct k_thread *incoming)
+{
+ struct arm_mmu_ptables *ptables = incoming->arch.ptables;
+
+ if (!is_ptable_active(ptables)) {
+ z_arm64_set_ttbr0((uintptr_t)ptables->base_xlat_table);
+ } else {
+ invalidate_tlb_all();
+ }
+}
+
+void z_arm64_thread_pt_init(struct k_thread *incoming)
+{
+ struct arm_mmu_ptables *ptables;
+
+ if ((incoming->base.user_options & K_USER) == 0) {
+ return;
+ }
+
+ ptables = incoming->arch.ptables;
+
+ /* Map the thread stack */
+ map_thread_stack(incoming, ptables);
+
+ z_arm64_swap_ptables(incoming);
+}
+
#endif /* CONFIG_USERSPACE */
diff --git a/arch/arm/core/aarch64/mmu/low_level.S b/arch/arm/core/aarch64/mmu/low_level.S
new file mode 100644
index 0000000..583f614
--- /dev/null
+++ b/arch/arm/core/aarch64/mmu/low_level.S
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+#include <toolchain.h>
+#include <linker/sections.h>
+#include <arch/cpu.h>
+
+_ASM_FILE_PROLOGUE
+
+/*
+ * Switch TTBR0
+ */
+
+GTEXT(z_arm64_set_ttbr0)
+SECTION_FUNC(TEXT, z_arm64_set_ttbr0)
+
+ /* Disable the MMU and the D/I caches */
+ mrs x2, sctlr_el1
+ mov_imm x1, (SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
+ bic x1, x2, x1
+ msr sctlr_el1, x1
+ isb
+
+ /* Invalidate the TLBs */
+ tlbi vmalle1
+ dsb sy
+ isb
+
+ /* Switch the TTBR0 */
+ msr ttbr0_el1, x0
+ isb
+
+ /* Restore the saved SCTLR_EL1 */
+ msr sctlr_el1, x2
+ isb
+
+ ret
diff --git a/arch/arm/core/aarch64/switch.S b/arch/arm/core/aarch64/switch.S
index 846516b..8c69559 100644
--- a/arch/arm/core/aarch64/switch.S
+++ b/arch/arm/core/aarch64/switch.S
@@ -76,6 +76,12 @@
mov sp, x1
+#ifdef CONFIG_USERSPACE
+ stp xzr, x30, [sp, #-16]!
+ bl z_arm64_swap_ptables
+ ldp xzr, x30, [sp], #16
+#endif
+
#ifdef CONFIG_INSTRUMENT_THREAD_SWITCHING
stp xzr, x30, [sp, #-16]!
bl z_thread_mark_switched_in
diff --git a/arch/arm/core/aarch64/thread.c b/arch/arm/core/aarch64/thread.c
index 66b3863..ab2ab3c 100644
--- a/arch/arm/core/aarch64/thread.c
+++ b/arch/arm/core/aarch64/thread.c
@@ -83,6 +83,9 @@
z_arch_esf_t *pInitCtx;
uintptr_t stack_ptr;
+ /* Map the thread stack and switch to the thread's page tables */
+ z_arm64_thread_pt_init(_current);
+
/* Setup the private stack */
_current->arch.priv_stack_start = (uint64_t)(_current->stack_obj);
diff --git a/arch/arm/include/aarch64/kernel_arch_func.h b/arch/arm/include/aarch64/kernel_arch_func.h
index d80b51d..fc06999 100644
--- a/arch/arm/include/aarch64/kernel_arch_func.h
+++ b/arch/arm/include/aarch64/kernel_arch_func.h
@@ -41,6 +41,7 @@
extern void z_arm64_fatal_error(z_arch_esf_t *esf, unsigned int reason);
extern void z_arm64_userspace_enter(z_arch_esf_t *esf);
+extern void z_arm64_set_ttbr0(uintptr_t ttbr0);
#endif /* _ASMLANGUAGE */
diff --git a/include/arch/arm/aarch64/arm_mmu.h b/include/arch/arm/aarch64/arm_mmu.h
index b24454d..9e86bcc 100644
--- a/include/arch/arm/aarch64/arm_mmu.h
+++ b/include/arch/arm/aarch64/arm_mmu.h
@@ -194,7 +194,11 @@
*/
extern const struct arm_mmu_config mmu_config;
+struct k_thread;
void z_arm64_mmu_init(void);
+void z_arm64_thread_pt_init(struct k_thread *thread);
+void z_arm64_swap_ptables(struct k_thread *thread);
+
#endif /* _ASMLANGUAGE */
#endif /* ZEPHYR_INCLUDE_ARCH_ARM64_MMU_ARM_MMU_H_ */