|
16 | 16 | #include <asm/kvm_hyp.h> |
17 | 17 | #include <asm/kvm_mmu.h> |
18 | 18 |
|
| 19 | +#define SYS_IMP_APL_ACTLR_EL12 sys_reg(3, 6, 15, 14, 6) /* Apple IMPDEF EL2 alias of the guest's ACTLR_EL1 — presumably Apple-silicon only; verify against Apple sysreg docs */ |
| 20 | +#define SYS_ACTLR_EL12 sys_reg(3, 5, 1, 0, 1) /* architectural ACTLR_EL12 alias (op1=5 EL12 encoding, usable from EL2 with HCR_EL2.E2H) */ |
| 21 | + |
19 | 22 | static inline bool ctxt_has_s1poe(struct kvm_cpu_context *ctxt); |
20 | 23 |
|
21 | 24 | static inline struct kvm_vcpu *ctxt_to_vcpu(struct kvm_cpu_context *ctxt) |
@@ -172,6 +175,13 @@ static inline void __sysreg_save_el1_state(struct kvm_cpu_context *ctxt) |
172 | 175 |
|
173 | 176 | if (ctxt_has_sctlr2(ctxt)) |
174 | 177 | ctxt_sys_reg(ctxt, SCTLR2_EL1) = read_sysreg_el1(SYS_SCTLR2); |
| 178 | + |
| 179 | + if (IS_ENABLED(CONFIG_ARM64_ACTLR_STATE)) { |
| 180 | + if (alternative_has_cap_unlikely(ARM64_HAS_ACTLR_VIRT)) |
| 181 | + ctxt_sys_reg(ctxt, ACTLR_EL1) = read_sysreg_s(SYS_ACTLR_EL12); |
| 182 | + else if (alternative_has_cap_unlikely(ARM64_HAS_ACTLR_VIRT_APPLE)) |
| 183 | + ctxt_sys_reg(ctxt, ACTLR_EL1) = read_sysreg_s(SYS_IMP_APL_ACTLR_EL12); |
| 184 | + } |
175 | 185 | } |
176 | 186 |
|
177 | 187 | static inline void __sysreg_save_el2_return_state(struct kvm_cpu_context *ctxt) |
@@ -256,6 +266,13 @@ static inline void __sysreg_restore_el1_state(struct kvm_cpu_context *ctxt, |
256 | 266 | write_sysreg(ctxt_sys_reg(ctxt, PAR_EL1), par_el1); |
257 | 267 | write_sysreg(ctxt_sys_reg(ctxt, TPIDR_EL1), tpidr_el1); |
258 | 268 |
|
| 269 | + if (IS_ENABLED(CONFIG_ARM64_ACTLR_STATE)) { |
| 270 | + if (alternative_has_cap_unlikely(ARM64_HAS_ACTLR_VIRT)) |
| 271 | + write_sysreg_s(ctxt_sys_reg(ctxt, ACTLR_EL1), SYS_ACTLR_EL12); |
| 272 | + else if (alternative_has_cap_unlikely(ARM64_HAS_ACTLR_VIRT_APPLE)) |
| 273 | + write_sysreg_s(ctxt_sys_reg(ctxt, ACTLR_EL1), SYS_IMP_APL_ACTLR_EL12); |
| 274 | + } |
| 275 | + |
259 | 276 | if (ctxt_has_mte(ctxt)) { |
260 | 277 | write_sysreg_el1(ctxt_sys_reg(ctxt, TFSR_EL1), SYS_TFSR); |
261 | 278 | write_sysreg_s(ctxt_sys_reg(ctxt, TFSRE0_EL1), SYS_TFSRE0_EL1); |
|
0 commit comments