target/arm: Add mmu_idx for EL1 and EL2 w/ PAN enabled

To implement PAN, we will want to swap, for short periods
of time, to a different privileged mmu_idx. In addition,
we cannot do this with flushing alone, because the AT*
instructions have both PAN and PAN-less versions.

Add the ARMMMUIdx*_PAN constants where necessary next to
the corresponding ARMMMUIdx* constant.

Backports commit 452ef8cb8c7b06f44a30a3c3a54d3be82c4aef59 from qemu
Richard Henderson, 2020-03-21 17:07:10 -04:00; committed by Lioncash
parent ed5a4950fd
commit 7aaf0d442b
6 changed files with 90 additions and 23 deletions
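
The three new *_PAN indexes only matter once something maps a plain privileged index onto its PAN twin while PSTATE.PAN is set; that selection logic lands in later patches of the series, not here. As a rough sketch of the intent (the helper name below is invented for illustration; only the enum values are the ones added by this commit), it would look something like:

    /* Illustrative sketch only -- no such helper is added by this commit. */
    static ARMMMUIdx pan_variant_of(ARMMMUIdx idx, bool pan)
    {
        if (!pan) {
            return idx;                  /* PAN clear: keep the plain index */
        }
        switch (idx) {
        case ARMMMUIdx_E10_1:
            return ARMMMUIdx_E10_1_PAN;  /* NS EL1, EL1&0 regime */
        case ARMMMUIdx_E20_2:
            return ARMMMUIdx_E20_2_PAN;  /* NS EL2, EL2&0 regime */
        case ARMMMUIdx_SE10_1:
            return ARMMMUIdx_SE10_1_PAN; /* Secure EL1, EL1&0 regime */
        default:
            return idx;                  /* EL0, EL2-only, EL3, M-profile: no PAN variant */
        }
    }

The hunks below add the new constants and thread them through every place that enumerates mmu_idx cases: TLB flush masks, regime lookups, and the user-memory index helpers in the translators.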

cpu-param.h

@@ -28,9 +28,9 @@
 // Unicorn: Commented out until VTLB support is implemented.
 //# define TARGET_PAGE_BITS_VARY
 //# define TARGET_PAGE_BITS_MIN 10
-# define TARGET_PAGE_BITS 10
+# define TARGET_PAGE_BITS 12
 #endif
-#define NB_MMU_MODES 9
+#define NB_MMU_MODES 12
 #endif

cpu.h

@@ -2645,20 +2645,24 @@ bool write_cpustate_to_list(ARMCPU *cpu);
  * 5. we want to be able to use the TLB for accesses done as part of a
  *    stage1 page table walk, rather than having to walk the stage2 page
  *    table over and over.
+ * 6. we need separate EL1/EL2 mmu_idx for handling the Privileged Access
+ *    Never (PAN) bit within PSTATE.
  *
  * This gives us the following list of cases:
  *
  * NS EL0 EL1&0 stage 1+2 (aka NS PL0)
  * NS EL1 EL1&0 stage 1+2 (aka NS PL1)
+ * NS EL1 EL1&0 stage 1+2 +PAN
  * NS EL0 EL2&0
  * NS EL2 EL2&0
+ * NS EL2 EL2&0 +PAN
  * NS EL2 (aka NS PL2)
  * S EL0 EL1&0 (aka S PL0)
  * S EL1 EL1&0 (not used if EL3 is 32 bit)
+ * S EL1 EL1&0 +PAN
  * S EL3 (aka S PL1)
  * NS EL1&0 stage 2
  *
- * for a total of 9 different mmu_idx.
+ * for a total of 12 different mmu_idx.
  *
  * R profile CPUs have an MPU, but can use the same set of MMU indexes
  * as A profile. They only need to distinguish NS EL0 and NS EL1 (and
@@ -2713,19 +2717,22 @@ typedef enum ARMMMUIdx {
     /*
      * A-profile.
      */
     ARMMMUIdx_E10_0 = 0 | ARM_MMU_IDX_A,
     ARMMMUIdx_E20_0 = 1 | ARM_MMU_IDX_A,
     ARMMMUIdx_E10_1 = 2 | ARM_MMU_IDX_A,
+    ARMMMUIdx_E10_1_PAN = 3 | ARM_MMU_IDX_A,
-    ARMMMUIdx_E2 = 3 | ARM_MMU_IDX_A,
-    ARMMMUIdx_E20_2 = 4 | ARM_MMU_IDX_A,
+    ARMMMUIdx_E2 = 4 | ARM_MMU_IDX_A,
+    ARMMMUIdx_E20_2 = 5 | ARM_MMU_IDX_A,
+    ARMMMUIdx_E20_2_PAN = 6 | ARM_MMU_IDX_A,
-    ARMMMUIdx_SE10_0 = 5 | ARM_MMU_IDX_A,
-    ARMMMUIdx_SE10_1 = 6 | ARM_MMU_IDX_A,
-    ARMMMUIdx_SE3 = 7 | ARM_MMU_IDX_A,
+    ARMMMUIdx_SE10_0 = 7 | ARM_MMU_IDX_A,
+    ARMMMUIdx_SE10_1 = 8 | ARM_MMU_IDX_A,
+    ARMMMUIdx_SE10_1_PAN = 9 | ARM_MMU_IDX_A,
+    ARMMMUIdx_SE3 = 10 | ARM_MMU_IDX_A,
-    ARMMMUIdx_Stage2 = 8 | ARM_MMU_IDX_A,
+    ARMMMUIdx_Stage2 = 11 | ARM_MMU_IDX_A,
     /*
      * These are not allocated TLBs and are used only for AT system
@@ -2733,6 +2740,7 @@ typedef enum ARMMMUIdx {
      */
     ARMMMUIdx_Stage1_E0 = 0 | ARM_MMU_IDX_NOTLB,
     ARMMMUIdx_Stage1_E1 = 1 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage1_E1_PAN = 2 | ARM_MMU_IDX_NOTLB,
     /*
      * M-profile.
@@ -2758,10 +2766,13 @@ typedef enum ARMMMUIdxBit {
     TO_CORE_BIT(E10_0),
     TO_CORE_BIT(E20_0),
     TO_CORE_BIT(E10_1),
+    TO_CORE_BIT(E10_1_PAN),
     TO_CORE_BIT(E2),
     TO_CORE_BIT(E20_2),
+    TO_CORE_BIT(E20_2_PAN),
     TO_CORE_BIT(SE10_0),
     TO_CORE_BIT(SE10_1),
+    TO_CORE_BIT(SE10_1_PAN),
     TO_CORE_BIT(SE3),
     TO_CORE_BIT(Stage2),

helper.c

@@ -514,6 +514,7 @@ static void tlbiall_nsnh_write(CPUARMState *env, const ARMCPRegInfo *ri,
     tlb_flush_by_mmuidx(cs,
                         ARMMMUIdxBit_E10_1 |
+                        ARMMMUIdxBit_E10_1_PAN |
                         ARMMMUIdxBit_E10_0 |
                         ARMMMUIdxBit_Stage2);
 }
@@ -527,6 +528,7 @@ static void tlbiall_nsnh_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
     tlb_flush_by_mmuidx_all_cpus_synced(cs,
                                         ARMMMUIdxBit_E10_1 |
+                                        ARMMMUIdxBit_E10_1_PAN |
                                         ARMMMUIdxBit_E10_0 |
                                         ARMMMUIdxBit_Stage2);
 #endif
@@ -2484,6 +2486,7 @@ static int gt_phys_redir_timeridx(CPUARMState *env)
     switch (arm_mmu_idx(env)) {
     case ARMMMUIdx_E20_0:
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
         return GTIMER_HYP;
     default:
         return GTIMER_PHYS;
@@ -2495,6 +2498,7 @@ static int gt_virt_redir_timeridx(CPUARMState *env)
     switch (arm_mmu_idx(env)) {
     case ARMMMUIdx_E20_0:
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
         return GTIMER_HYPVIRT;
     default:
         return GTIMER_VIRT;
@@ -3112,7 +3116,9 @@ static uint64_t do_ats_write(CPUARMState *env, uint64_t value,
         format64 = arm_s1_regime_using_lpae_format(env, mmu_idx);
         if (arm_feature(env, ARM_FEATURE_EL2)) {
-            if (mmu_idx == ARMMMUIdx_E10_0 || mmu_idx == ARMMMUIdx_E10_1) {
+            if (mmu_idx == ARMMMUIdx_E10_0 ||
+                mmu_idx == ARMMMUIdx_E10_1 ||
+                mmu_idx == ARMMMUIdx_E10_1_PAN) {
                 format64 |= env->cp15.hcr_el2 & (HCR_VM | HCR_DC);
             } else {
                 format64 |= arm_current_el(env) == 2;
@@ -3572,7 +3578,9 @@ static void vmsa_tcr_ttbr_el2_write(CPUARMState *env, const ARMCPRegInfo *ri,
     if (extract64(raw_read(env, ri) ^ value, 48, 16) &&
         (arm_hcr_el2_eff(env) & HCR_E2H)) {
         tlb_flush_by_mmuidx(env_cpu(env),
-                            ARMMMUIdxBit_E20_2 | ARMMMUIdxBit_E20_0);
+                            ARMMMUIdxBit_E20_2 |
+                            ARMMMUIdxBit_E20_2_PAN |
+                            ARMMMUIdxBit_E20_0);
     }
     raw_write(env, ri, value);
 }
@@ -3590,6 +3598,7 @@ static void vttbr_write(CPUARMState *env, const ARMCPRegInfo *ri,
     if (raw_read(env, ri) != value) {
         tlb_flush_by_mmuidx(cs,
                             ARMMMUIdxBit_E10_1 |
+                            ARMMMUIdxBit_E10_1_PAN |
                             ARMMMUIdxBit_E10_0 |
                             ARMMMUIdxBit_Stage2);
         raw_write(env, ri, value);
@@ -3950,12 +3959,18 @@ static int vae1_tlbmask(CPUARMState *env)
 {
     /* Since we exclude secure first, we may read HCR_EL2 directly. */
     if (arm_is_secure_below_el3(env)) {
-        return ARMMMUIdxBit_SE10_1 | ARMMMUIdxBit_SE10_0;
+        return ARMMMUIdxBit_SE10_1 |
+               ARMMMUIdxBit_SE10_1_PAN |
+               ARMMMUIdxBit_SE10_0;
     } else if ((env->cp15.hcr_el2 & (HCR_E2H | HCR_TGE))
                == (HCR_E2H | HCR_TGE)) {
-        return ARMMMUIdxBit_E20_2 | ARMMMUIdxBit_E20_0;
+        return ARMMMUIdxBit_E20_2 |
+               ARMMMUIdxBit_E20_2_PAN |
+               ARMMMUIdxBit_E20_0;
     } else {
-        return ARMMMUIdxBit_E10_1 | ARMMMUIdxBit_E10_0;
+        return ARMMMUIdxBit_E10_1 |
+               ARMMMUIdxBit_E10_1_PAN |
+               ARMMMUIdxBit_E10_0;
     }
 }
@@ -3994,18 +4009,28 @@ static int alle1_tlbmask(CPUARMState *env)
      */
     if (arm_is_secure_below_el3(env)) {
-        return ARMMMUIdxBit_SE10_1 | ARMMMUIdxBit_SE10_0;
+        return ARMMMUIdxBit_SE10_1 |
+               ARMMMUIdxBit_SE10_1_PAN |
+               ARMMMUIdxBit_SE10_0;
     } else if (arm_feature(env, ARM_FEATURE_EL2)) {
-        return ARMMMUIdxBit_E10_1 | ARMMMUIdxBit_E10_0 | ARMMMUIdxBit_Stage2;
+        return ARMMMUIdxBit_E10_1 |
+               ARMMMUIdxBit_E10_1_PAN |
+               ARMMMUIdxBit_E10_0 |
+               ARMMMUIdxBit_Stage2;
     } else {
-        return ARMMMUIdxBit_E10_1 | ARMMMUIdxBit_E10_0;
+        return ARMMMUIdxBit_E10_1 |
+               ARMMMUIdxBit_E10_1_PAN |
+               ARMMMUIdxBit_E10_0;
     }
 }

 static int e2_tlbmask(CPUARMState *env)
 {
     /* TODO: ARMv8.4-SecEL2 */
-    return ARMMMUIdxBit_E20_0 | ARMMMUIdxBit_E20_2 | ARMMMUIdxBit_E2;
+    return ARMMMUIdxBit_E20_0 |
+           ARMMMUIdxBit_E20_2 |
+           ARMMMUIdxBit_E20_2_PAN |
+           ARMMMUIdxBit_E2;
 }

 static void tlbi_aa64_alle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
@@ -9046,6 +9071,7 @@ static uint32_t regime_el(CPUARMState *env, ARMMMUIdx mmu_idx)
     switch (mmu_idx) {
     case ARMMMUIdx_E20_0:
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
     case ARMMMUIdx_Stage2:
     case ARMMMUIdx_E2:
         return 2;
@@ -9054,10 +9080,13 @@ static uint32_t regime_el(CPUARMState *env, ARMMMUIdx mmu_idx)
     case ARMMMUIdx_SE10_0:
         return arm_el_is_aa64(env, 3) ? 1 : 3;
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
+    case ARMMMUIdx_Stage1_E1_PAN:
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
     case ARMMMUIdx_MPrivNegPri:
     case ARMMMUIdx_MUserNegPri:
     case ARMMMUIdx_MPriv:
@@ -9175,6 +9204,8 @@ static inline ARMMMUIdx stage_1_mmu_idx(ARMMMUIdx mmu_idx)
         return ARMMMUIdx_Stage1_E0;
     case ARMMMUIdx_E10_1:
         return ARMMMUIdx_Stage1_E1;
+    case ARMMMUIdx_E10_1_PAN:
+        return ARMMMUIdx_Stage1_E1_PAN;
     default:
         return mmu_idx;
     }
@@ -9223,6 +9254,7 @@ static inline bool regime_is_user(CPUARMState *env, ARMMMUIdx mmu_idx)
         return false;
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
         g_assert_not_reached();
     }
 }
@@ -11133,7 +11165,9 @@ bool get_phys_addr(CPUARMState *env, target_ulong address,
                    target_ulong *page_size,
                    ARMMMUFaultInfo *fi, ARMCacheAttrs *cacheattrs)
 {
-    if (mmu_idx == ARMMMUIdx_E10_0 || mmu_idx == ARMMMUIdx_E10_1) {
+    if (mmu_idx == ARMMMUIdx_E10_0 ||
+        mmu_idx == ARMMMUIdx_E10_1 ||
+        mmu_idx == ARMMMUIdx_E10_1_PAN) {
         /*
          * Call ourselves recursively to do the stage 1 and then stage 2
          * translations.
@@ -11667,10 +11701,13 @@ int arm_mmu_idx_to_el(ARMMMUIdx mmu_idx)
     case ARMMMUIdx_SE10_0:
         return 0;
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
         return 1;
     case ARMMMUIdx_E2:
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
         return 2;
     case ARMMMUIdx_SE3:
         return 3;
@@ -11818,11 +11855,14 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
         /* TODO: ARMv8.2-UAO */
         switch (mmu_idx) {
         case ARMMMUIdx_E10_1:
+        case ARMMMUIdx_E10_1_PAN:
         case ARMMMUIdx_SE10_1:
+        case ARMMMUIdx_SE10_1_PAN:
             /* TODO: ARMv8.3-NV */
             flags = FIELD_DP32(flags, TBFLAG_A64, UNPRIV, 1);
             break;
         case ARMMMUIdx_E20_2:
+        case ARMMMUIdx_E20_2_PAN:
             /* TODO: ARMv8.4-SecEL2 */
             /*
              * Note that E20_2 is gated by HCR_EL2.E2H == 1, but E20_0 is

internals.h

@@ -845,12 +845,16 @@ static inline bool regime_has_2_ranges(ARMMMUIdx mmu_idx)
     switch (mmu_idx) {
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
+    case ARMMMUIdx_Stage1_E1_PAN:
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
     case ARMMMUIdx_E20_0:
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
     case ARMMMUIdx_SE10_0:
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
         return true;
     default:
         return false;
@@ -863,8 +867,13 @@ static inline bool regime_is_secure(CPUARMState *env, ARMMMUIdx mmu_idx)
     switch (mmu_idx) {
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
+    case ARMMMUIdx_E20_0:
+    case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
+    case ARMMMUIdx_Stage1_E1_PAN:
     case ARMMMUIdx_E2:
     case ARMMMUIdx_Stage2:
     case ARMMMUIdx_MPrivNegPri:
@@ -875,6 +884,7 @@ static inline bool regime_is_secure(CPUARMState *env, ARMMMUIdx mmu_idx)
     case ARMMMUIdx_SE3:
     case ARMMMUIdx_SE10_0:
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
     case ARMMMUIdx_MSPrivNegPri:
     case ARMMMUIdx_MSUserNegPri:
     case ARMMMUIdx_MSPriv:
@@ -1046,6 +1056,7 @@ static inline bool arm_mmu_idx_is_stage1_of_2(ARMMMUIdx mmu_idx)
     switch (mmu_idx) {
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
+    case ARMMMUIdx_Stage1_E1_PAN:
         return true;
     default:
         return false;

translate-a64.c

@@ -125,12 +125,15 @@ static int get_a64_user_mem_index(DisasContext *s)
      */
     switch (useridx) {
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
         useridx = ARMMMUIdx_E10_0;
         break;
     case ARMMMUIdx_E20_2:
+    case ARMMMUIdx_E20_2_PAN:
         useridx = ARMMMUIdx_E20_0;
         break;
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
         useridx = ARMMMUIdx_SE10_0;
         break;
     default:

translate.c

@@ -149,10 +149,12 @@ static inline int get_a32_user_mem_index(DisasContext *s)
     case ARMMMUIdx_E2:        /* this one is UNPREDICTABLE */
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
+    case ARMMMUIdx_E10_1_PAN:
         return arm_to_core_mmu_idx(ARMMMUIdx_E10_0);
     case ARMMMUIdx_SE3:
     case ARMMMUIdx_SE10_0:
     case ARMMMUIdx_SE10_1:
+    case ARMMMUIdx_SE10_1_PAN:
         return arm_to_core_mmu_idx(ARMMMUIdx_SE10_0);
     case ARMMMUIdx_MUser:
     case ARMMMUIdx_MPriv: