target/arm: secure stage 2 translation regime
b1a10c868f9b2b09e64009b43450e9a86697d9f3
parent eeefc3c4a2
commit b49531cfef
@@ -2975,6 +2975,9 @@ typedef enum ARMMMUIdx {
     ARMMMUIdx_Stage1_E0 = 0 | ARM_MMU_IDX_NOTLB,
     ARMMMUIdx_Stage1_E1 = 1 | ARM_MMU_IDX_NOTLB,
     ARMMMUIdx_Stage1_E1_PAN = 2 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage1_SE0 = 3 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage1_SE1 = 4 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage1_SE1_PAN = 5 | ARM_MMU_IDX_NOTLB,
     /*
      * Not allocated a TLB: used only for second stage of an S12 page
      * table walk, or for descriptor loads during first stage of an S1
@@ -2982,7 +2985,8 @@ typedef enum ARMMMUIdx {
      * then various TLB flush insns which currently are no-ops or flush
      * only stage 1 MMU indexes will need to change to flush stage 2.
      */
-    ARMMMUIdx_Stage2 = 3 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage2 = 6 | ARM_MMU_IDX_NOTLB,
+    ARMMMUIdx_Stage2_S = 7 | ARM_MMU_IDX_NOTLB,
 
     /*
      * M-profile.
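Taken together, the two hunks above give each of the non-secure stage 1 and stage 2 "no TLB" indexes a Secure counterpart, which is why ARMMMUIdx_Stage2 moves from 3 to 6: values 3-5 are now occupied by the Stage1_SE* entries and 7 by Stage2_S. A minimal illustrative helper (not part of this commit; the function name and signature are invented, and the ARMMMUIdx values are assumed to come from the enum above) showing how the indexes pair up:

/*
 * Illustration only: pick the "no TLB" walk index for a given stage,
 * security state and exception level, using the pairs this commit creates.
 */
static ARMMMUIdx walk_mmu_idx(bool stage2, bool secure, bool is_el0, bool pan)
{
    if (stage2) {
        return secure ? ARMMMUIdx_Stage2_S : ARMMMUIdx_Stage2;
    }
    if (is_el0) {
        return secure ? ARMMMUIdx_Stage1_SE0 : ARMMMUIdx_Stage1_E0;
    }
    if (pan) {
        return secure ? ARMMMUIdx_Stage1_SE1_PAN : ARMMMUIdx_Stage1_E1_PAN;
    }
    return secure ? ARMMMUIdx_Stage1_SE1 : ARMMMUIdx_Stage1_E1;
}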
@@ -3122,7 +3122,7 @@ static uint64_t do_ats_write(CPUARMState *env, uint64_t value,
     uint32_t syn, fsr, fsc;
     bool take_exc = false;
 
-    if (fi.s1ptw && current_el == 1 && !arm_is_secure(env)
+    if (fi.s1ptw && current_el == 1
         && arm_mmu_idx_is_stage1_of_2(mmu_idx)) {
         /*
          * Synchronous stage 2 fault on an access made as part of the
@@ -3279,10 +3279,10 @@ static void ats_write(CPUARMState *env, const ARMCPRegInfo *ri, uint64_t value)
             /* fall through */
         case 1:
             if (ri->crm == 9 && (env->uncached_cpsr & CPSR_PAN)) {
-                mmu_idx = (secure ? ARMMMUIdx_SE10_1_PAN
+                mmu_idx = (secure ? ARMMMUIdx_Stage1_SE1_PAN
                            : ARMMMUIdx_Stage1_E1_PAN);
             } else {
-                mmu_idx = secure ? ARMMMUIdx_SE10_1 : ARMMMUIdx_Stage1_E1;
+                mmu_idx = secure ? ARMMMUIdx_Stage1_SE1 : ARMMMUIdx_Stage1_E1;
             }
             break;
         default:
@@ -3296,10 +3296,11 @@ static void ats_write(CPUARMState *env, const ARMCPRegInfo *ri, uint64_t value)
             mmu_idx = ARMMMUIdx_SE10_0;
             break;
         case 2:
+            g_assert(!secure); /* ARMv8.4-SecEL2 is 64-bit only */
             mmu_idx = ARMMMUIdx_Stage1_E0;
             break;
         case 1:
-            mmu_idx = secure ? ARMMMUIdx_SE10_0 : ARMMMUIdx_Stage1_E0;
+            mmu_idx = secure ? ARMMMUIdx_Stage1_SE0 : ARMMMUIdx_Stage1_E0;
             break;
         default:
             g_assert_not_reached();
@@ -3364,10 +3365,10 @@ static void ats_write64(CPUARMState *env, const ARMCPRegInfo *ri,
         switch (ri->opc1) {
         case 0: /* AT S1E1R, AT S1E1W, AT S1E1RP, AT S1E1WP */
            if (ri->crm == 9 && (env->pstate & PSTATE_PAN)) {
-                mmu_idx = (secure ? ARMMMUIdx_SE10_1_PAN
+                mmu_idx = (secure ? ARMMMUIdx_Stage1_SE1_PAN
                            : ARMMMUIdx_Stage1_E1_PAN);
             } else {
-                mmu_idx = secure ? ARMMMUIdx_SE10_1 : ARMMMUIdx_Stage1_E1;
+                mmu_idx = secure ? ARMMMUIdx_Stage1_SE1 : ARMMMUIdx_Stage1_E1;
             }
             break;
         case 4: /* AT S1E2R, AT S1E2W */
@@ -3381,7 +3382,7 @@ static void ats_write64(CPUARMState *env, const ARMCPRegInfo *ri,
         }
         break;
     case 2: /* AT S1E0R, AT S1E0W */
-        mmu_idx = secure ? ARMMMUIdx_SE10_0 : ARMMMUIdx_Stage1_E0;
+        mmu_idx = secure ? ARMMMUIdx_Stage1_SE0 : ARMMMUIdx_Stage1_E0;
         break;
     case 4: /* AT S12E1R, AT S12E1W */
         mmu_idx = secure ? ARMMMUIdx_SE10_1 : ARMMMUIdx_E10_1;
@@ -9765,7 +9766,7 @@ static inline bool regime_translation_disabled(CPUARMState *env,
 
     hcr_el2 = arm_hcr_el2_eff(env);
 
-    if (mmu_idx == ARMMMUIdx_Stage2) {
+    if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
         /* HCR.DC means HCR.VM behaves as 1 */
         return (hcr_el2 & (HCR_DC | HCR_VM)) == 0;
     }
@@ -9798,6 +9799,9 @@ static inline uint64_t regime_ttbr(CPUARMState *env, ARMMMUIdx mmu_idx,
     if (mmu_idx == ARMMMUIdx_Stage2) {
         return env->cp15.vttbr_el2;
     }
+    if (mmu_idx == ARMMMUIdx_Stage2_S) {
+        return env->cp15.vsttbr_el2;
+    }
     if (ttbrn == 0) {
         return env->cp15.ttbr0_el[regime_el(env, mmu_idx)];
     } else {
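The regime_translation_disabled and regime_ttbr hunks above make the secure stage 2 regime obey the same HCR_EL2.VM/HCR_EL2.DC enable as the non-secure one, while fetching its translation tables from VSTTBR_EL2 instead of VTTBR_EL2 (the matching control register, VSTCR_EL2, is wired up in the regime_tcr hunk near the end of the diff). A condensed, stand-alone restatement of the base-register choice, purely for illustration and assuming the cp15 fields referenced in these hunks:

/*
 * Illustration only: which translation table base a stage 2 walk reads
 * after this commit.
 */
static uint64_t stage2_ttbr(CPUARMState *env, ARMMMUIdx mmu_idx)
{
    if (mmu_idx == ARMMMUIdx_Stage2_S) {
        return env->cp15.vsttbr_el2;    /* secure stage 2 walk */
    }
    return env->cp15.vttbr_el2;         /* non-secure stage 2 walk */
}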
@@ -9814,6 +9818,12 @@ static inline uint64_t regime_ttbr(CPUARMState *env, ARMMMUIdx mmu_idx,
 static inline ARMMMUIdx stage_1_mmu_idx(ARMMMUIdx mmu_idx)
 {
     switch (mmu_idx) {
+    case ARMMMUIdx_SE10_0:
+        return ARMMMUIdx_Stage1_SE0;
+    case ARMMMUIdx_SE10_1:
+        return ARMMMUIdx_Stage1_SE1;
+    case ARMMMUIdx_SE10_1_PAN:
+        return ARMMMUIdx_Stage1_SE1_PAN;
     case ARMMMUIdx_E10_0:
         return ARMMMUIdx_Stage1_E0;
     case ARMMMUIdx_E10_1:
@@ -9860,6 +9870,7 @@ static inline bool regime_is_user(CPUARMState *env, ARMMMUIdx mmu_idx)
     case ARMMMUIdx_E20_0:
     case ARMMMUIdx_SE20_0:
     case ARMMMUIdx_Stage1_E0:
+    case ARMMMUIdx_Stage1_SE0:
     case ARMMMUIdx_MUser:
     case ARMMMUIdx_MSUser:
     case ARMMMUIdx_MUserNegPri:
@@ -10030,6 +10041,7 @@ static int get_S1prot(CPUARMState *env, ARMMMUIdx mmu_idx, bool is_aa64,
     int wxn = 0;
 
     assert(mmu_idx != ARMMMUIdx_Stage2);
+    assert(mmu_idx != ARMMMUIdx_Stage2_S);
 
     user_rw = simple_ap_to_rw_prot_is_user(ap, true);
     if (is_user) {
@@ -10126,13 +10138,12 @@ static hwaddr S1_ptw_translate(CPUARMState *env, ARMMMUIdx mmu_idx,
         hwaddr s2pa;
         int s2prot;
         int ret;
+        ARMMMUIdx s2_mmu_idx = *is_secure ? ARMMMUIdx_Stage2_S
+                                          : ARMMMUIdx_Stage2;
         ARMCacheAttrs cacheattrs = {};
         MemTxAttrs txattrs = {};
 
-        assert(!*is_secure); /* TODO: S-EL2 */
-
-        ret = get_phys_addr_lpae(env, addr, MMU_DATA_LOAD, ARMMMUIdx_Stage2,
-                                 false,
+        ret = get_phys_addr_lpae(env, addr, MMU_DATA_LOAD, s2_mmu_idx, false,
                                  &s2pa, &txattrs, &s2prot, &s2size, fi,
                                  &cacheattrs);
         if (ret) {
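With a dedicated secure stage 2 index available, S1_ptw_translate no longer needs the assert(!*is_secure) placeholder: the descriptor loads made during a stage 1 walk are themselves translated by stage 2, using the security state of that walk to pick the regime. A one-line sketch of the dispatch, for illustration only (the hunk above passes the chosen index straight to get_phys_addr_lpae; the helper name here is invented):

/*
 * Illustration only: the stage 2 regime that translates a stage 1
 * page-table descriptor fetch, given the security of the stage 1 walk.
 */
static ARMMMUIdx ptw_stage2_idx(bool s1_walk_is_secure)
{
    return s1_walk_is_secure ? ARMMMUIdx_Stage2_S : ARMMMUIdx_Stage2;
}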
@@ -10612,7 +10623,7 @@ static int aa64_va_parameter_tbi(uint64_t tcr, ARMMMUIdx mmu_idx)
 {
     if (regime_has_2_ranges(mmu_idx)) {
         return extract64(tcr, 37, 2);
-    } else if (mmu_idx == ARMMMUIdx_Stage2) {
+    } else if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
         return 0; /* VTCR_EL2 */
     } else {
         /* Replicate the single TBI bit so we always have 2 bits. */
@@ -10624,7 +10635,7 @@ static int aa64_va_parameter_tbid(uint64_t tcr, ARMMMUIdx mmu_idx)
 {
     if (regime_has_2_ranges(mmu_idx)) {
         return extract64(tcr, 51, 2);
-    } else if (mmu_idx == ARMMMUIdx_Stage2) {
+    } else if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
         return 0; /* VTCR_EL2 */
     } else {
         /* Replicate the single TBID bit so we always have 2 bits. */
@@ -10654,7 +10665,7 @@ ARMVAParameters aa64_va_parameters(CPUARMState *env, uint64_t va,
     tsz = extract32(tcr, 0, 6);
     using64k = extract32(tcr, 14, 1);
     using16k = extract32(tcr, 15, 1);
-    if (mmu_idx == ARMMMUIdx_Stage2) {
+    if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
         /* VTCR_EL2 */
         hpd = false;
     } else {
@@ -10719,6 +10730,8 @@ static ARMVAParameters aa32_va_parameters(CPUARMState *env, uint32_t va,
     int select, tsz;
     bool epd, hpd;
 
+    assert(mmu_idx != ARMMMUIdx_Stage2_S);
+
     if (mmu_idx == ARMMMUIdx_Stage2) {
         /* VTCR */
         bool sext = extract32(tcr, 4, 1);
@@ -10887,7 +10900,7 @@ static bool get_phys_addr_lpae(CPUARMState *env, uint64_t address,
         goto do_fault;
     }
 
-    if (mmu_idx != ARMMMUIdx_Stage2) {
+    if (mmu_idx != ARMMMUIdx_Stage2 && mmu_idx != ARMMMUIdx_Stage2_S) {
         /*
          * The starting level depends on the virtual address size (which can
          * be up to 48 bits) and the translation granule size. It indicates
@@ -11001,7 +11014,7 @@ static bool get_phys_addr_lpae(CPUARMState *env, uint64_t address,
             attrs = extract64(descriptor, 2, 10)
                 | (extract64(descriptor, 52, 12) << 10);
 
-            if (mmu_idx == ARMMMUIdx_Stage2) {
+            if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
                 /* Stage 2 table descriptors do not include any attribute fields */
                 break;
             }
@@ -11033,8 +11046,8 @@ static bool get_phys_addr_lpae(CPUARMState *env, uint64_t address,
 
     ap = extract32(attrs, 4, 2);
 
-    if (mmu_idx == ARMMMUIdx_Stage2) {
-        ns = true;
+    if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
+        ns = mmu_idx == ARMMMUIdx_Stage2;
         xn = extract32(attrs, 11, 2);
         *prot = get_S2prot(env, ap, xn, s1_is_el0);
     } else {
@@ -11062,7 +11075,7 @@ static bool get_phys_addr_lpae(CPUARMState *env, uint64_t address,
         arm_tlb_bti_gp(txattrs) = true;
     }
 
-    if (mmu_idx == ARMMMUIdx_Stage2) {
+    if (mmu_idx == ARMMMUIdx_Stage2 || mmu_idx == ARMMMUIdx_Stage2_S) {
         cacheattrs->attrs = convert_stage2_attrs(env, extract32(attrs, 0, 4));
     } else {
         /* Index into MAIR registers for cache attributes */
@@ -11081,7 +11094,8 @@ do_fault:
     fi->type = fault_type;
     fi->level = level;
     /* Tag the error as S2 for failed S1 PTW at S2 or ordinary S2. */
-    fi->stage2 = fi->s1ptw || (mmu_idx == ARMMMUIdx_Stage2);
+    fi->stage2 = fi->s1ptw || (mmu_idx == ARMMMUIdx_Stage2 ||
+                               mmu_idx == ARMMMUIdx_Stage2_S);
     return true;
 }
 
@@ -11892,6 +11906,8 @@ bool get_phys_addr(CPUARMState *env, target_ulong address,
             int s2_prot;
             int ret;
             ARMCacheAttrs cacheattrs2 = {0};
+            ARMMMUIdx s2_mmu_idx;
+            bool is_el0;
 
             ret = get_phys_addr(env, address, access_type, s1_mmu_idx, &ipa,
                                 attrs, prot, page_size, fi, cacheattrs);
@@ -11902,9 +11918,11 @@ bool get_phys_addr(CPUARMState *env, target_ulong address,
                 return ret;
             }
 
+            s2_mmu_idx = attrs->secure ? ARMMMUIdx_Stage2_S : ARMMMUIdx_Stage2;
+            is_el0 = mmu_idx == ARMMMUIdx_E10_0 || mmu_idx == ARMMMUIdx_SE10_0;
+
             /* S1 is done. Now do S2 translation. */
-            ret = get_phys_addr_lpae(env, ipa, access_type, ARMMMUIdx_Stage2,
-                                     mmu_idx == ARMMMUIdx_E10_0,
+            ret = get_phys_addr_lpae(env, ipa, access_type, s2_mmu_idx, is_el0,
                                      phys_ptr, attrs, &s2_prot,
                                      page_size, fi, &cacheattrs2);
             fi->s2addr = ipa;
@@ -11928,6 +11946,18 @@ bool get_phys_addr(CPUARMState *env, target_ulong address,
                 cacheattrs->shareability = 0;
             }
             *cacheattrs = combine_cacheattrs(*cacheattrs, cacheattrs2);
+
+            /* Check if IPA translates to secure or non-secure PA space. */
+            if (arm_is_secure_below_el3(env)) {
+                if (attrs->secure) {
+                    attrs->secure =
+                        !(env->cp15.vstcr_el2.raw_tcr & (VSTCR_SA | VSTCR_SW));
+                } else {
+                    attrs->secure =
+                        !((env->cp15.vtcr_el2.raw_tcr & (VTCR_NSA | VTCR_NSW))
+                        || (env->cp15.vstcr_el2.raw_tcr & VSTCR_SA));
+                }
+            }
             return 0;
         } else {
             /*
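The last hunk above decides which physical address space the combined translation reports when the CPU is Secure below EL3: a Secure IPA keeps a Secure output unless VSTCR_EL2.SA or VSTCR_EL2.SW is set, and a Non-secure IPA only produces a Secure output if VTCR_EL2.NSA, VTCR_EL2.NSW and VSTCR_EL2.SA are all clear. The remaining hunks, whose line numbers restart at 853, extend the inline regime_* helpers to recognise the new indexes. A condensed restatement of the output-space rule as a stand-alone predicate, illustrative only and assuming the VTCR_*/VSTCR_* bit masks used above:

/*
 * Illustration only: does the stage 2 output of a walk made from Secure
 * state land in the Secure PA space?  ipa_secure is the security of the
 * stage 1 output (attrs->secure before the hunk above runs).
 */
static bool s2_output_is_secure(bool ipa_secure, uint64_t vtcr, uint64_t vstcr)
{
    if (ipa_secure) {
        /* Secure IPA space: stays Secure unless VSTCR_EL2.SA or .SW is set. */
        return !(vstcr & (VSTCR_SA | VSTCR_SW));
    }
    /* Non-secure IPA space: Secure output only if NSA, NSW and SA are clear. */
    return !((vtcr & (VTCR_NSA | VTCR_NSW)) || (vstcr & VSTCR_SA));
}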
@@ -853,6 +853,9 @@ static inline bool regime_has_2_ranges(ARMMMUIdx mmu_idx)
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
     case ARMMMUIdx_Stage1_E1_PAN:
+    case ARMMMUIdx_Stage1_SE0:
+    case ARMMMUIdx_Stage1_SE1:
+    case ARMMMUIdx_Stage1_SE1_PAN:
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
     case ARMMMUIdx_E10_1_PAN:
@@ -898,7 +901,11 @@ static inline bool regime_is_secure(CPUARMState *env, ARMMMUIdx mmu_idx)
     case ARMMMUIdx_SE20_0:
     case ARMMMUIdx_SE20_2:
     case ARMMMUIdx_SE20_2_PAN:
+    case ARMMMUIdx_Stage1_SE0:
+    case ARMMMUIdx_Stage1_SE1:
+    case ARMMMUIdx_Stage1_SE1_PAN:
     case ARMMMUIdx_SE2:
+    case ARMMMUIdx_Stage2_S:
     case ARMMMUIdx_MSPrivNegPri:
     case ARMMMUIdx_MSUserNegPri:
     case ARMMMUIdx_MSPriv:
@@ -913,6 +920,7 @@ static inline bool regime_is_pan(CPUARMState *env, ARMMMUIdx mmu_idx)
 {
     switch (mmu_idx) {
     case ARMMMUIdx_Stage1_E1_PAN:
+    case ARMMMUIdx_Stage1_SE1_PAN:
     case ARMMMUIdx_E10_1_PAN:
     case ARMMMUIdx_E20_2_PAN:
     case ARMMMUIdx_SE10_1_PAN:
@@ -934,18 +942,22 @@ static inline uint32_t regime_el(CPUARMState *env, ARMMMUIdx mmu_idx)
     case ARMMMUIdx_E20_2:
     case ARMMMUIdx_E20_2_PAN:
     case ARMMMUIdx_Stage2:
+    case ARMMMUIdx_Stage2_S:
     case ARMMMUIdx_SE2:
     case ARMMMUIdx_E2:
         return 2;
     case ARMMMUIdx_SE3:
         return 3;
     case ARMMMUIdx_SE10_0:
+    case ARMMMUIdx_Stage1_SE0:
         return arm_el_is_aa64(env, 3) ? 1 : 3;
     case ARMMMUIdx_SE10_1:
     case ARMMMUIdx_SE10_1_PAN:
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
     case ARMMMUIdx_Stage1_E1_PAN:
+    case ARMMMUIdx_Stage1_SE1:
+    case ARMMMUIdx_Stage1_SE1_PAN:
     case ARMMMUIdx_E10_0:
     case ARMMMUIdx_E10_1:
     case ARMMMUIdx_E10_1_PAN:
@@ -969,6 +981,13 @@ static inline TCR *regime_tcr(CPUARMState *env, ARMMMUIdx mmu_idx)
     if (mmu_idx == ARMMMUIdx_Stage2) {
         return &env->cp15.vtcr_el2;
     }
+    if (mmu_idx == ARMMMUIdx_Stage2_S) {
+        /*
+         * Note: Secure stage 2 nominally shares fields from VTCR_EL2, but
+         * those are not currently used by QEMU, so just return VSTCR_EL2.
+         */
+        return &env->cp15.vstcr_el2;
+    }
     return &env->cp15.tcr_el[regime_el(env, mmu_idx)];
 }
 
@@ -1171,6 +1190,9 @@ static inline bool arm_mmu_idx_is_stage1_of_2(ARMMMUIdx mmu_idx)
     case ARMMMUIdx_Stage1_E0:
     case ARMMMUIdx_Stage1_E1:
     case ARMMMUIdx_Stage1_E1_PAN:
+    case ARMMMUIdx_Stage1_SE0:
+    case ARMMMUIdx_Stage1_SE1:
+    case ARMMMUIdx_Stage1_SE1_PAN:
         return true;
     default:
         return false;