arch-arm: Distinguish IS TLBI from non-IS

TLBI broadcasting was previously the default implementation of most TLBI
instructions. This patch applies the broadcasting behaviour only to the
Inner-Shareable subset, while the simpler TLB invalidation instructions
affect only the PE that executes them.

Change-Id: Idb01d0d4f593131f657e8fc9668112de8e4ccdcb
Signed-off-by: Giacomo Travaglini <giacomo.travaglini@arm.com>
Reviewed-by: Nikos Nikoleris <nikos.nikoleris@arm.com>
Reviewed-on: https://gem5-review.googlesource.com/9182
Maintainer: Andreas Sandberg <andreas.sandberg@arm.com>
This commit is contained in:
Giacomo Travaglini
2018-02-22 14:14:48 +00:00
parent 33bb1aa386
commit 9c8af42920
6 changed files with 911 additions and 374 deletions

View File

@@ -82,6 +82,7 @@ if env['TARGET_ISA'] == 'arm':
Source('stage2_mmu.cc')
Source('stage2_lookup.cc')
Source('tlb.cc')
Source('tlbi_op.cc')
Source('utility.cc')
Source('vtophys.cc')

View File

@@ -42,6 +42,7 @@
#include "arch/arm/pmu.hh"
#include "arch/arm/system.hh"
#include "arch/arm/tlb.hh"
#include "arch/arm/tlbi_op.hh"
#include "cpu/base.hh"
#include "cpu/checker/cpu.hh"
#include "debug/Arm.hh"
@@ -683,37 +684,12 @@ ISA::setMiscRegNoEffect(int misc_reg, const MiscReg &val)
}
}
namespace {
template<typename T>
TLB *
getITBPtr(T *tc)
{
auto tlb = dynamic_cast<TLB *>(tc->getITBPtr());
assert(tlb);
return tlb;
}
template<typename T>
TLB *
getDTBPtr(T *tc)
{
auto tlb = dynamic_cast<TLB *>(tc->getDTBPtr());
assert(tlb);
return tlb;
}
} // anonymous namespace
void
ISA::setMiscReg(int misc_reg, const MiscReg &val, ThreadContext *tc)
{
MiscReg newVal = val;
bool secure_lookup;
bool hyp;
uint8_t target_el;
uint16_t asid;
SCR scr;
if (misc_reg == MISCREG_CPSR) {
@@ -1019,260 +995,501 @@ ISA::setMiscReg(int misc_reg, const MiscReg &val, ThreadContext *tc)
// ID registers are constants.
return;
// TLBI all entries, EL0&1 inner sharable (ignored)
case MISCREG_TLBIALLIS:
// TLB Invalidate All
case MISCREG_TLBIALL: // TLBI all entries, EL0&1,
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiALL(tc, secure_lookup, target_el);
return;
// TLBI all entries, EL0&1, instruction side
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp(tc);
return;
}
// TLB Invalidate All, Inner Shareable
case MISCREG_TLBIALLIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp.broadcast(tc);
return;
}
// Instruction TLB Invalidate All
case MISCREG_ITLBIALL:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getITBPtr(tc)->flushAllSecurity(secure_lookup, target_el);
return;
// TLBI all entries, EL0&1, data side
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
ITLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp(tc);
return;
}
// Data TLB Invalidate All
case MISCREG_DTLBIALL:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getDTBPtr(tc)->flushAllSecurity(secure_lookup, target_el);
return;
// TLBI based on VA, EL0&1 inner sharable (ignored)
case MISCREG_TLBIMVAL:
case MISCREG_TLBIMVALIS:
// mcr tlbimval(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
DTLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp(tc);
return;
}
// TLB Invalidate by VA
// mcr tlbimval(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
case MISCREG_TLBIMVA:
case MISCREG_TLBIMVAL:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVA tlbiOp(EL1,
haveSecurity && !scr.ns,
mbits(newVal, 31, 12),
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// TLB Invalidate by VA, Inner Shareable
case MISCREG_TLBIMVAIS:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiVA(tc, mbits(newVal, 31, 12), bits(newVal, 7,0),
secure_lookup, target_el);
return;
// TLBI by ASID, EL0&1, inner sharable
case MISCREG_TLBIASIDIS:
case MISCREG_TLBIMVALIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVA tlbiOp(EL1,
haveSecurity && !scr.ns,
mbits(newVal, 31, 12),
bits(newVal, 7,0));
tlbiOp.broadcast(tc);
return;
}
// TLB Invalidate by ASID match
case MISCREG_TLBIASID:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
asid = bits(newVal, 7,0);
tlbiASID(tc, asid, secure_lookup, target_el);
return;
// TLBI by address, EL0&1, inner sharable (ignored)
case MISCREG_TLBIMVAAL:
case MISCREG_TLBIMVAALIS:
// mcr tlbimvaal(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIASID tlbiOp(EL1,
haveSecurity && !scr.ns,
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// TLB Invalidate by ASID match, Inner Shareable
case MISCREG_TLBIASIDIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIASID tlbiOp(EL1,
haveSecurity && !scr.ns,
bits(newVal, 7,0));
tlbiOp.broadcast(tc);
return;
}
// mcr tlbimvaal(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
// TLB Invalidate by VA, All ASID
case MISCREG_TLBIMVAA:
case MISCREG_TLBIMVAAL:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
mbits(newVal, 31,12), false);
tlbiOp(tc);
return;
}
// TLB Invalidate by VA, All ASID, Inner Shareable
case MISCREG_TLBIMVAAIS:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
hyp = 0;
tlbiMVA(tc, mbits(newVal, 31,12), secure_lookup, hyp, target_el);
return;
// TLBI by address, EL2, hypervisor mode
case MISCREG_TLBIMVALH:
case MISCREG_TLBIMVALHIS:
// mcr tlbimvalh(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
case MISCREG_TLBIMVAALIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
mbits(newVal, 31,12), false);
tlbiOp.broadcast(tc);
return;
}
// mcr tlbimvalh(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
// TLB Invalidate by VA, Hyp mode
case MISCREG_TLBIMVAH:
case MISCREG_TLBIMVALH:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
mbits(newVal, 31,12), true);
tlbiOp(tc);
return;
}
// TLB Invalidate by VA, Hyp mode, Inner Shareable
case MISCREG_TLBIMVAHIS:
assert32(tc);
target_el = 1; // aarch32, use hyp bit
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
hyp = 1;
tlbiMVA(tc, mbits(newVal, 31,12), secure_lookup, hyp, target_el);
return;
case MISCREG_TLBIIPAS2L:
case MISCREG_TLBIIPAS2LIS:
// mcr tlbiipas2l(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
case MISCREG_TLBIMVALHIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
mbits(newVal, 31,12), true);
tlbiOp.broadcast(tc);
return;
}
// mcr tlbiipas2l(is) is invalidating all matching entries
// regardless of the level of lookup, since in gem5 we cache
// in the tlb the last level of lookup only.
// TLB Invalidate by Intermediate Physical Address, Stage 2
case MISCREG_TLBIIPAS2:
case MISCREG_TLBIIPAS2L:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIIPA tlbiOp(EL1,
haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 35, 0)) << 12);
tlbiOp(tc);
return;
}
// TLB Invalidate by Intermediate Physical Address, Stage 2,
// Inner Shareable
case MISCREG_TLBIIPAS2IS:
assert32(tc);
target_el = 1; // EL 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiIPA(tc, newVal, secure_lookup, target_el);
return;
// TLBI by address and asid, EL0&1, instruction side only
case MISCREG_TLBIIPAS2LIS:
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIIPA tlbiOp(EL1,
haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 35, 0)) << 12);
tlbiOp.broadcast(tc);
return;
}
// Instruction TLB Invalidate by VA
case MISCREG_ITLBIMVA:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getITBPtr(tc)->flushMvaAsid(mbits(newVal, 31, 12),
bits(newVal, 7,0), secure_lookup, target_el);
return;
// TLBI by address and asid, EL0&1, data side only
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
ITLBIMVA tlbiOp(EL1,
haveSecurity && !scr.ns,
mbits(newVal, 31, 12),
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// Data TLB Invalidate by VA
case MISCREG_DTLBIMVA:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getDTBPtr(tc)->flushMvaAsid(mbits(newVal, 31, 12),
bits(newVal, 7,0), secure_lookup, target_el);
return;
// TLBI by ASID, EL0&1, instruction side only
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
DTLBIMVA tlbiOp(EL1,
haveSecurity && !scr.ns,
mbits(newVal, 31, 12),
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// Instruction TLB Invalidate by ASID match
case MISCREG_ITLBIASID:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getITBPtr(tc)->flushAsid(bits(newVal, 7,0), secure_lookup,
target_el);
return;
// TLBI by ASID, EL0&1, data side only
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
ITLBIASID tlbiOp(EL1,
haveSecurity && !scr.ns,
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// Data TLB Invalidate by ASID match
case MISCREG_DTLBIASID:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
getDTBPtr(tc)->flushAsid(bits(newVal, 7,0), secure_lookup,
target_el);
return;
// Invalidate entire Non-secure Hyp/Non-Hyp Unified TLB
{
assert32(tc);
scr = readMiscReg(MISCREG_SCR, tc);
DTLBIASID tlbiOp(EL1,
haveSecurity && !scr.ns,
bits(newVal, 7,0));
tlbiOp(tc);
return;
}
// TLB Invalidate All, Non-Secure Non-Hyp
case MISCREG_TLBIALLNSNH:
{
assert32(tc);
TLBIALLN tlbiOp(EL1, false);
tlbiOp(tc);
return;
}
// TLB Invalidate All, Non-Secure Non-Hyp, Inner Shareable
case MISCREG_TLBIALLNSNHIS:
assert32(tc);
target_el = 1; // el 0 and 1 are handled together
hyp = 0;
tlbiALLN(tc, hyp, target_el);
return;
// TLBI all entries, EL2, hyp,
{
assert32(tc);
TLBIALLN tlbiOp(EL1, false);
tlbiOp.broadcast(tc);
return;
}
// TLB Invalidate All, Hyp mode
case MISCREG_TLBIALLH:
{
assert32(tc);
TLBIALLN tlbiOp(EL1, true);
tlbiOp(tc);
return;
}
// TLB Invalidate All, Hyp mode, Inner Shareable
case MISCREG_TLBIALLHIS:
assert32(tc);
target_el = 1; // aarch32, use hyp bit
hyp = 1;
tlbiALLN(tc, hyp, target_el);
return;
// AArch64 TLBI: invalidate all entries EL3
case MISCREG_TLBI_ALLE3IS:
{
assert32(tc);
TLBIALLN tlbiOp(EL1, true);
tlbiOp.broadcast(tc);
return;
}
// AArch64 TLB Invalidate All, EL3
case MISCREG_TLBI_ALLE3:
assert64(tc);
target_el = 3;
secure_lookup = true;
tlbiALL(tc, secure_lookup, target_el);
return;
{
assert64(tc);
TLBIALL tlbiOp(EL3, true);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate All, EL3, Inner Shareable
case MISCREG_TLBI_ALLE3IS:
{
assert64(tc);
TLBIALL tlbiOp(EL3, true);
tlbiOp.broadcast(tc);
return;
}
// @todo: uncomment this to enable Virtualization
// case MISCREG_TLBI_ALLE2IS:
// case MISCREG_TLBI_ALLE2:
// TLBI all entries, EL0&1
case MISCREG_TLBI_ALLE1IS:
// AArch64 TLB Invalidate All, EL1
case MISCREG_TLBI_ALLE1:
// AArch64 TLBI: invalidate all entries, stage 1, current VMID
case MISCREG_TLBI_VMALLE1IS:
case MISCREG_TLBI_VMALLE1:
// AArch64 TLBI: invalidate all entries, stages 1 & 2, current VMID
case MISCREG_TLBI_VMALLS12E1IS:
case MISCREG_TLBI_VMALLS12E1:
// @todo: handle VMID and stage 2 to enable Virtualization
assert64(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiALL(tc, secure_lookup, target_el);
return;
// AArch64 TLBI: invalidate by VA and ASID, stage 1, current VMID
// VAEx(IS) and VALEx(IS) are the same because TLBs only store entries
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate All, EL1, Inner Shareable
case MISCREG_TLBI_ALLE1IS:
case MISCREG_TLBI_VMALLE1IS:
case MISCREG_TLBI_VMALLS12E1IS:
// @todo: handle VMID and stage 2 to enable Virtualization
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
tlbiOp.broadcast(tc);
return;
}
// VAEx(IS) and VALEx(IS) are the same because TLBs
// only store entries
// from the last level of translation table walks
// @todo: handle VMID to enable Virtualization
// TLBI all entries, EL0&1
case MISCREG_TLBI_VAE3IS_Xt:
// AArch64 TLB Invalidate by VA, EL3
case MISCREG_TLBI_VAE3_Xt:
// TLBI by VA, EL3 regime stage 1, last level walk
case MISCREG_TLBI_VALE3IS_Xt:
case MISCREG_TLBI_VALE3_Xt:
assert64(tc);
target_el = 3;
asid = 0xbeef; // does not matter, tlbi is global
secure_lookup = true;
tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
asid, secure_lookup, target_el);
return;
// TLBI by VA, EL2
case MISCREG_TLBI_VAE2IS_Xt:
{
assert64(tc);
TLBIMVA tlbiOp(EL3, true,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
0xbeef);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by VA, EL3, Inner Shareable
case MISCREG_TLBI_VAE3IS_Xt:
case MISCREG_TLBI_VALE3IS_Xt:
{
assert64(tc);
TLBIMVA tlbiOp(EL3, true,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
0xbeef);
tlbiOp.broadcast(tc);
return;
}
// AArch64 TLB Invalidate by VA, EL2
case MISCREG_TLBI_VAE2_Xt:
// TLBI by VA, EL2, stage1 last level walk
case MISCREG_TLBI_VALE2IS_Xt:
case MISCREG_TLBI_VALE2_Xt:
assert64(tc);
target_el = 2;
asid = 0xbeef; // does not matter, tlbi is global
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
asid, secure_lookup, target_el);
return;
// TLBI by VA EL1 & 0, stage1, ASID, current VMID
case MISCREG_TLBI_VAE1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVA tlbiOp(EL2, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
0xbeef);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by VA, EL2, Inner Shareable
case MISCREG_TLBI_VAE2IS_Xt:
case MISCREG_TLBI_VALE2IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVA tlbiOp(EL2, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
0xbeef);
tlbiOp.broadcast(tc);
return;
}
// AArch64 TLB Invalidate by VA, EL1
case MISCREG_TLBI_VAE1_Xt:
case MISCREG_TLBI_VALE1IS_Xt:
case MISCREG_TLBI_VALE1_Xt:
assert64(tc);
asid = bits(newVal, 63, 48);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
asid, secure_lookup, target_el);
return;
// AArch64 TLBI: invalidate by ASID, stage 1, current VMID
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
bits(newVal, 55, 48);
TLBIMVA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
asid);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by VA, EL1, Inner Shareable
case MISCREG_TLBI_VAE1IS_Xt:
case MISCREG_TLBI_VALE1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
bits(newVal, 55, 48);
TLBIMVA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12,
asid);
tlbiOp.broadcast(tc);
return;
}
// AArch64 TLB Invalidate by ASID, EL1
// @todo: handle VMID to enable Virtualization
case MISCREG_TLBI_ASIDE1IS_Xt:
case MISCREG_TLBI_ASIDE1_Xt:
assert64(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
asid = bits(newVal, 63, 48);
tlbiASID(tc, asid, secure_lookup, target_el);
return;
// AArch64 TLBI: invalidate by VA, ASID, stage 1, current VMID
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
bits(newVal, 55, 48);
TLBIASID tlbiOp(EL1, haveSecurity && !scr.ns, asid);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by ASID, EL1, Inner Shareable
case MISCREG_TLBI_ASIDE1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
bits(newVal, 55, 48);
TLBIASID tlbiOp(EL1, haveSecurity && !scr.ns, asid);
tlbiOp.broadcast(tc);
return;
}
// VAAE1(IS) and VAALE1(IS) are the same because TLBs only store
// entries from the last level of translation table walks
// @todo: handle VMID to enable Virtualization
case MISCREG_TLBI_VAAE1IS_Xt:
// AArch64 TLB Invalidate by VA, All ASID, EL1
case MISCREG_TLBI_VAAE1_Xt:
case MISCREG_TLBI_VAALE1IS_Xt:
case MISCREG_TLBI_VAALE1_Xt:
assert64(tc);
target_el = 1; // el 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiMVA(tc,
((Addr)bits(newVal, 43, 0)) << 12,
secure_lookup, false, target_el);
return;
// AArch64 TLBI: invalidate by IPA, stage 2, current VMID
case MISCREG_TLBI_IPAS2LE1IS_Xt:
case MISCREG_TLBI_IPAS2LE1_Xt:
case MISCREG_TLBI_IPAS2E1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12, false);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by VA, All ASID, EL1, Inner Shareable
case MISCREG_TLBI_VAAE1IS_Xt:
case MISCREG_TLBI_VAALE1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 43, 0)) << 12, false);
tlbiOp.broadcast(tc);
return;
}
// AArch64 TLB Invalidate by Intermediate Physical Address,
// Stage 2, EL1
case MISCREG_TLBI_IPAS2E1_Xt:
assert64(tc);
target_el = 1; // EL 0 and 1 are handled together
scr = readMiscReg(MISCREG_SCR, tc);
secure_lookup = haveSecurity && !scr.ns;
tlbiIPA(tc, newVal, secure_lookup, target_el);
return;
case MISCREG_TLBI_IPAS2LE1_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIIPA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 35, 0)) << 12);
tlbiOp(tc);
return;
}
// AArch64 TLB Invalidate by Intermediate Physical Address,
// Stage 2, EL1, Inner Shareable
case MISCREG_TLBI_IPAS2E1IS_Xt:
case MISCREG_TLBI_IPAS2LE1IS_Xt:
{
assert64(tc);
scr = readMiscReg(MISCREG_SCR, tc);
TLBIIPA tlbiOp(EL1, haveSecurity && !scr.ns,
static_cast<Addr>(bits(newVal, 35, 0)) << 12);
tlbiOp.broadcast(tc);
return;
}
case MISCREG_ACTLR:
warn("Not doing anything for write of miscreg ACTLR\n");
break;
@@ -1718,128 +1935,6 @@ ISA::setMiscReg(int misc_reg, const MiscReg &val, ThreadContext *tc)
setMiscRegNoEffect(misc_reg, newVal);
}
// Invalidate, on every PE in the system, all TLB entries matching the
// given VA and ASID (removed by this commit in favour of TLBIMVA).
void
ISA::tlbiVA(ThreadContext *tc, Addr va, uint16_t asid,
            bool secure_lookup, uint8_t target_el)
{
    // Without the large-ASID extension only the low 8 ASID bits are valid.
    if (!haveLargeAsid64)
        asid &= mask(8);
    System *sys = tc->getSystemPtr();
    for (int x = 0; x < sys->numContexts(); x++) {
        ThreadContext *oc = sys->getThreadContext(x);
        getITBPtr(oc)->flushMvaAsid(va, asid,
                                    secure_lookup, target_el);
        getDTBPtr(oc)->flushMvaAsid(va, asid,
                                    secure_lookup, target_el);
        // Mirror the flush into the checker CPU's TLBs, if one is attached.
        CheckerCPU *checker = oc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushMvaAsid(
                va, asid, secure_lookup, target_el);
            getDTBPtr(checker)->flushMvaAsid(
                va, asid, secure_lookup, target_el);
        }
    }
}
// Invalidate every TLB entry matching the given security state and
// target EL, on every PE in the system (removed by this commit in
// favour of TLBIALL).
void
ISA::tlbiALL(ThreadContext *tc, bool secure_lookup, uint8_t target_el)
{
    System *sys = tc->getSystemPtr();
    for (int x = 0; x < sys->numContexts(); x++) {
        ThreadContext *oc = sys->getThreadContext(x);
        getITBPtr(oc)->flushAllSecurity(secure_lookup, target_el);
        getDTBPtr(oc)->flushAllSecurity(secure_lookup, target_el);
        // If CheckerCPU is connected, need to notify it of a flush
        CheckerCPU *checker = oc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushAllSecurity(secure_lookup,
                                                 target_el);
            getDTBPtr(checker)->flushAllSecurity(secure_lookup,
                                                 target_el);
        }
    }
}
// Invalidate all Non-secure (Hyp or non-Hyp, selected by 'hyp') TLB
// entries on every PE in the system (removed by this commit in favour
// of TLBIALLN).
void
ISA::tlbiALLN(ThreadContext *tc, bool hyp, uint8_t target_el)
{
    System *sys = tc->getSystemPtr();
    for (int x = 0; x < sys->numContexts(); x++) {
        ThreadContext *oc = sys->getThreadContext(x);
        getITBPtr(oc)->flushAllNs(hyp, target_el);
        getDTBPtr(oc)->flushAllNs(hyp, target_el);
        // Keep an attached checker CPU's TLBs coherent with the flush.
        CheckerCPU *checker = oc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushAllNs(hyp, target_el);
            getDTBPtr(checker)->flushAllNs(hyp, target_el);
        }
    }
}
// Invalidate, on every PE in the system, all TLB entries matching the
// given VA regardless of ASID (removed by this commit in favour of
// TLBIMVAA).
void
ISA::tlbiMVA(ThreadContext *tc, Addr va, bool secure_lookup, bool hyp,
             uint8_t target_el)
{
    System *sys = tc->getSystemPtr();
    for (int x = 0; x < sys->numContexts(); x++) {
        ThreadContext *oc = sys->getThreadContext(x);
        getITBPtr(oc)->flushMva(va, secure_lookup, hyp, target_el);
        getDTBPtr(oc)->flushMva(va, secure_lookup, hyp, target_el);
        // Keep an attached checker CPU's TLBs coherent with the flush.
        CheckerCPU *checker = oc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushMva(va, secure_lookup, hyp, target_el);
            getDTBPtr(checker)->flushMva(va, secure_lookup, hyp, target_el);
        }
    }
}
// Invalidate stage-2 TLB entries by Intermediate Physical Address on
// every PE in the system. The IPA is bits [35:0] of the written
// register value, shifted to a page-aligned address (removed by this
// commit in favour of TLBIIPA).
void
ISA::tlbiIPA(ThreadContext *tc, MiscReg newVal, bool secure_lookup,
             uint8_t target_el)
{
    System *sys = tc->getSystemPtr();
    for (auto x = 0; x < sys->numContexts(); x++) {
        // NOTE(review): the 'tc' parameter is deliberately reused as the
        // loop cursor here, shadowing the caller's context.
        tc = sys->getThreadContext(x);
        Addr ipa = ((Addr) bits(newVal, 35, 0)) << 12;
        getITBPtr(tc)->flushIpaVmid(ipa,
            secure_lookup, false, target_el);
        getDTBPtr(tc)->flushIpaVmid(ipa,
            secure_lookup, false, target_el);
        // Keep an attached checker CPU's TLBs coherent with the flush.
        CheckerCPU *checker = tc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushIpaVmid(ipa,
                secure_lookup, false, target_el);
            getDTBPtr(checker)->flushIpaVmid(ipa,
                secure_lookup, false, target_el);
        }
    }
}
// Invalidate, on every PE in the system, all TLB entries matching the
// given ASID (removed by this commit in favour of TLBIASID).
void
ISA::tlbiASID(ThreadContext *tc, uint16_t asid, bool secure_lookup,
              uint8_t target_el)
{
    // Without the large-ASID extension only the low 8 ASID bits are valid.
    if (!haveLargeAsid64)
        asid &= mask(8);
    System *sys = tc->getSystemPtr();
    for (auto x = 0; x < sys->numContexts(); x++) {
        // NOTE(review): the 'tc' parameter is deliberately reused as the
        // loop cursor here, shadowing the caller's context.
        tc = sys->getThreadContext(x);
        getITBPtr(tc)->flushAsid(asid, secure_lookup, target_el);
        getDTBPtr(tc)->flushAsid(asid, secure_lookup, target_el);
        // Keep an attached checker CPU's TLBs coherent with the flush.
        CheckerCPU *checker = tc->getCheckerCpuPtr();
        if (checker) {
            getITBPtr(checker)->flushAsid(asid, secure_lookup, target_el);
            getDTBPtr(checker)->flushAsid(asid, secure_lookup, target_el);
        }
    }
}
BaseISADevice &
ISA::getGenericTimer(ThreadContext *tc)
{

View File

@@ -391,22 +391,6 @@ namespace ArmISA
assert(!cpsr.width);
}
void tlbiVA(ThreadContext *tc, MiscReg newVal, uint16_t asid,
bool secure_lookup, uint8_t target_el);
void tlbiALL(ThreadContext *tc, bool secure_lookup, uint8_t target_el);
void tlbiALLN(ThreadContext *tc, bool hyp, uint8_t target_el);
void tlbiMVA(ThreadContext *tc, MiscReg newVal, bool secure_lookup,
bool hyp, uint8_t target_el);
void tlbiIPA(ThreadContext *tc, MiscReg newVal, bool secure_lookup,
uint8_t target_el);
void tlbiASID(ThreadContext *tc, uint16_t asid, bool secure_lookup,
uint8_t target_el);
public:
void clear();
void clear64(const ArmISAParams *p);

View File

@@ -445,6 +445,24 @@ private:
LookupLevel lookup_level);
};
// Downcast a thread/checker context's generic instruction-TLB pointer
// to the Arm TLB type. Uses static_cast (the previous isa.cc-local
// helpers used dynamic_cast); the assert only guards against a null
// pointer from getITBPtr(), not against a wrong dynamic type.
template<typename T>
TLB *
getITBPtr(T *tc)
{
    auto tlb = static_cast<TLB *>(tc->getITBPtr());
    assert(tlb);
    return tlb;
}
// Downcast a thread/checker context's generic data-TLB pointer to the
// Arm TLB type. See the matching instruction-side helper: static_cast
// is used, so the assert only guards against a null pointer.
template<typename T>
TLB *
getDTBPtr(T *tc)
{
    auto tlb = static_cast<TLB *>(tc->getDTBPtr());
    assert(tlb);
    return tlb;
}
} // namespace ArmISA
#endif // __ARCH_ARM_TLB_HH__

173
src/arch/arm/tlbi_op.cc Normal file
View File

@@ -0,0 +1,173 @@
/*
* Copyright (c) 2018 ARM Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Giacomo Travaglini
*/
#include "arch/arm/tlbi_op.hh"
#include "arch/arm/tlb.hh"
#include "cpu/checker/cpu.hh"
namespace ArmISA {

// TLB Invalidate All: flush every entry matching the op's security
// state and target EL from both the I-side and D-side TLBs of this PE.
void
TLBIALL::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushAllSecurity(secureLookup, targetEL);
    getDTBPtr(tc)->flushAllSecurity(secureLookup, targetEL);

    // If CheckerCPU is connected, need to notify it of a flush
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushAllSecurity(secureLookup,
                                             targetEL);
        getDTBPtr(checker)->flushAllSecurity(secureLookup,
                                             targetEL);
    }
}

// Instruction-side-only Invalidate All: touches only this PE's I-TLB.
// NOTE(review): unlike TLBIALL, the checker CPU's TLBs are not
// flushed here — presumably intentional for the PE-local legacy ops;
// confirm against checker usage.
void
ITLBIALL::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushAllSecurity(secureLookup, targetEL);
}

// Data-side-only Invalidate All: touches only this PE's D-TLB.
void
DTLBIALL::operator()(ThreadContext* tc)
{
    getDTBPtr(tc)->flushAllSecurity(secureLookup, targetEL);
}

// Invalidate by ASID match in both TLBs of this PE, mirroring the
// flush into an attached checker CPU.
void
TLBIASID::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushAsid(asid, secureLookup, targetEL);
    getDTBPtr(tc)->flushAsid(asid, secureLookup, targetEL);
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushAsid(asid, secureLookup, targetEL);
        getDTBPtr(checker)->flushAsid(asid, secureLookup, targetEL);
    }
}

// Instruction-side-only invalidate by ASID match.
void
ITLBIASID::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushAsid(asid, secureLookup, targetEL);
}

// Data-side-only invalidate by ASID match.
void
DTLBIASID::operator()(ThreadContext* tc)
{
    getDTBPtr(tc)->flushAsid(asid, secureLookup, targetEL);
}

// Invalidate all Non-secure entries (Hyp or non-Hyp per the op's hyp
// flag) in both TLBs of this PE and in any attached checker CPU.
void
TLBIALLN::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushAllNs(hyp, targetEL);
    getDTBPtr(tc)->flushAllNs(hyp, targetEL);
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushAllNs(hyp, targetEL);
        getDTBPtr(checker)->flushAllNs(hyp, targetEL);
    }
}

// Invalidate by VA, all ASIDs, in both TLBs of this PE and in any
// attached checker CPU.
void
TLBIMVAA::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushMva(addr, secureLookup, hyp, targetEL);
    getDTBPtr(tc)->flushMva(addr, secureLookup, hyp, targetEL);
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushMva(addr, secureLookup, hyp, targetEL);
        getDTBPtr(checker)->flushMva(addr, secureLookup, hyp, targetEL);
    }
}

// Invalidate by VA and ASID in both TLBs of this PE and in any
// attached checker CPU.
void
TLBIMVA::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushMvaAsid(addr, asid,
                                secureLookup, targetEL);
    getDTBPtr(tc)->flushMvaAsid(addr, asid,
                                secureLookup, targetEL);
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushMvaAsid(
            addr, asid, secureLookup, targetEL);
        getDTBPtr(checker)->flushMvaAsid(
            addr, asid, secureLookup, targetEL);
    }
}

// Instruction-side-only invalidate by VA and ASID.
void
ITLBIMVA::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushMvaAsid(
        addr, asid, secureLookup, targetEL);
}

// Data-side-only invalidate by VA and ASID.
void
DTLBIMVA::operator()(ThreadContext* tc)
{
    getDTBPtr(tc)->flushMvaAsid(
        addr, asid, secureLookup, targetEL);
}

// Invalidate stage-2 entries by Intermediate Physical Address in both
// TLBs of this PE and in any attached checker CPU. The 'false'
// argument is forwarded to flushIpaVmid unchanged — see that API for
// its meaning.
void
TLBIIPA::operator()(ThreadContext* tc)
{
    getITBPtr(tc)->flushIpaVmid(addr,
        secureLookup, false, targetEL);
    getDTBPtr(tc)->flushIpaVmid(addr,
        secureLookup, false, targetEL);
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushIpaVmid(addr,
            secureLookup, false, targetEL);
        getDTBPtr(checker)->flushIpaVmid(addr,
            secureLookup, false, targetEL);
    }
}

} // namespace ArmISA

266
src/arch/arm/tlbi_op.hh Normal file
View File

@@ -0,0 +1,266 @@
/*
* Copyright (c) 2018 ARM Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Giacomo Travaglini
*/
#ifndef __ARCH_ARM_TLBI_HH__
#define __ARCH_ARM_TLBI_HH__
#include "arch/arm/system.hh"
#include "arch/arm/tlb.hh"
#include "cpu/thread_context.hh"
/**
* @file
* The file contains the definition of a set of TLB Invalidate
* Instructions. Those are the ISA interface for TLB flushing
* operations.
*/
namespace ArmISA {

/**
 * Base class for a TLB Invalidate operation. An instance captures the
 * parameters of one TLBI instruction (target EL, security state);
 * invoking operator() applies the invalidation to a single PE's TLBs,
 * while broadcast() applies it to every PE in the system — this is how
 * the Inner-Shareable (…IS) variants are implemented.
 */
class TLBIOp
{
  public:
    TLBIOp(ExceptionLevel _targetEL, bool _secure)
      : secureLookup(_secure), targetEL(_targetEL)
    {}

    virtual ~TLBIOp() {}

    // Apply the invalidation to one thread context's TLBs.
    // NOTE(review): the default body is a no-op rather than pure
    // virtual — confirm whether a base-class instance is ever invoked.
    virtual void operator()(ThreadContext* tc) {}

    /**
     * Broadcast the TLB Invalidate operation to all
     * TLBs in the Arm system.
     * @param tc Thread Context
     */
    void
    broadcast(ThreadContext *tc)
    {
        System *sys = tc->getSystemPtr();
        for (int x = 0; x < sys->numContexts(); x++) {
            ThreadContext *oc = sys->getThreadContext(x);
            (*this)(oc);
        }
    }

  protected:
    bool secureLookup;        // security-state selector forwarded to the
                              // TLB flush methods
    ExceptionLevel targetEL;  // exception level the invalidation targets
};

/** TLB Invalidate All */
class TLBIALL : public TLBIOp
{
  public:
    TLBIALL(ExceptionLevel _targetEL, bool _secure)
      : TLBIOp(_targetEL, _secure)
    {}

    void operator()(ThreadContext* tc) override;
};

/** Instruction TLB Invalidate All */
class ITLBIALL : public TLBIOp
{
  public:
    ITLBIALL(ExceptionLevel _targetEL, bool _secure)
      : TLBIOp(_targetEL, _secure)
    {}

    // Deleted: the I-side-only legacy op has no Inner-Shareable form,
    // so it must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;
};

/** Data TLB Invalidate All */
class DTLBIALL : public TLBIOp
{
  public:
    DTLBIALL(ExceptionLevel _targetEL, bool _secure)
      : TLBIOp(_targetEL, _secure)
    {}

    // Deleted: the D-side-only legacy op must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;
};

/** TLB Invalidate by ASID match */
class TLBIASID : public TLBIOp
{
  public:
    TLBIASID(ExceptionLevel _targetEL, bool _secure, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), asid(_asid)
    {}

    void operator()(ThreadContext* tc) override;

  protected:
    uint16_t asid;  // ASID whose entries are invalidated
};

/** Instruction TLB Invalidate by ASID match */
class ITLBIASID : public TLBIOp
{
  public:
    ITLBIASID(ExceptionLevel _targetEL, bool _secure, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), asid(_asid)
    {}

    // Deleted: the I-side-only legacy op must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;

  protected:
    uint16_t asid;  // ASID whose entries are invalidated
};

/** Data TLB Invalidate by ASID match */
class DTLBIASID : public TLBIOp
{
  public:
    DTLBIASID(ExceptionLevel _targetEL, bool _secure, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), asid(_asid)
    {}

    // Deleted: the D-side-only legacy op must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;

  protected:
    uint16_t asid;  // ASID whose entries are invalidated
};

/** TLB Invalidate All, Non-Secure */
class TLBIALLN : public TLBIOp
{
  public:
    // Always constructs the base with secure=false: this op only ever
    // targets Non-secure entries.
    TLBIALLN(ExceptionLevel _targetEL, bool _hyp)
      : TLBIOp(_targetEL, false), hyp(_hyp)
    {}

    void operator()(ThreadContext* tc) override;

  protected:
    bool hyp;  // select Hyp-mode vs non-Hyp Non-secure entries
};

/** TLB Invalidate by VA, All ASID */
class TLBIMVAA : public TLBIOp
{
  public:
    TLBIMVAA(ExceptionLevel _targetEL, bool _secure,
             Addr _addr, bool _hyp)
      : TLBIOp(_targetEL, _secure), addr(_addr), hyp(_hyp)
    {}

    void operator()(ThreadContext* tc) override;

  protected:
    Addr addr;  // page-aligned virtual address to invalidate
    bool hyp;   // whether the op targets the Hyp translation regime
};

/** TLB Invalidate by VA */
class TLBIMVA : public TLBIOp
{
  public:
    TLBIMVA(ExceptionLevel _targetEL, bool _secure,
            Addr _addr, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), addr(_addr), asid(_asid)
    {}

    void operator()(ThreadContext* tc) override;

  protected:
    Addr addr;      // page-aligned virtual address to invalidate
    uint16_t asid;  // ASID the entry must match
};

/** Instruction TLB Invalidate by VA */
class ITLBIMVA : public TLBIOp
{
  public:
    ITLBIMVA(ExceptionLevel _targetEL, bool _secure,
             Addr _addr, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), addr(_addr), asid(_asid)
    {}

    // Deleted: the I-side-only legacy op must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;

  protected:
    Addr addr;      // page-aligned virtual address to invalidate
    uint16_t asid;  // ASID the entry must match
};

/** Data TLB Invalidate by VA */
class DTLBIMVA : public TLBIOp
{
  public:
    DTLBIMVA(ExceptionLevel _targetEL, bool _secure,
             Addr _addr, uint16_t _asid)
      : TLBIOp(_targetEL, _secure), addr(_addr), asid(_asid)
    {}

    // Deleted: the D-side-only legacy op must never be broadcast.
    void broadcast(ThreadContext *tc) = delete;

    void operator()(ThreadContext* tc) override;

  protected:
    Addr addr;      // page-aligned virtual address to invalidate
    uint16_t asid;  // ASID the entry must match
};

/** TLB Invalidate by Intermediate Physical Address */
class TLBIIPA : public TLBIOp
{
  public:
    TLBIIPA(ExceptionLevel _targetEL, bool _secure, Addr _addr)
      : TLBIOp(_targetEL, _secure), addr(_addr)
    {}

    void operator()(ThreadContext* tc) override;

  protected:
    Addr addr;  // page-aligned intermediate physical address
};

} // namespace ArmISA
#endif //__ARCH_ARM_TLBI_HH__