diff --git a/src/arch/arm/mmu.cc b/src/arch/arm/mmu.cc index 257b3e6de5..d2fc706e2c 100644 --- a/src/arch/arm/mmu.cc +++ b/src/arch/arm/mmu.cc @@ -1089,8 +1089,16 @@ MMU::updateMiscReg(ThreadContext *tc, itbStage2->setVMID(state.vmid); dtbStage2->setVMID(state.vmid); - getITBPtr()->setVMID(state.vmid); - getDTBPtr()->setVMID(state.vmid); + + for (auto tlb : instruction) { + static_cast<TLB*>(tlb)->setVMID(state.vmid); + } + for (auto tlb : data) { + static_cast<TLB*>(tlb)->setVMID(state.vmid); + } + for (auto tlb : unified) { + static_cast<TLB*>(tlb)->setVMID(state.vmid); + } miscRegContext = tc->contextId(); } @@ -1309,6 +1317,16 @@ MMU::getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, is_secure, tran_type, stage2 ? s2State : s1State); } +TlbEntry* +MMU::lookup(Addr va, uint16_t asid, vmid_t vmid, bool hyp, bool secure, + bool functional, bool ignore_asn, ExceptionLevel target_el, + bool in_host, bool stage2, BaseMMU::Mode mode) +{ + TLB *tlb = getTlb(mode, stage2); + return tlb->multiLookup(va, asid, vmid, hyp, secure, functional, + ignore_asn, target_el, in_host, mode); +} + Fault MMU::getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, @@ -1331,9 +1349,9 @@ MMU::getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, vaddr = vaddr_tainted; } - auto tlb = getTlb(mode, state.isStage2); - *te = tlb->lookup(vaddr, state.asid, state.vmid, state.isHyp, is_secure, - false, false, target_el, false, mode); + *te = lookup(vaddr, state.asid, state.vmid, state.isHyp, is_secure, false, + false, target_el, false, state.isStage2, mode); + if (*te == NULL) { if (req->isPrefetch()) { // if the request is a prefetch don't attempt to fill the TLB or go @@ -1361,10 +1379,8 @@ MMU::getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, return fault; } - *te = tlb->lookup(vaddr, state.asid, state.vmid, state.isHyp, is_secure, - true, false, target_el, false, 
mode); - if (!*te) - tlb->printTlb(); + *te = lookup(vaddr, state.asid, state.vmid, state.isHyp, is_secure, + true, false, target_el, false, state.isStage2, mode); assert(*te); } return NoFault; diff --git a/src/arch/arm/mmu.hh b/src/arch/arm/mmu.hh index fddaa1f69d..0e1fd87e76 100644 --- a/src/arch/arm/mmu.hh +++ b/src/arch/arm/mmu.hh @@ -269,8 +269,15 @@ class MMU : public BaseMMU void flushStage1(const OP &tlbi_op) { - iflush(tlbi_op); - dflush(tlbi_op); + for (auto tlb : instruction) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } + for (auto tlb : data) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } + for (auto tlb : unified) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } } template <typename OP> @@ -285,14 +292,24 @@ void iflush(const OP &tlbi_op) { - getITBPtr()->flush(tlbi_op); + for (auto tlb : instruction) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } + for (auto tlb : unified) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } } template <typename OP> void dflush(const OP &tlbi_op) { - getDTBPtr()->flush(tlbi_op); + for (auto tlb : data) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } + for (auto tlb : unified) { + static_cast<TLB*>(tlb)->flush(tlbi_op); + } } void @@ -325,6 +342,24 @@ class MMU : public BaseMMU static ExceptionLevel tranTypeEL(CPSR cpsr, ArmTranslationType type); public: + /** Lookup an entry in the TLB + * @param vpn virtual address + * @param asn context id/address space id to use + * @param vmid The virtual machine ID used for stage 2 translation + * @param secure if the lookup is secure + * @param hyp if the lookup is done from hyp mode + * @param functional if the lookup should modify state + * @param ignore_asn if on lookup asn should be ignored + * @param target_el selecting the translation regime + * @param in_host if we are in host (EL2&0 regime) + * @param mode to differentiate between read/writes/fetches. 
+ * @return pointer to TLB entry if it exists + */ + TlbEntry *lookup(Addr vpn, uint16_t asn, vmid_t vmid, bool hyp, + bool secure, bool functional, + bool ignore_asn, ExceptionLevel target_el, + bool in_host, bool stage2, BaseMMU::Mode mode); + Fault getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, diff --git a/src/arch/arm/table_walker.cc b/src/arch/arm/table_walker.cc index 2d1cfa6b7b..4cc0fffb02 100644 --- a/src/arch/arm/table_walker.cc +++ b/src/arch/arm/table_walker.cc @@ -478,9 +478,9 @@ TableWalker::processWalkWrapper() // Check if a previous walk filled this request already // @TODO Should this always be the TLB or should we look in the stage2 TLB? - TlbEntry* te = tlb->lookup(currState->vaddr, currState->asid, - currState->vmid, currState->isHyp, currState->isSecure, true, false, - currState->el, false, BaseMMU::Read); + TlbEntry* te = mmu->lookup(currState->vaddr, currState->asid, + currState->vmid, currState->isHyp, currState->isSecure, true, false, + currState->el, false, isStage2, currState->mode); // Check if we still need to have a walk for this request. 
If the requesting // instruction has been squashed, or a previous walk has filled the TLB with @@ -544,9 +544,9 @@ TableWalker::processWalkWrapper() // peak at the next one if (pendingQueue.size()) { currState = pendingQueue.front(); - te = tlb->lookup(currState->vaddr, currState->asid, + te = mmu->lookup(currState->vaddr, currState->asid, currState->vmid, currState->isHyp, currState->isSecure, true, - false, currState->el, false, BaseMMU::Read); + false, currState->el, false, isStage2, currState->mode); } else { // Terminate the loop, nothing more to do currState = NULL; @@ -2354,8 +2354,8 @@ TableWalker::insertTableEntry(DescriptorBase &descriptor, bool long_descriptor) descriptor.lookupLevel, static_cast<uint8_t>(descriptor.domain()), descriptor.getRawData()); - // Insert the entry into the TLB - tlb->insert(te); + // Insert the entry into the TLBs + tlb->multiInsert(te); if (!currState->timing) { currState->tc = NULL; currState->req = NULL; diff --git a/src/arch/arm/tlb.cc b/src/arch/arm/tlb.cc index 666836b536..91437232d0 100644 --- a/src/arch/arm/tlb.cc +++ b/src/arch/arm/tlb.cc @@ -141,6 +141,41 @@ TLB::lookup(Addr va, uint16_t asn, vmid_t vmid, bool hyp, bool secure, return retval; } +TlbEntry* +TLB::multiLookup(Addr va, uint16_t asid, vmid_t vmid, bool hyp, bool secure, + bool functional, bool ignore_asn, ExceptionLevel target_el, + bool in_host, BaseMMU::Mode mode) +{ + TlbEntry* te = lookup(va, asid, vmid, hyp, secure, functional, + ignore_asn, target_el, in_host, mode); + + if (te) { + checkPromotion(te, mode); + } else { + if (auto tlb = static_cast<TLB*>(nextLevel())) { + te = tlb->multiLookup(va, asid, vmid, hyp, secure, functional, + ignore_asn, target_el, in_host, mode); + if (te && !functional) + insert(*te); + } + } + + return te; +} + +void +TLB::checkPromotion(TlbEntry *entry, BaseMMU::Mode mode) +{ + TypeTLB acc_type = (mode == BaseMMU::Execute) ? 
+ TypeTLB::instruction : TypeTLB::data; + + // Hitting an instruction TLB entry on a data access or + // a data TLB entry on an instruction access: + // promoting the entry to unified + if (!(entry->type & acc_type)) + entry->type = TypeTLB::unified; +} + // insert a new TLB entry void TLB::insert(TlbEntry &entry) @@ -162,8 +197,7 @@ TLB::insert(TlbEntry &entry) table[size-1].nstid, table[size-1].global, table[size-1].isHyp, table[size-1].el); - //inserting to MRU position and evicting the LRU one - + // inserting to MRU position and evicting the LRU one for (int i = size - 1; i > 0; --i) table[i] = table[i-1]; table[0] = entry; @@ -172,6 +206,16 @@ ppRefills->notify(1); } +void +TLB::multiInsert(TlbEntry &entry) +{ + insert(entry); + + if (auto next_level = static_cast<TLB*>(nextLevel())) { + next_level->multiInsert(entry); + } +} + void TLB::printTlb() const { diff --git a/src/arch/arm/tlb.hh b/src/arch/arm/tlb.hh index fa5e894c42..4b3c8297da 100644 --- a/src/arch/arm/tlb.hh +++ b/src/arch/arm/tlb.hh @@ -175,6 +175,26 @@ class TLB : public BaseTLB bool ignore_asn, ExceptionLevel target_el, bool in_host, BaseMMU::Mode mode); + /** Lookup an entry in the TLB and in the next levels by + * following the nextLevel pointer + * + * @param vpn virtual address + * @param asn context id/address space id to use + * @param vmid The virtual machine ID used for stage 2 translation + * @param secure if the lookup is secure + * @param hyp if the lookup is done from hyp mode + * @param functional if the lookup should modify state + * @param ignore_asn if on lookup asn should be ignored + * @param target_el selecting the translation regime + * @param in_host if we are in host (EL2&0 regime) + * @param mode to differentiate between read/writes/fetches. 
+ * @return pointer to TLB entry if it exists + */ + TlbEntry *multiLookup(Addr vpn, uint16_t asn, vmid_t vmid, bool hyp, + bool secure, bool functional, + bool ignore_asn, ExceptionLevel target_el, + bool in_host, BaseMMU::Mode mode); + virtual ~TLB(); void takeOverFrom(BaseTLB *otlb) override; @@ -187,8 +207,12 @@ class TLB : public BaseTLB void setVMID(vmid_t _vmid) { vmid = _vmid; } + /** Insert a PTE in the current TLB */ void insert(TlbEntry &pte); + /** Insert a PTE in the current TLB and in the higher levels */ + void multiInsert(TlbEntry &pte); + /** Reset the entire TLB. Used for CPU switching to prevent stale * translations after multiple switches */ @@ -301,6 +325,13 @@ class TLB : public BaseTLB void _flushMva(Addr mva, uint64_t asn, bool secure_lookup, bool ignore_asn, ExceptionLevel target_el, bool in_host, TypeTLB entry_type); + + /** Check if the tlb entry passed as an argument needs to + * be "promoted" as a unified entry: + * this should happen if we are hitting an instruction TLB entry on a + * data access or a data TLB entry on an instruction access: + */ + void checkPromotion(TlbEntry *entry, BaseMMU::Mode mode); }; } // namespace ArmISA