#include "debug/TLB.hh"
#include "debug/TLBVerbose.hh"

using namespace ArmISA;
      itbStage2(p.stage2_itb), dtbStage2(p.stage2_dtb),
      itbWalker(p.itb_walker), dtbWalker(p.dtb_walker),
      itbStage2Walker(p.stage2_itb_walker),
      dtbStage2Walker(p.stage2_dtb_walker),
      s1State(this, false), s2State(this, true),
    const Addr paddr = req->getPaddr();
    req->setLocalAccessor(
                 Translation *translation, bool &delay, bool timing,

    Addr vaddr_tainted = req->getVaddr();
        vaddr = vaddr_tainted;

        return std::make_shared<DataAbort>(

    if (const auto pte = p->pTable->lookup(vaddr); !pte) {
        return std::make_shared<GenericPageTableFault>(vaddr_tainted);
    req->setPaddr(pte->paddr + p->pTable->pageOffset(vaddr));
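Aside: in SE (syscall emulation) mode the fragment above falls back to the process page table and composes the physical address from the PTE's frame base plus the page offset of the virtual address. A minimal sketch of that composition, assuming 4 KiB pages; composePaddr, kPageShift and kPageMask are illustrative names, not gem5 API:

#include <cassert>
#include <cstdint>
#include <cstdio>

// Illustrative only: combine a page-aligned frame base with the page offset
// of the virtual address, as translateSe() does via
// pte->paddr + pTable->pageOffset(vaddr). 4 KiB pages are assumed.
static constexpr uint64_t kPageShift = 12;
static constexpr uint64_t kPageMask = (1ULL << kPageShift) - 1;

uint64_t composePaddr(uint64_t frame_base, uint64_t vaddr)
{
    assert((frame_base & kPageMask) == 0); // frame base must be page aligned
    return frame_base + (vaddr & kPageMask);
}

int main()
{
    // VA 0x7fff1234 mapped to the frame at PA 0x80000000 -> PA 0x80000234
    std::printf("%#llx\n",
                (unsigned long long)composePaddr(0x80000000ULL, 0x7fff1234ULL));
    return 0;
}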
    if (req->isCacheMaintenance()) {

    if (state.isStage2 && req->isPTWalk() && state.hcr.ptw &&
        return std::make_shared<DataAbort>(
            state.isStage2, tranMethod);

        return std::make_shared<DataAbort>(

    if (te->nonCacheable) {
        if (req->isPrefetch()) {
            return std::make_shared<PrefetchAbort>(
                state.isStage2, tranMethod);

    if (!te->longDescFormat) {
        switch ((state.dacr >> (static_cast<uint8_t>(te->domain) * 2)) & 0x3) {
            DPRINTF(TLB, "TLB Fault: Data abort on domain. DACR: %#x"
                    " domain: %#x write:%d\n", state.dacr,
                    static_cast<uint8_t>(te->domain), is_write);
                return std::make_shared<PrefetchAbort>(
                    state.isStage2, tranMethod);
                return std::make_shared<DataAbort>(
                    state.isStage2, tranMethod);
            panic("UNPRED domain\n");
    uint8_t ap = te->longDescFormat ? te->ap << 1 : te->ap;
    uint8_t hap = te->hap;

    if (state.sctlr.afe == 1 || te->longDescFormat)

    bool isWritable = true;

    if (state.isStage2) {

            DPRINTF(TLB, "Access permissions 0, checking rs:%#x\n",
                    (int)state.sctlr.rs);
            if (!state.sctlr.xp) {
                switch ((int)state.sctlr.rs) {
                        abt = is_write || !is_priv;
            abt = !is_priv && is_write;
            isWritable = is_priv;
            panic("UNPRED permissions\n");
            abt = !is_priv || is_write;
            panic("Unknown permissions %#x\n", ap);

    bool hapAbt = is_write ? !(hap & 2) : !(hap & 1);
    bool xn = te->xn || (isWritable && state.sctlr.wxn) ||
        (ap == 3 && state.sctlr.uwxn && is_priv);
    if (is_fetch && (abt || xn ||
        (te->longDescFormat && te->pxn && is_priv) ||
        DPRINTF(TLB, "TLB Fault: Prefetch abort on permission check. AP:%d "
                "priv:%d write:%d ns:%d sif:%d sctlr.afe: %d\n",
                ap, is_priv, is_write, te->ns,
        return std::make_shared<PrefetchAbort>(
            state.isStage2, tranMethod);
    } else if (abt | hapAbt) {
        DPRINTF(TLB, "TLB Fault: Data abort on permission check. AP:%d priv:%d"
                " write:%d\n", ap, is_priv, is_write);
        return std::make_shared<DataAbort>(
            state.isStage2 | !abt, tranMethod);
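Aside: the xn expression above folds together three execute-never sources: the entry's XN bit, SCTLR.WXN (writable implies execute-never), and SCTLR.UWXN (for AP==3, user-writable regions are not executable at privileged level). A hedged restatement with plain booleans; executeNever is an illustrative helper:

#include <cstdint>

// Illustrative recombination of the xn expression in checkPermissions():
// te->xn || (isWritable && sctlr.wxn) || (ap == 3 && sctlr.uwxn && is_priv)
bool executeNever(bool entry_xn, bool writable, bool wxn,
                  uint8_t ap, bool uwxn, bool is_priv)
{
    bool xn = entry_xn;
    xn = xn || (writable && wxn);            // SCTLR.WXN: writable => XN
    xn = xn || (ap == 3 && uwxn && is_priv); // SCTLR.UWXN: user-writable => XN at PL1
    return xn;
}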
    assert(state.aarch64);

    if (req->isCacheClean() && state.exceptionLevel != EL0 && !state.isStage2) {

    Addr vaddr_tainted = req->getVaddr();

    bool is_write = !req->isCacheClean() && mode == Write;
    bool is_atomic = req->isAtomic();

    if (state.isStage2 && req->isPTWalk() && state.hcr.ptw &&
        return std::make_shared<DataAbort>(
            vaddr_tainted, te->domain, is_write,

        return std::make_shared<DataAbort>(
            is_atomic ? false : is_write,

    if (te->nonCacheable) {
        if (req->isPrefetch()) {
            return std::make_shared<PrefetchAbort>(

    bool grant_read = true;

    if (state.isStage2) {
            (!is_write && !is_fetch), is_write, is_fetch);
            (!is_write && !is_fetch), is_write, is_fetch);

        DPRINTF(TLB, "TLB Fault: Prefetch abort on permission check. "
                "ns:%d scr.sif:%d sctlr.afe: %d\n",
        return std::make_shared<PrefetchAbort>(

        DPRINTF(TLB, "TLB Fault: Data abort on permission check."
        return std::make_shared<DataAbort>(
            vaddr_tainted, te->domain,
            (is_atomic && !grant_read) ? false : is_write,
    bool grant_read = te->hap & 0b01;
    bool grant_write = te->hap & 0b10;

    uint8_t pxn = te->pxn;

            "Checking S2 permissions: hap:%d, xn:%d, pxn:%d, r:%d, "
            "w:%d, x:%d\n", te->hap, xn, pxn, r, w, x);

        grant = grant_read && !xn;
    } else if (req->isAtomic()) {
        grant = grant_read || grant_write;

        panic("Invalid Operation\n");

    return std::make_pair(grant, grant_read);
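Aside: stage-2 entries carry a 2-bit HAP (S2AP) field in which bit 0 grants reads and bit 1 grants writes, which is exactly what grant_read = te->hap & 0b01 and grant_write = te->hap & 0b10 test above (and what the earlier hapAbt check relies on). A minimal sketch; s2ReadWrite is an illustrative helper:

#include <cstdint>
#include <utility>

// Decode the stage-2 access permission (HAP/S2AP) field:
// bit 0 => read permitted, bit 1 => write permitted.
std::pair<bool, bool> s2ReadWrite(uint8_t hap)
{
    bool grant_read = hap & 0b01;
    bool grant_write = hap & 0b10;
    return {grant_read, grant_write};
}

// Example: hap == 0b01 allows a load but a store would abort, matching
// "bool hapAbt = is_write ? !(hap & 2) : !(hap & 1);" earlier on this page.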
    bool grant = false, grant_read = true, grant_write = true, grant_exec = true;

    const uint8_t ap = te->ap & 0b11;
    const bool is_priv = state.isPriv && !(req->getFlags() & UserMode);

    uint8_t pxn = te->pxn;

    DPRINTF(TLBVerbose, "Checking S1 permissions: ap:%d, xn:%d, pxn:%d, r:%d, "
            "w:%d, x:%d, is_priv: %d, wxn: %d\n", ap, xn,
            pxn, r, w, x, is_priv, wxn);

        return std::make_pair(false, false);

        pr = 1; pw = 1; ur = 0; uw = 0;
        pr = 1; pw = 1; ur = 1; uw = 1;
        pr = 1; pw = 0; ur = 0; uw = 0;
        pr = 1; pw = 0; ur = 1; uw = 0;

    const bool px = !(pxn || uw);

    grant_read = is_priv ? pr : ur;
    grant_write = is_priv ? pw : uw;
    grant_exec = is_priv ? px : ux;

    switch (bits(ap, 1)) {
        grant_read = 1; grant_write = 1;
        grant_read = 1; grant_write = 0;

    grant_exec = grant_exec && !(wxn && grant_write);
    grant_exec = grant_exec && !state.scr.sif;

    } else if (req->isAtomic()) {
        grant = grant_read && grant_write;

    return std::make_pair(grant, grant_read);
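Aside: the pr/pw/ur/uw table above is the AArch64 stage-1 data-permission decode of AP[2:1]: 0b00 privileged RW, 0b01 RW at EL0 and above, 0b10 privileged RO, 0b11 RO at EL0 and above (AP[1] gates EL0 access, AP[2] gates writes). A standalone sketch of that table, leaving out the XN/PXN/WXN refinements that follow it; decodeAp and S1Perms are illustrative names:

#include <cstdint>

struct S1Perms { bool pr, pw, ur, uw; };

// AArch64 stage-1 data access permissions from AP[2:1]
// (the two bits kept as te->ap in the fragment above).
S1Perms decodeAp(uint8_t ap)
{
    switch (ap & 0b11) {
      case 0b00: return {true, true, false, false}; // RW, privileged only
      case 0b01: return {true, true, true, true};   // RW, EL0 and privileged
      case 0b10: return {true, false, false, false}; // RO, privileged only
      default:   return {true, false, true, false};  // RO, EL0 and privileged
    }
}

// Example: a privileged write to an ap == 0b10 page is denied,
// since decodeAp(0b10).pw == false.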
    bool exception = false;
    switch (state.exceptionLevel) {

    if (HaveExt(tc, ArmExtension::FEAT_PAN) && state.cpsr.pan && (ap & 0x1) &&
        if (req->isCacheMaintenance() &&
    } else if (!is_priv && !(state.hcr.e2h && !state.hcr.tge)) {
    const bool selbit = bits(vaddr_tainted, 55);
    const auto topbit = state.computeAddrTop(tc, selbit, is_inst, tcr, el);
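Aside: purifyTaggedAddr (declared further down this page) uses bit 55 to pick the TTBR0/TTBR1 half of the address space and computeAddrTop to find the highest valid VA bit; when top-byte-ignore applies, the top bit is 55 and the ignored bits above it are filled with copies of bit 55. A minimal sketch of that masking under the assumption that only the TBI case matters; purify is an illustrative helper, not maskTaggedAddr itself:

#include <cstdint>

// Strip an AArch64 address tag: keep bits [topbit:0] and replicate the
// select bit (bit 55) into everything above. topbit is 55 when TBI applies,
// 63 when it does not.
uint64_t purify(uint64_t vaddr, int topbit)
{
    if (topbit == 63)
        return vaddr;                      // no tagging: nothing to strip
    const uint64_t keep = (1ULL << (topbit + 1)) - 1;
    const bool selbit = (vaddr >> 55) & 1; // TTBR1 (upper) vs TTBR0 (lower) half
    const uint64_t fill = selbit ? ~keep : 0;
    return (vaddr & keep) | fill;
}

// Example: a tagged user pointer 0x5A007FFF12345678 purifies to
// 0x00007FFF12345678 with topbit == 55.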
    bool is_atomic = req->isAtomic();
    req->setPaddr(vaddr);

        f = std::make_shared<PrefetchAbort>(vaddr,
        f = std::make_shared<DataAbort>(vaddr,

    if (long_desc_format || state.sctlr.tre == 0 || state.nmrr.ir0 == 0 ||
        state.nmrr.or0 == 0 || state.prrr.tr0 != 0x2) {
        if (!req->isCacheMaintenance()) {

    bool dc = (HaveExt(tc, ArmExtension::FEAT_VHE) &&
    bool i_cacheability = state.sctlr.i && !state.sctlr.m;

    temp_te.innerAttrs = i_cacheability ? 0x2 : 0x0;
    temp_te.outerAttrs = i_cacheability ? 0x2 : 0x0;

    DPRINTF(TLBVerbose, "(No MMU) setting memory attributes: shareable: "
            "%d, innerAttrs: %d, outerAttrs: %d, stage2: %d\n",
                Translation *translation, bool &delay, bool timing,

    Addr vaddr_tainted = req->getVaddr();
        functional, &mergeTe, state);

            "Setting memory attributes: shareable: %d, innerAttrs: %d, "
            "outerAttrs: %d, mtype: %d, stage2: %d\n",
            te->shareable, te->innerAttrs, te->outerAttrs,
            static_cast<uint8_t>(te->mtype), state.isStage2);

    if (te->nonCacheable && !req->isCacheMaintenance())

    if (state.isSecure && !te->ns) {

    if (!is_fetch && fault == NoFault &&
        return std::make_shared<DataAbort>(
                 Translation *translation, bool &delay, bool timing,

    assert(!(timing && functional));

    Addr vaddr_tainted = req->getVaddr();
        vaddr = vaddr_tainted;

            "CPSR is priv:%d UserMode:%d secure:%d S1S2NsTran:%d\n",

    DPRINTF(TLB, "translateFs addr %#x, mode %d, st2 %d, scr %#x sctlr %#x "
            "flags %#lx tranType 0x%x\n", vaddr_tainted, mode,

    if (!state.isStage2) {
        if ((req->isInstFetch() && (!state.sctlr.i)) ||
            ((!req->isInstFetch()) && (!state.sctlr.c))) {
            if (!req->isCacheMaintenance()) {
                return std::make_shared<DataAbort>(

    if (HaveExt(tc, ArmExtension::FEAT_VHE) &&
    else if (state.hcr.dc == 1)

        long_desc_format, state);

    DPRINTF(TLBVerbose, "Translating %s=%#x context=%d\n",
            state.isStage2 ? "IPA" : "VA", vaddr_tainted, state.asid);

    if (sd->enabled() && fault == NoFault) {
        fault = sd->testDebug(tc, req, mode);

        tran_type, false, state);
        tran_type, true, state);
    assert(translation);

    fault = translateFs(req, tc, mode, translation, delay, true, tran_type,

    DPRINTF(TLBVerbose, "Translation returning delay=%d fault=%d\n", delay,

    if (translation && (call_from_s2 || !state.stage2Req || req->hasPaddr() ||
    switch (mmfr1.vmidbits) {
        panic("Reserved ID_AA64MMFR1_EL1.VMIDBits value: %#x",
        ((tran_type == state.curTranType) || stage2)) {

        DPRINTF(TLBVerbose, "TLB variables changed!\n");
        state.updateMiscReg(tc, tran_type);
    if (state.directToStage2) {

    exceptionLevel = tranTypeEL(cpsr, scr, tran_type);

    switch (currRegime) {
        uint64_t ttbr_asid = ttbcr.a1 ?
            (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
        uint64_t ttbr_asid = ttbcr.a1 ?
            (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
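Aside: the two ttbr_asid fragments pick TTBR0 or TTBR1 according to TCR.A1 and extract the ASID from the top of the chosen TTBR: bits [63:48] when 16-bit ASIDs are implemented and enabled (TCR.AS), otherwise bits [55:48]. A sketch of that extraction; ttbrAsid is an illustrative helper:

#include <cstdint>

// Pull the ASID out of a TTBRn_EL1 value. The ASID lives in the top of the
// register: [63:48] with 16-bit ASIDs enabled, [55:48] otherwise, matching
// bits(ttbr_asid, (haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48) above.
uint16_t ttbrAsid(uint64_t ttbr, bool have_large_asid, bool tcr_as)
{
    const int top = (have_large_asid && tcr_as) ? 63 : 55;
    const uint64_t width = top - 48 + 1;
    return static_cast<uint16_t>((ttbr >> 48) & ((1ULL << width) - 1));
}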
    isPriv = exceptionLevel != EL0;
    if (mmu->release()->has(ArmExtension::VIRTUALIZATION)) {
        if (HaveExt(tc, ArmExtension::FEAT_VHE) &&
            hcr.e2h == 1 && hcr.tge == 1) {

        if (hcr.e2h == 1 && (exceptionLevel == EL2
            || (hcr.tge == 1 && exceptionLevel == EL0))) {
            directToStage2 = false;
            stage2DescReq = false;

            stage2Req = isStage2 ||
                (vm && exceptionLevel < EL2 && el2_enabled &&
            stage2DescReq = isStage2 ||
                (vm && exceptionLevel < EL2 && el2_enabled);
            directToStage2 = !isStage2 && stage2Req && !sctlr.m;

        directToStage2 = false;
        stage2DescReq = false;
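Aside: the stage2Req / stage2DescReq / directToStage2 assignments above boil down to a simple rule: stage 2 is needed either because this is the stage-2 state itself, or because HCR_EL2.VM is set, the access originates below EL2 and EL2 is enabled; and when stage 2 is required but the stage-1 MMU is off (SCTLR.M clear) the request can go straight to stage 2. A compact restatement; the extra conditions elided in the fragment are treated as assumptions, and StageFlags/stageFlags are illustrative names:

// Illustrative restatement of the flags computed in updateMiscReg().
struct StageFlags { bool stage2Req, stage2DescReq, directToStage2; };

StageFlags stageFlags(bool is_stage2, bool hcr_vm, bool below_el2,
                      bool el2_enabled, bool sctlr_m)
{
    StageFlags f;
    // Descriptor walks need stage 2 whenever the VM bit covers this EL.
    f.stage2DescReq = is_stage2 || (hcr_vm && below_el2 && el2_enabled);
    // The fragment ANDs in further conditions (elided here) before setting
    // stage2Req; treat them as true for this sketch.
    f.stage2Req = f.stage2DescReq;
    // Skip stage 1 entirely when it is required but the stage-1 MMU is off.
    f.directToStage2 = !is_stage2 && f.stage2Req && !sctlr_m;
    return f;
}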
    asid = context_id.asid;

    if (mmu->release()->has(ArmExtension::VIRTUALIZATION)) {
        if (exceptionLevel == EL2) {

            stage2Req = isStage2 ||
                (hcr.vm && exceptionLevel < EL2 && el2_enabled &&
            stage2DescReq = isStage2 ||
                (hcr.vm && exceptionLevel < EL2 && el2_enabled);
            directToStage2 = !isStage2 && stage2Req && !sctlr.m;

        directToStage2 = false;
        stage2DescReq = false;

    miscRegValid = true;
    curTranType = tran_type;

    return currEL(cpsr) == EL3 && scr.ns == 0 ?

    panic("Unknown translation mode!\n");
              Translation *translation, bool timing, bool functional,

    return getTE(te, req, tc, mode, translation, timing, functional,

    lookup_data.va = va;
    lookup_data.vmid = vmid;
    lookup_data.secure = secure;

    return tlb->multiLookup(lookup_data);
            Translation *translation, bool timing, bool functional,

    if (state.isStage2) {

    Addr vaddr_tainted = req->getVaddr();

    if (state.aarch64) {
        vaddr = vaddr_tainted;

    if (req->isPrefetch()) {
        return std::make_shared<PrefetchAbort>(

            "TLB Miss: Starting hardware table walker for %#x(%d:%d)\n",

            translation, timing, functional, is_secure,
            tran_type, state.stage2DescReq, *te);
        if (timing || fault != NoFault) {

        true, false, regime, state.isStage2, mode);
                  Translation *translation, bool timing, bool functional,

    if (state.isStage2) {
        fault = getTE(&s2_te, req, tc, mode, translation, timing, functional,

    Addr vaddr_tainted = req->getVaddr();

    fault = getTE(&s1_te, req, tc, mode, translation, timing, functional,

            req, translation, mode, timing, functional, state.isSecure,
        fault = s2_lookup->getTe(tc, mergeTe);

    if (state.isStage2) {
        DPRINTF(TLBVerbose, "s2TLB: reqVa %#x, reqPa %#x, fault %p\n",
                vaddr_tainted, req->hasPaddr() ? req->getPaddr() : ~0,

    auto arm_fault = reinterpret_cast<ArmFault*>(fault.get());
    return entry && !entry->partial;

    auto *ommu = dynamic_cast<MMU*>(old_mmu);
    _attr = ommu->_attr;

    fatal_if(!ti, "%s is not a valid ARM TLB tester\n", _ti->name());

    if (!test || !req->hasSize() || req->getSize() == 0 ||
        req->isCacheMaintenance()) {
    : statistics::Group(parent),
      ADD_STAT(alignFaults, statistics::units::Count::get(),
               "Number of MMU faults due to alignment restrictions"),
      ADD_STAT(prefetchFaults, statistics::units::Count::get(),
               "Number of MMU faults due to prefetch"),
      ADD_STAT(domainFaults, statistics::units::Count::get(),
               "Number of MMU faults due to domain restrictions"),
      ADD_STAT(permsFaults, statistics::units::Count::get(),
               "Number of MMU faults due to permissions restrictions")
virtual void annotate(AnnotationIDs id, uint64_t val)
SelfDebug * getSelfDebug() const
void setAttr(uint64_t attr)
Accessor functions for memory attributes for last accessed TLB entry.
bool checkPAN(ThreadContext *tc, uint8_t ap, const RequestPtr &req, Mode mode, const bool is_priv, CachedState &state)
static bool hasUnprivRegime(TranslationRegime regime)
Fault translateSe(const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool &delay, bool timing, CachedState &state)
void drainResume() override
Resume execution after a successful drain.
Fault translateComplete(const RequestPtr &req, ThreadContext *tc, Translation *translation, Mode mode, ArmTranslationType tran_type, bool call_from_s2)
TLB * getTlb(BaseMMU::Mode mode, bool stage2) const
Fault translateMmuOff(ThreadContext *tc, const RequestPtr &req, Mode mode, ArmTranslationType tran_type, Addr vaddr, bool long_desc_format, CachedState &state)
void init() override
Called at init time, this method traverses the TLB hierarchy and populates the instruction/data/...
Fault getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, bool is_secure, ArmTranslationType tran_type, bool stage2)
CachedState & updateMiscReg(ThreadContext *tc, ArmTranslationType tran_type, bool stage2)
bool isCompleteTranslation(TlbEntry *te) const
Fault finalizePhysical(const RequestPtr &req, ThreadContext *tc, Mode mode) const override
std::pair< bool, bool > s1PermBits64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, CachedState &state, bool r, bool w, bool x)
Fault getResultTe(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, TlbEntry *mergeTe, CachedState &state)
static ExceptionLevel tranTypeEL(CPSR cpsr, SCR scr, ArmTranslationType type)
Determine the EL to use for the purpose of a translation given a specific translation type.
bool checkWalkCache() const
void translateTiming(const RequestPtr &req, ThreadContext *tc, Translation *translation, Mode mode) override
void setTestInterface(SimObject *ti)
TlbEntry * lookup(Addr vpn, uint16_t asn, vmid_t vmid, bool secure, bool functional, bool ignore_asn, TranslationRegime target_regime, bool stage2, BaseMMU::Mode mode)
Lookup an entry in the TLB.
TableWalker * itbStage2Walker
TableWalker * dtbStage2Walker
Fault checkPermissions64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, bool stage2)
Fault translateFs(const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool &delay, bool timing, ArmTranslationType tran_type, bool functional, CachedState &state)
ArmISA::TLB * getITBPtr() const
const ArmRelease * _release
gem5::ArmISA::MMU::Stats stats
std::pair< bool, bool > s2PermBits64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, CachedState &state, bool r, bool w, bool x)
Fault checkPermissions(TlbEntry *te, const RequestPtr &req, Mode mode, bool stage2)
bool faultPAN(ThreadContext *tc, uint8_t ap, const RequestPtr &req, Mode mode, const bool is_priv, CachedState &state)
MMU(const ArmMMUParams &p)
TableWalker * getTableWalker(BaseMMU::Mode mode, bool stage2) const
void takeOverFrom(BaseMMU *old_mmu) override
Fault translateMmuOn(ThreadContext *tc, const RequestPtr &req, Mode mode, Translation *translation, bool &delay, bool timing, bool functional, Addr vaddr, ArmFault::TranMethod tranMethod, CachedState &state)
Fault testAndFinalize(const RequestPtr &req, ThreadContext *tc, Mode mode, TlbEntry *te, CachedState &state) const
Addr purifyTaggedAddr(Addr vaddr_tainted, ThreadContext *tc, ExceptionLevel el, TCR tcr, bool is_inst, CachedState &state)
ArmISA::TLB * getDTBPtr() const
Fault testTranslation(const RequestPtr &req, Mode mode, TlbEntry::DomainType domain, CachedState &state) const
Fault translateAtomic(const RequestPtr &req, ThreadContext *tc, Mode mode) override
TranslationGenPtr translateFunctional(Addr start, Addr size, ThreadContext *tc, Mode mode, Request::Flags flags) override
Returns a translation generator for a region of virtual addresses, instead of directly translating a ...
Fault getTe(ThreadContext *tc, TlbEntry *destTe)
void setTableWalker(TableWalker *table_walker)
void setVMID(vmid_t _vmid)
bool isStage2
Indicates this TLB caches IPA->PA translations.
Fault walk(const RequestPtr &req, ThreadContext *tc, uint16_t asid, vmid_t _vmid, BaseMMU::Mode mode, BaseMMU::Translation *_trans, bool timing, bool functional, bool secure, MMU::ArmTranslationType tran_type, bool stage2, const TlbEntry *walk_entry)
void setTestInterface(TlbTestInterface *ti)
uint8_t physAddrRange() const
Returns the supported physical address range in bits.
bool haveLargeAsid64() const
Returns true if ASID is 16 bits in AArch64 (ARMv8)
static bool haveEL(ThreadContext *tc, ArmISA::ExceptionLevel el)
Return true if the system implements a specific exception level.
const ArmRelease * releaseFS() const
virtual void finish(const Fault &fault, const RequestPtr &req, ThreadContext *tc, BaseMMU::Mode mode)=0
virtual void markDelayed()=0
Signal that the translation has been delayed due to a hw page table walk.
std::set< BaseTLB * > instruction
It is possible from the MMU to traverse the entire hierarchy of TLBs, starting from the DTB and ITB (...
void init() override
Called at init time, this method traverses the TLB hierarchy and populates the instruction/data/...
virtual void takeOverFrom(BaseMMU *old_mmu)
std::set< BaseTLB * > data
std::set< BaseTLB * > unified
Cycles is a wrapper class for representing cycle counts, i.e.
virtual std::string name() const
A Packet is used to encapsulate a transfer between two objects in the memory system (e....
void setLE(T v)
Set the value in the data pointer to v as little endian.
@ SECURE
The request targets the secure memory space.
@ STRICT_ORDER
The request is required to be strictly ordered by CPU models and is non-speculative.
@ UNCACHEABLE
The request is to an uncacheable address.
@ CACHE_BLOCK_ZERO
This is a write that targets and zeroes an entire cache block.
Abstract superclass for simulation objects.
ThreadContext is the external interface to all thread state for anything outside of the CPU.
virtual RegVal readMiscReg(RegIndex misc_reg)=0
virtual Process * getProcessPtr()=0
virtual ContextID contextId() const =0
#define ADD_STAT(n,...)
Convenience macro to add a stat to a statistics group.
bool contains(const Addr &a) const
Determine if the range contains an address.
Addr start() const
Get the start address of the range.
constexpr T bits(T val, unsigned first, unsigned last)
Extract the bitfield from position 'first' to 'last' (inclusive) from 'val' and right justify it.
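As described, bits(val, first, last) right-justifies the inclusive bitfield [first:last] of val, with first the more significant position. A hedged 64-bit equivalent for illustration, not gem5's templated implementation:

#include <cstdint>

// Extract bits [first:last] (inclusive, first >= last) and right-justify.
constexpr uint64_t bits64(uint64_t val, unsigned first, unsigned last)
{
    const unsigned nbits = first - last + 1;
    const uint64_t mask = nbits >= 64 ? ~0ULL : ((1ULL << nbits) - 1);
    return (val >> last) & mask;
}

// e.g. bits64(0xABCD, 15, 8) == 0xAB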
#define panic(...)
This implements a cprintf based panic() function.
#define fatal_if(cond,...)
Conditional fatal macro that checks the supplied condition and only causes a fatal error if the condi...
void tagRequest(ThreadContext *tc, const RequestPtr &req, bool ind)
Tag a memory request with MPAM information.
ExceptionLevel currEL(const ThreadContext *tc)
Returns the current Exception Level (EL) of the provided ThreadContext.
bool isSecure(ThreadContext *tc)
Addr maskTaggedAddr(Addr addr, ThreadContext *tc, ExceptionLevel el, int topbit)
bool longDescFormatInUse(ThreadContext *tc)
bool ELIs64(ThreadContext *tc, ExceptionLevel el)
TranslationRegime translationRegime(ThreadContext *tc, ExceptionLevel el)
bool EL2Enabled(ThreadContext *tc)
ExceptionLevel translationEl(TranslationRegime regime)
int computeAddrTop(ThreadContext *tc, bool selbit, bool is_instr, TCR tcr, ExceptionLevel el)
@ MISCREG_ID_AA64MMFR1_EL1
int snsBankedIndex(MiscRegIndex reg, ThreadContext *tc)
bool inAArch64(ThreadContext *tc)
bool HaveExt(ThreadContext *tc, ArmExtension ext)
Returns true if the provided ThreadContext supports the ArmExtension passed as a second argument.
static void decodeAddrOffset(Addr offset, uint8_t &func)
bool pseudoInst(ThreadContext *tc, uint8_t func, uint64_t &result)
Copyright (c) 2024 Pranith Kumar
Copyright (c) 2020 Inria
All rights reserved.
std::shared_ptr< FaultBase > Fault
std::shared_ptr< Request > RequestPtr
uint64_t Addr
Address type. This will probably be moved somewhere else in the near future.
bool FullSystem
The FullSystem variable can be used to determine the current mode of simulation.
constexpr decltype(nullptr) NoFault
void updateMiscReg(ThreadContext *tc, ArmTranslationType tran_type)
vmid_t getVMID(ThreadContext *tc) const
Returns the current VMID (information stored in the VTTBR_EL2 register)
Memoizer< int, ThreadContext *, bool, bool, TCR, ExceptionLevel > computeAddrTop
Stats(statistics::Group *parent)
statistics::Scalar permsFaults
statistics::Scalar alignFaults
statistics::Scalar prefetchFaults
statistics::Scalar domainFaults
TranslationRegime targetRegime
void setAttributes(bool lpae)
Addr pAddr(Addr va) const
The file contains the definition of a set of TLB Invalidate Instructions.