Go to the documentation of this file.
48 #include "debug/TLB.hh"
49 #include "debug/TLBVerbose.hh"
57 using namespace ArmISA;
61 itbStage2(
p.stage2_itb), dtbStage2(
p.stage2_dtb),
62 itbWalker(
p.itb_walker), dtbWalker(
p.dtb_walker),
63 itbStage2Walker(
p.stage2_itb_walker),
64 dtbStage2Walker(
p.stage2_dtb_walker),
67 s1State(this, false), s2State(this, true),
114 if (
static_cast<TLB*
>(
tlb)->walkCache())
118 if (
static_cast<TLB*
>(
tlb)->walkCache())
122 if (
static_cast<TLB*
>(
tlb)->walkCache())
185 lookup_data.
inHost =
false;
208 const Addr paddr = req->getPaddr();
213 req->setLocalAccessor(
218 pseudo_inst::pseudoInst<RegABI64>(tc, func, ret);
220 pseudo_inst::pseudoInst<RegABI32>(tc, func, ret);
236 Translation *translation,
bool &delay,
bool timing,
240 Addr vaddr_tainted = req->getVaddr();
246 vaddr = vaddr_tainted;
257 return std::make_shared<DataAbort>(
269 if (!
p->pTable->translate(
vaddr, paddr))
270 return std::make_shared<GenericPageTableFault>(vaddr_tainted);
271 req->setPaddr(paddr);
289 if (req->isCacheMaintenance()) {
307 if (
state.isStage2 && req->isPTWalk() &&
state.hcr.ptw &&
309 return std::make_shared<DataAbort>(
312 state.isStage2, tranMethod);
321 return std::make_shared<DataAbort>(
329 if (
te->nonCacheable) {
331 if (req->isPrefetch()) {
334 return std::make_shared<PrefetchAbort>(
336 state.isStage2, tranMethod);
340 if (!
te->longDescFormat) {
341 switch ((
state.dacr >> (
static_cast<uint8_t
>(
te->domain) * 2)) & 0x3) {
344 DPRINTF(
TLB,
"TLB Fault: Data abort on domain. DACR: %#x"
345 " domain: %#x write:%d\n",
state.dacr,
346 static_cast<uint8_t
>(
te->domain), is_write);
351 return std::make_shared<PrefetchAbort>(
354 state.isStage2, tranMethod);
356 return std::make_shared<DataAbort>(
359 state.isStage2, tranMethod);
364 panic(
"UNPRED domain\n");
371 uint8_t ap =
te->longDescFormat ?
te->ap << 1 :
te->ap;
372 uint8_t hap =
te->hap;
374 if (
state.sctlr.afe == 1 ||
te->longDescFormat)
378 bool isWritable =
true;
382 if (
state.isStage2) {
387 DPRINTF(
TLB,
"Access permissions 0, checking rs:%#x\n",
388 (
int)
state.sctlr.rs);
389 if (!
state.sctlr.xp) {
390 switch ((
int)
state.sctlr.rs) {
395 abt = is_write || !is_priv;
411 abt = !is_priv && is_write;
412 isWritable = is_priv;
418 panic(
"UNPRED premissions\n");
420 abt = !is_priv || is_write;
429 panic(
"Unknown permissions %#x\n", ap);
433 bool hapAbt = is_write ? !(hap & 2) : !(hap & 1);
434 bool xn =
te->xn || (isWritable &&
state.sctlr.wxn) ||
435 (ap == 3 &&
state.sctlr.uwxn && is_priv);
436 if (is_fetch && (abt || xn ||
437 (
te->longDescFormat &&
te->pxn && is_priv) ||
440 DPRINTF(
TLB,
"TLB Fault: Prefetch abort on permission check. AP:%d "
441 "priv:%d write:%d ns:%d sif:%d sctlr.afe: %d \n",
442 ap, is_priv, is_write,
te->ns,
446 return std::make_shared<PrefetchAbort>(
449 state.isStage2, tranMethod);
450 }
else if (abt | hapAbt) {
452 DPRINTF(
TLB,
"TLB Fault: Data abort on permission check. AP:%d priv:%d"
453 " write:%d\n", ap, is_priv, is_write);
454 return std::make_shared<DataAbort>(
457 state.isStage2 | !abt, tranMethod);
473 assert(
state.aarch64);
480 if (req->isCacheClean() &&
state.aarch64EL !=
EL0 && !
state.isStage2) {
484 Addr vaddr_tainted = req->getVaddr();
491 bool is_write = !req->isCacheClean() &&
mode ==
Write;
492 bool is_atomic = req->isAtomic();
500 if (
state.isStage2 && req->isPTWalk() &&
state.hcr.ptw &&
502 return std::make_shared<DataAbort>(
503 vaddr_tainted,
te->domain, is_write,
514 return std::make_shared<DataAbort>(
517 is_atomic ?
false : is_write,
524 if (
te->nonCacheable) {
526 if (req->isPrefetch()) {
529 return std::make_shared<PrefetchAbort>(
541 bool grant_read =
true;
543 if (
state.isStage2) {
545 (!is_write && !is_fetch), is_write, is_fetch);
548 (!is_write && !is_fetch), is_write, is_fetch);
554 DPRINTF(
TLB,
"TLB Fault: Prefetch abort on permission check. "
555 "ns:%d scr.sif:%d sctlr.afe: %d\n",
559 return std::make_shared<PrefetchAbort>(
565 DPRINTF(
TLB,
"TLB Fault: Data abort on permission check."
567 return std::make_shared<DataAbort>(
568 vaddr_tainted,
te->domain,
569 (is_atomic && !grant_read) ?
false : is_write,
588 bool grant_read =
te->hap & 0b01;
589 bool grant_write =
te->hap & 0
b10;
592 uint8_t pxn =
te->pxn;
600 "Checking S2 permissions: hap:%d, xn:%d, pxn:%d, r:%d, "
601 "w:%d, x:%d\n",
te->hap, xn, pxn,
r,
w,
x);
604 grant = grant_read && !xn;
605 }
else if (req->isAtomic()) {
606 grant = grant_read || grant_write;
612 panic(
"Invalid Operation\n");
615 return std::make_pair(grant, grant_read);
622 bool grant =
false, grant_read =
true;
624 const uint8_t ap =
te->ap & 0
b11;
625 const bool is_priv =
state.isPriv && !(req->getFlags() &
UserMode);
629 uint8_t pxn =
te->pxn;
636 DPRINTF(TLBVerbose,
"Checking S1 permissions: ap:%d, xn:%d, pxn:%d, r:%d, "
637 "w:%d, x:%d, is_priv: %d, wxn: %d\n", ap, xn,
638 pxn,
r,
w,
x, is_priv,
wxn);
641 return std::make_pair(
false,
false);
648 grant_read = ap & 0x1;
649 uint8_t
perm = (ap << 2) | (xn << 1) | pxn;
659 grant =
r ||
w || (
x && !
wxn);
680 uint8_t
perm = (ap << 2) | (xn << 1) | pxn;
684 grant =
r ||
w || (
x && !
wxn);
716 uint8_t
perm = (ap & 0x2) | xn;
719 grant =
r ||
w || (
x && !
wxn);
737 return std::make_pair(grant, grant_read);
744 bool exception =
false;
745 switch (
state.aarch64EL) {
777 if (
HaveExt(tc, ArmExtension::FEAT_PAN) &&
state.cpsr.pan && (ap & 0x1) &&
780 if (req->isCacheMaintenance() &&
784 }
else if (!is_priv && !(
state.hcr.e2h && !
state.hcr.tge)) {
798 const bool selbit =
bits(vaddr_tainted, 55);
801 const auto topbit =
state.computeAddrTop(tc, selbit, is_inst, tcr,
el);
812 bool is_atomic = req->isAtomic();
813 req->setPaddr(
vaddr);
827 f = std::make_shared<PrefetchAbort>(
vaddr,
831 f = std::make_shared<DataAbort>(
vaddr,
841 if (long_desc_format ||
state.sctlr.tre == 0 ||
state.nmrr.ir0 == 0 ||
842 state.nmrr.or0 == 0 ||
state.prrr.tr0 != 0x2) {
843 if (!req->isCacheMaintenance()) {
852 bool dc = (
HaveExt(tc, ArmExtension::FEAT_VHE) &&
854 bool i_cacheability =
state.sctlr.i && !
state.sctlr.m;
860 temp_te.
innerAttrs = i_cacheability? 0x2: 0x0;
861 temp_te.
outerAttrs = i_cacheability? 0x2: 0x0;
872 DPRINTF(TLBVerbose,
"(No MMU) setting memory attributes: shareable: "
873 "%d, innerAttrs: %d, outerAttrs: %d, stage2: %d\n",
883 Translation *translation,
bool &delay,
bool timing,
892 Addr vaddr_tainted = req->getVaddr();
895 functional, &mergeTe,
state);
904 "Setting memory attributes: shareable: %d, innerAttrs: %d, "
905 "outerAttrs: %d, mtype: %d, stage2: %d\n",
906 te->shareable,
te->innerAttrs,
te->outerAttrs,
907 static_cast<uint8_t
>(
te->mtype),
state.isStage2);
910 if (
te->nonCacheable && !req->isCacheMaintenance())
922 if (
state.isSecure && !
te->ns) {
925 if (!is_fetch && fault ==
NoFault &&
932 return std::make_shared<DataAbort>(
955 Translation *translation,
bool &delay,
bool timing,
960 assert(!(timing && functional));
962 Addr vaddr_tainted = req->getVaddr();
968 vaddr = vaddr_tainted;
979 "CPSR is priv:%d UserMode:%d secure:%d S1S2NsTran:%d\n",
983 DPRINTF(
TLB,
"translateFs addr %#x, mode %d, st2 %d, scr %#x sctlr %#x "
984 "flags %#lx tranType 0x%x\n", vaddr_tainted,
mode,
987 if (!
state.isStage2) {
988 if ((req->isInstFetch() && (!
state.sctlr.i)) ||
989 ((!req->isInstFetch()) && (!
state.sctlr.c))){
990 if (!req->isCacheMaintenance()) {
1000 return std::make_shared<DataAbort>(
1010 if (
HaveExt(tc, ArmExtension::FEAT_VHE) &&
1013 else if (
state.hcr.dc == 1)
1020 long_desc_format,
state);
1022 DPRINTF(TLBVerbose,
"Translating %s=%#x context=%d\n",
1023 state.isStage2 ?
"IPA" :
"VA", vaddr_tainted,
state.asid);
1032 if (
sd->enabled() && fault ==
NoFault) {
1033 fault =
sd->testDebug(tc, req,
mode);
1056 tran_type,
false,
state);
1086 tran_type,
true,
state);
1100 assert(translation);
1123 fault =
translateFs(req, tc,
mode, translation, delay,
true, tran_type,
1128 DPRINTF(TLBVerbose,
"Translation returning delay=%d fault=%d\n", delay,
1137 if (translation && (call_from_s2 || !
state.stage2Req || req->hasPaddr() ||
1154 switch (mmfr1.vmidbits) {
1169 panic(
"Reserved ID_AA64MMFR1_EL1.VMIDBits value: %#x",
1186 ((tran_type ==
state.curTranType) || stage2)) {
1189 DPRINTF(TLBVerbose,
"TLB variables changed!\n");
1190 state.updateMiscReg(tc, tran_type);
1208 if (
state.directToStage2) {
1234 switch (aarch64EL) {
1236 if (
HaveExt(tc, ArmExtension::FEAT_VHE) &&
1237 hcr.tge == 1 && hcr.e2h == 1) {
1241 uint64_t ttbr_asid = ttbcr.a1 ?
1245 (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
1250 uint64_t ttbr_asid = ttbcr.a1 ?
1254 (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
1262 uint64_t ttbr_asid = ttbcr.a1 ?
1266 (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
1274 uint64_t ttbr_asid = ttbcr.a1 ?
1278 (mmu->haveLargeAsid64 && ttbcr.as) ? 63 : 55, 48);
1291 isPriv = aarch64EL !=
EL0;
1292 if (mmu->release()->has(ArmExtension::VIRTUALIZATION)) {
1294 isHyp = aarch64EL ==
EL2;
1297 isHyp &= (tran_type &
S1CTran) == 0;
1299 if (
HaveExt(tc, ArmExtension::FEAT_VHE) &&
1300 hcr.e2h == 1 && hcr.tge ==1) {
1304 if (hcr.e2h == 1 && (aarch64EL ==
EL2
1305 || (hcr.tge ==1 && aarch64EL ==
EL0))) {
1307 directToStage2 =
false;
1309 stage2DescReq =
false;
1315 stage2Req = isStage2 ||
1316 (
vm && !isHyp && sec &&
1317 !(tran_type &
S1CTran) && (aarch64EL <
EL2) &&
1319 stage2DescReq = isStage2 || (
vm && !isHyp && sec &&
1321 directToStage2 = !isStage2 && stage2Req && !sctlr.m;
1326 directToStage2 =
false;
1328 stage2DescReq =
false;
1346 asid = context_id.asid;
1356 if (mmu->release()->has(ArmExtension::VIRTUALIZATION)) {
1361 isHyp &= (tran_type &
S1CTran) == 0;
1369 stage2Req = hcr.vm && !isStage2 && !isHyp && sec &&
1371 stage2DescReq = hcr.vm && !isStage2 && !isHyp && sec;
1372 directToStage2 = stage2Req && !sctlr.m;
1377 directToStage2 =
false;
1378 stage2DescReq =
false;
1381 miscRegValid =
true;
1382 curTranType = tran_type;
1410 panic(
"Unknown translation mode!\n");
1416 Translation *translation,
bool timing,
bool functional,
1420 return getTE(
te, req, tc,
mode, translation, timing, functional,
1433 lookup_data.
va =
va;
1436 lookup_data.
vmid = vmid;
1437 lookup_data.
hyp = hyp;
1438 lookup_data.
secure = secure;
1441 lookup_data.
inHost = in_host;
1444 return tlb->multiLookup(lookup_data);
1449 Translation *translation,
bool timing,
bool functional,
1455 if (
state.isStage2) {
1459 Addr vaddr_tainted = req->getVaddr();
1462 if (
state.aarch64) {
1466 vaddr = vaddr_tainted;
1470 false, target_el,
false,
state.isStage2,
mode);
1473 if (req->isPrefetch()) {
1478 return std::make_shared<PrefetchAbort>(
1485 "TLB Miss: Starting hardware table walker for %#x(%d:%d)\n",
1491 translation, timing, functional, is_secure,
1492 tran_type,
state.stage2DescReq, *
te);
1495 if (timing || fault !=
NoFault) {
1500 true,
false, target_el,
false,
state.isStage2,
mode);
1509 Translation *translation,
bool timing,
bool functional,
1514 if (
state.isStage2) {
1519 fault =
getTE(&s2_te, req, tc,
mode, translation, timing, functional,
1534 Addr vaddr_tainted = req->getVaddr();
1537 fault =
getTE(&s1_te, req, tc,
mode, translation, timing, functional,
1548 req, translation,
mode, timing, functional,
state.isSecure,
1550 fault = s2_lookup->
getTe(tc, mergeTe);
1564 if (
state.isStage2) {
1565 DPRINTF(TLBVerbose,
"s2TLB: reqVa %#x, reqPa %#x, fault %p\n",
1566 vaddr_tainted, req->hasPaddr() ? req->getPaddr() : ~0,
1569 auto arm_fault =
reinterpret_cast<ArmFault*
>(fault.get());
1583 return entry && !entry->
partial;
1591 auto *ommu =
dynamic_cast<MMU*
>(old_mmu);
1594 _attr = ommu->_attr;
1607 fatal_if(!
ti,
"%s is not a valid ARM TLB tester\n", _ti->
name());
1616 if (!
test || !req->hasSize() || req->getSize() == 0 ||
1617 req->isCacheMaintenance()) {
1647 : statistics::
Group(parent),
1648 ADD_STAT(alignFaults, statistics::units::Count::get(),
1649 "Number of MMU faults due to alignment restrictions"),
1650 ADD_STAT(prefetchFaults, statistics::units::Count::get(),
1651 "Number of MMU faults due to prefetch"),
1652 ADD_STAT(domainFaults, statistics::units::Count::get(),
1653 "Number of MMU faults due to domain restrictions"),
1654 ADD_STAT(permsFaults, statistics::units::Count::get(),
1655 "Number of MMU faults due to permissions restrictions")
uint8_t physAddrRange() const
Returns the supported physical address range in bits.
Fault finalizePhysical(const RequestPtr &req, ThreadContext *tc, Mode mode) const override
Addr maskTaggedAddr(Addr addr, ThreadContext *tc, ExceptionLevel el, int topbit)
virtual RegVal readMiscReg(RegIndex misc_reg)=0
constexpr decltype(nullptr) NoFault
bool ELIs64(ThreadContext *tc, ExceptionLevel el)
Addr start() const
Get the start address of the range.
enums::ArmLookupLevel LookupLevel
std::pair< bool, bool > s2PermBits64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, CachedState &state, bool r, bool w, bool x)
statistics::Scalar domainFaults
Fault getTE(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, bool is_secure, ArmTranslationType tran_type, bool stage2)
bool contains(const Addr &a) const
Determine if the range contains an address.
TableWalker * dtbStage2Walker
virtual void markDelayed()=0
Signal that the translation has been delayed due to a hardware page table walk.
void updateMiscReg(ThreadContext *tc, ArmTranslationType tran_type)
@ SECURE
The request targets the secure memory space.
@ CACHE_BLOCK_ZERO
This is a write that is targeted and zeroing an entire cache block.
bool haveLargeAsid64() const
Returns true if ASID is 16 bits in AArch64 (ARMv8)
virtual ContextID contextId() const =0
Fault getResultTe(TlbEntry **te, const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool timing, bool functional, TlbEntry *mergeTe, CachedState &state)
void init() override
Called at init time, this method traverses the TLB hierarchy and populates the instruction/data/...
void setAttributes(bool lpae)
Addr purifyTaggedAddr(Addr vaddr_tainted, ThreadContext *tc, ExceptionLevel el, TCR tcr, bool is_inst, CachedState &state)
bool inAArch64(ThreadContext *tc)
ArmISA::TLB * getDTBPtr() const
Fault walk(const RequestPtr &req, ThreadContext *tc, uint16_t asid, vmid_t _vmid, bool hyp, BaseMMU::Mode mode, BaseMMU::Translation *_trans, bool timing, bool functional, bool secure, MMU::ArmTranslationType tran_type, bool stage2, const TlbEntry *walk_entry)
Fault testWalk(Addr pa, Addr size, Addr va, bool is_secure, Mode mode, TlbEntry::DomainType domain, LookupLevel lookup_level, bool stage2)
MMU(const ArmMMUParams &p)
void init() override
init() is called after all C++ SimObjects have been created and all ports are connected.
statistics::Scalar permsFaults
TableWalker * itbStage2Walker
Fault translateFs(const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool &delay, bool timing, ArmTranslationType tran_type, bool functional, CachedState &state)
Cycles is a wrapper class for representing cycle counts, i.e.
Fault translateComplete(const RequestPtr &req, ThreadContext *tc, Translation *translation, Mode mode, ArmTranslationType tran_type, bool call_from_s2)
TableWalker * getTableWalker(BaseMMU::Mode mode, bool stage2) const
@ UNCACHEABLE
The request is to an uncacheable address.
statistics::Scalar prefetchFaults
ThreadContext is the external interface to all thread state for anything outside of the CPU.
void setTestInterface(SimObject *ti)
virtual std::string name() const
std::shared_ptr< FaultBase > Fault
std::set< BaseTLB * > unified
#define ADD_STAT(n,...)
Convenience macro to add a stat to a statistics group.
A Packet is used to encapsulate a transfer between two objects in the memory system (e....
static ExceptionLevel tranTypeEL(CPSR cpsr, ArmTranslationType type)
Determine the EL to use for the purpose of a translation given a specific translation type.
Fault translateSe(const RequestPtr &req, ThreadContext *tc, Mode mode, Translation *translation, bool &delay, bool timing, CachedState &state)
std::shared_ptr< Request > RequestPtr
Memoizer< int, ThreadContext *, bool, bool, TCR, ExceptionLevel > computeAddrTop
Fault translateAtomic(const RequestPtr &req, ThreadContext *tc, Mode mode) override
void drainResume() override
Resume execution after a successful drain.
constexpr T bits(T val, unsigned first, unsigned last)
Extract the bitfield from position 'first' to 'last' (inclusive) from 'val' and right justify it.
Abstract superclass for simulation objects.
static void decodeAddrOffset(Addr offset, uint8_t &func)
@ MISCREG_ID_AA64MMFR1_EL1
Fault translateMmuOff(ThreadContext *tc, const RequestPtr &req, Mode mode, ArmTranslationType tran_type, Addr vaddr, bool long_desc_format, CachedState &state)
uint64_t Addr
Address type This will probably be moved somewhere else in the near future.
void setVMID(vmid_t _vmid)
bool isSecure(ThreadContext *tc)
bool checkWalkCache() const
std::set< BaseTLB * > instruction
It is possible from the MMU to traverse the entire hierarchy of TLBs, starting from the DTB and ITB (...
ExceptionLevel currEL(const ThreadContext *tc)
Returns the current Exception Level (EL) of the provided ThreadContext.
bool faultPAN(ThreadContext *tc, uint8_t ap, const RequestPtr &req, Mode mode, const bool is_priv, CachedState &state)
@ STRICT_ORDER
The request is required to be strictly ordered by CPU models and is non-speculative.
virtual Process * getProcessPtr()=0
ArmISA::TLB * getITBPtr() const
bool FullSystem
The FullSystem variable can be used to determine the current mode of simulation.
TLB * getTlb(BaseMMU::Mode mode, bool stage2) const
void translateTiming(const RequestPtr &req, ThreadContext *tc, Translation *translation, Mode mode) override
bool IsSecureEL2Enabled(ThreadContext *tc)
Fault getTe(ThreadContext *tc, TlbEntry *destTe)
Fault testTranslation(const RequestPtr &req, Mode mode, TlbEntry::DomainType domain, CachedState &state)
TranslationGenPtr translateFunctional(Addr start, Addr size, ThreadContext *tc, Mode mode, Request::Flags flags) override
Returns a translation generator for a region of virtual addresses, instead of directly translating a ...
void setTableWalker(TableWalker *table_walker)
bool checkPAN(ThreadContext *tc, uint8_t ap, const RequestPtr &req, Mode mode, const bool is_priv, CachedState &state)
SelfDebug * getSelfDebug() const
vmid_t getVMID(ThreadContext *tc) const
Returns the current VMID (information stored in the VTTBR_EL2 register)
Fault checkPermissions(TlbEntry *te, const RequestPtr &req, Mode mode, bool stage2)
bool longDescFormatInUse(ThreadContext *tc)
virtual void annotate(AnnotationIDs id, uint64_t val)
CachedState & updateMiscReg(ThreadContext *tc, ArmTranslationType tran_type, bool stage2)
const ArmRelease * releaseFS() const
void setAttr(uint64_t attr)
Accessor functions for memory attributes for last accessed TLB entry.
int snsBankedIndex(MiscRegIndex reg, ThreadContext *tc)
void setLE(T v)
Set the value in the data pointer to v as little endian.
void takeOverFrom(BaseMMU *old_mmu) override
static bool haveEL(ThreadContext *tc, ArmISA::ExceptionLevel el)
Return true if the system implements a specific exception level.
#define fatal_if(cond,...)
Conditional fatal macro that checks the supplied condition and only causes a fatal error if the condi...
Fault translateMmuOn(ThreadContext *tc, const RequestPtr &req, Mode mode, Translation *translation, bool &delay, bool timing, bool functional, Addr vaddr, ArmFault::TranMethod tranMethod, CachedState &state)
Reference material can be found at the JEDEC website: UFS standard http://www.jedec....
int computeAddrTop(ThreadContext *tc, bool selbit, bool is_instr, TCR tcr, ExceptionLevel el)
const ArmRelease * _release
virtual void takeOverFrom(BaseMMU *old_mmu)
gem5::ArmISA::MMU::Stats stats
bool isCompleteTranslation(TlbEntry *te) const
virtual void finish(const Fault &fault, const RequestPtr &req, ThreadContext *tc, BaseMMU::Mode mode)=0
Stats(statistics::Group *parent)
bool HaveExt(ThreadContext *tc, ArmExtension ext)
Returns true if the provided ThreadContext supports the ArmExtension passed as a second argument.
statistics::Scalar alignFaults
TlbEntry * lookup(Addr vpn, uint16_t asn, vmid_t vmid, bool hyp, bool secure, bool functional, bool ignore_asn, ExceptionLevel target_el, bool in_host, bool stage2, BaseMMU::Mode mode)
Lookup an entry in the TLB.
std::set< BaseTLB * > data
#define panic(...)
This implements a cprintf based panic() function.
Fault checkPermissions64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, bool stage2)
std::pair< bool, bool > s1PermBits64(TlbEntry *te, const RequestPtr &req, Mode mode, ThreadContext *tc, CachedState &state, bool r, bool w, bool x)
Generated on Thu Jun 16 2022 10:41:40 for gem5 by doxygen 1.8.17