48#include "debug/Branch.hh"
49#include "debug/Fetch.hh"
50#include "debug/MinorTrace.hh"
60 const BaseMinorCPUParams &params,
82 if (params.fetch2InputBufferSize < 1) {
83 fatal(
"%s: fetch2InputBufferSize must be >= 1 (%d)\n", name,
84 params.fetch2InputBufferSize);
88 for (
ThreadID tid = 0; tid < params.numThreads; tid++) {
91 name +
".inputBuffer" + std::to_string(tid),
"lines",
92 params.fetch2InputBufferSize));
121 DPRINTF(Fetch,
"Dumping whole input buffer\n");
134 if (inst->isFault() || !inst->triedToPredict)
156 DPRINTF(
Branch,
"Unpredicted branch seen inst: %s\n", *inst);
158 *branch.
target,
true, inst->id.threadId);
166 DPRINTF(
Branch,
"Branch predicted correctly inst: %s\n", *inst);
174 *branch.
target ,
false, inst->id.threadId);
182 DPRINTF(
Branch,
"Branch mis-predicted target inst: %s target: %s\n",
185 *branch.
target,
true, inst->id.threadId);
195 assert(!inst->predictedTaken);
198 if (inst->staticInst->isControl() || inst->staticInst->isSyscall()){
199 std::unique_ptr<PCStateBase> inst_pc(inst->pc->clone());
202 inst->triedToPredict =
true;
206 cpu.fetchStats[inst->id.threadId]->numBranches++;
208 inst->id.fetchSeqNum, *inst_pc, inst->id.threadId)) {
210 inst->predictedTaken =
true;
211 set(inst->predictedTarget, inst_pc);
214 DPRINTF(
Branch,
"Not attempting prediction for inst: %s\n", *inst);
218 if (inst->predictedTaken) {
226 *inst->predictedTarget, inst);
234 " new predictionSeqNum: %d\n",
243 if (!
inp.outputWire->isBubble())
259 DPRINTF(Fetch,
"Dumping all input as a stream changing branch"
268 for (
ThreadID tid = 0; tid <
cpu.numThreads; tid++) {
279 DPRINTF(Fetch,
"Discarding line %s"
280 " due to predictionSeqNum mismatch (expected: %d)\n",
296 DPRINTF(Fetch,
"Scheduled Thread: %d\n", tid);
304 unsigned int output_index = 0;
330 if (!discard_line && (!fetch_info.
havePC || set_pc)) {
334 (line_in->
pc->instAddr() &
decoder->pcMask()) -
336 DPRINTF(Fetch,
"Setting new PC value: %s inputIndex: 0x%x"
337 " lineBaseAddr: 0x%x lineWidth: 0x%x\n",
340 set(fetch_info.
pc, line_in->
pc);
352 DPRINTF(Fetch,
"Discarding line %s (from inputIndex: %d)"
353 " due to predictionSeqNum mismatch (expected: %d)\n",
356 }
else if (line_in->
isFault()) {
368 assert(dyn_inst->id.execSeqNum == 0);
370 set(dyn_inst->pc, fetch_info.
pc);
375 dyn_inst->fault = line_in->
fault;
376 DPRINTF(Fetch,
"Fault being passed output_index: "
377 "%d: %s\n", output_index, dyn_inst->fault->name());
379 uint8_t *line = line_in->
line;
388 DPRINTF(Fetch,
"Offering MachInst to decoder addr: 0x%x\n",
411 assert(dyn_inst->id.execSeqNum == 0);
413 set(dyn_inst->pc, fetch_info.
pc);
414 DPRINTF(Fetch,
"decoder inst %s\n", *dyn_inst);
417 if (decoded_inst->
isLoad()) {
418 stats.loadInstructions++;
419 }
else if (decoded_inst->
isStore()) {
420 stats.storeInstructions++;
421 }
else if (decoded_inst->
isAtomic()) {
422 stats.amoInstructions++;
423 }
else if (decoded_inst->
isVector()) {
424 stats.vecInstructions++;
426 stats.fpInstructions++;
428 stats.intInstructions++;
431 stats.totalInstructions++;
432 cpu.fetchStats[tid]->numInsts++;
434 DPRINTF(Fetch,
"Instruction extracted from line %s"
435 " lineWidth: %d output_index: %d inputIndex: %d"
436 " pc: %s inst: %s\n",
439 *fetch_info.
pc, *dyn_inst);
455 fetch_info.
pc->uReset();
464 DPRINTF(Fetch,
"Inst not ready yet\n");
469 if (
decoder->needMoreBytes()) {
472 DPRINTF(Fetch,
"Updated inputIndex value PC: %s"
473 " inputIndex: 0x%x lineBaseAddr: 0x%x lineWidth: 0x%x\n",
484 if (output_index == 0) {
488 insts_out.
insts[output_index] = dyn_inst;
493 if (debug::MinorTrace && !dyn_inst->isFault() &&
494 dyn_inst->staticInst->isMacroop()) {
495 dyn_inst->minorTraceInst(*
this);
508 DPRINTF(Fetch,
"Discarding all input on branch/fault\n");
510 fetch_info.
havePC =
false;
512 }
else if (discard_line) {
517 fetch_info.
havePC =
false;
545 cpu.activityRecorder->activity();
561 if (!
inp.outputWire->isBubble())
571 switch (
cpu.threadPolicy) {
572 case enums::SingleThreaded:
573 priority_list.push_back(0);
575 case enums::RoundRobin:
579 priority_list =
cpu.randomPriority();
582 panic(
"Unknown fetch policy");
585 for (
auto tid : priority_list) {
603 return (*
inp.outputWire).isBubble() &&
610 "Total number of instructions successfully decoded"),
612 "Number of integer instructions successfully decoded"),
614 "Number of floating point instructions successfully decoded"),
616 "Number of SIMD instructions successfully decoded"),
618 "Number of memory load instructions successfully decoded"),
620 "Number of memory store instructions successfully decoded"),
622 "Number of memory atomic instructions successfully decoded")
636 std::ostringstream
data;
641 (*
out.inputWire).reportData(
data);
MinorCPU is an in-order CPU model with four fixed pipeline stages:
Named(std::string_view name_)
virtual std::string name() const
Base class for branch operations.
virtual void advancePC(PCStateBase &pc_state) const =0
ThreadContext is the external interface to all thread state for anything outside of the CPU.
virtual InstDecoder * getDecoderPtr()=0
Forward data between Execute and Fetch1 carrying change-of-address/stream information.
MinorDynInstPtr inst
Instruction which caused this branch.
static bool isStreamChange(const BranchData::Reason reason)
Is a request with this reason actually a request to change the PC rather than a bubble or branch pred...
@ BadlyPredictedBranchTarget
@ CorrectlyPredictedBranch
Reason reason
Explanation for this branch.
ThreadID threadId
ThreadID associated with branch.
std::unique_ptr< PCStateBase > target
Starting PC of that stream.
gem5::minor::Fetch2::Fetch2Stats stats
Fetch2(const std::string &name, MinorCPU &cpu_, const BaseMinorCPUParams &params, Latch< ForwardLineData >::Output inp_, Latch< BranchData >::Output branchInp_, Latch< BranchData >::Input predictionOut_, Latch< ForwardInstData >::Input out_, std::vector< InputBuffer< ForwardInstData > > &next_stage_input_buffer)
void evaluate()
Pass on input/buffer data to the output if you can.
void updateBranchPrediction(const BranchData &branch)
Update local branch prediction structures from feedback from Execute.
const ForwardLineData * getInput(ThreadID tid)
Get a piece of data to work on from the inputBuffer, or 0 if there is no data.
std::vector< InputBuffer< ForwardLineData > > inputBuffer
void popInput(ThreadID tid)
Pop an element off the input buffer, if there are any.
MinorCPU & cpu
Pointer back to the containing CPU.
branch_prediction::BPredUnit & branchPredictor
Branch predictor passed from Python configuration.
Latch< ForwardLineData >::Output inp
Input port carrying lines from Fetch1.
bool isDrained()
Is this stage drained?
Latch< BranchData >::Input predictionOut
Output port carrying predictions back to Fetch1.
ThreadID getScheduledThread()
Use the current threading policy to determine the next thread to fetch from.
Latch< ForwardInstData >::Input out
Output port carrying instructions into Decode.
void dumpAllInput(ThreadID tid)
Dump the whole contents of the input buffer.
void predictBranch(MinorDynInstPtr inst, BranchData &branch)
Predicts branches for the given instruction.
std::vector< InputBuffer< ForwardInstData > > & nextStageReserve
Interface to reserve space in the next stage.
bool processMoreThanOneInput
If true, more than one input word can be processed each cycle if there is room in the output to conta...
std::vector< Fetch2ThreadInfo > fetchInfo
unsigned int outputWidth
Width of output of this stage/input of next in instructions.
Latch< BranchData >::Output branchInp
Input port carrying branches from Execute.
Forward flowing data between Fetch2,Decode,Execute carrying a packet of instructions of a width appro...
ThreadID threadId
Thread associated with these instructions.
void resize(unsigned int width)
Resize a bubble/empty ForwardInstData and fill with bubbles.
bool isBubble() const
BubbleIF interface.
MinorDynInstPtr insts[MAX_FORWARD_INSTS]
Array of carried insts, ref counted.
Line fetch data in the forward direction.
unsigned int lineWidth
Explicit line width, don't rely on data.size.
InstId id
Thread, stream, prediction ... id of this line.
std::unique_ptr< PCStateBase > pc
PC of the first inst within this sequence.
bool isFault() const
This is a fault, not a line.
Fault fault
This line has a fault.
Addr lineBaseAddr
First byte address in the line.
ThreadID threadId
The thread to which this line/instruction belongs.
InstSeqNum streamSeqNum
The 'stream' this instruction belongs to.
InstSeqNum predictionSeqNum
The predicted qualifier to stream, attached by Fetch2 as a consequence of branch prediction.
Dynamic instruction for Minor.
Fetch2 receives lines of data from Fetch1, separates them into instructions and passes them to Decode...
#define ADD_STAT(n,...)
Convenience macro to add a stat to a statistics group.
#define panic(...)
This implements a cprintf based panic() function.
#define fatal(...)
This implements a cprintf based fatal() function.
RefCountingPtr< MinorDynInst > MinorDynInstPtr
MinorDynInsts are currently reference counted.
void minorTrace(const char *fmt, Args ...args)
DPRINTFN for MinorTrace reporting.
const FlagsType total
Print the total.
Copyright (c) 2024 Arm Limited All rights reserved.
int16_t ThreadID
Thread index/ID type.
const ThreadID InvalidThreadID
RefCountingPtr< StaticInst > StaticInstPtr
const StaticInstPtr nullStaticInstPtr
Statically allocated null StaticInstPtr.
The constructed pipeline.
statistics::Scalar loadInstructions
statistics::Scalar fpInstructions
Fetch2Stats(MinorCPU *cpu)
statistics::Scalar intInstructions
statistics::Scalar storeInstructions
statistics::Scalar amoInstructions
statistics::Scalar totalInstructions
Stats.
statistics::Scalar vecInstructions
Data members after this line are cycle-to-cycle state.
InstSeqNum expectedStreamSeqNum
Stream sequence number remembered from last time the predictionSeqNum changed.
InstSeqNum fetchSeqNum
Fetch2 is the source of fetch sequence numbers.
bool havePC
PC is currently valid.
InstSeqNum lastStreamSeqNum
Stream sequence number of the last seen line used to identify changes of instruction stream.
std::unique_ptr< PCStateBase > pc
Remembered program counter value.
unsigned int inputIndex
Index into an incompletely processed input line that instructions are to be extracted from.
InstSeqNum predictionSeqNum
Fetch2 is the source of prediction sequence numbers.
bool blocked
Blocked indication for report.