#include "debug/Branch.hh"
#include "debug/Fetch.hh"
#include "debug/MinorTrace.hh"
const BaseMinorCPUParams &params,
if (params.fetch2InputBufferSize < 1) {
    fatal("%s: fetch2InputBufferSize must be >= 1 (%d)\n", name,
        params.fetch2InputBufferSize);
}
for (ThreadID tid = 0; tid < params.numThreads; tid++) {
    inputBuffer.push_back(
        InputBuffer<ForwardLineData>(
            name + ".inputBuffer" + std::to_string(tid), "lines",
            params.fetch2InputBufferSize));
}
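The loop above gives every hardware thread its own input queue of fetched lines; the getInput()/popInput() pair documented further down peeks at and retires the head element of that queue. A minimal, self-contained sketch of the same per-thread peek/pop discipline, using standard-library containers in place of gem5's InputBuffer (class and member names here are illustrative, not gem5 API):

#include <deque>
#include <string>
#include <utility>
#include <vector>

// Simplified stand-in for a fetched line (ForwardLineData in the real stage).
struct Line { std::string bytes; };

// One queue per thread, mirroring Fetch2's per-thread inputBuffer vector of
// InputBuffer<ForwardLineData> (bounded by fetch2InputBufferSize in gem5).
class PerThreadLineBuffers
{
  public:
    explicit PerThreadLineBuffers(int num_threads) : buffers(num_threads) {}

    // Like getInput(tid): peek at the head line, or nullptr if there is none.
    const Line *getInput(int tid) const
    { return buffers[tid].empty() ? nullptr : &buffers[tid].front(); }

    // Like popInput(tid): drop the head line once it has been fully consumed.
    void popInput(int tid)
    { if (!buffers[tid].empty()) buffers[tid].pop_front(); }

    void push(int tid, Line line) { buffers[tid].push_back(std::move(line)); }

  private:
    std::vector<std::deque<Line>> buffers;
};

int main()
{
    PerThreadLineBuffers buf(2);
    buf.push(1, Line{"....16 bytes...."});
    if (buf.getInput(1) != nullptr) {
        // decode from the head line, then retire it
        buf.popInput(1);
    }
}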
DPRINTF(Fetch, "Dumping whole input buffer\n");
if (inst->isFault() || !inst->triedToPredict)
DPRINTF(Branch, "Unpredicted branch seen inst: %s\n", *inst);
branchPredictor.squash(inst->id.fetchSeqNum,
    *branch.target, true, inst->id.threadId);
DPRINTF(Branch, "Branch predicted correctly inst: %s\n", *inst);

/* Predicted taken but not actually taken: squash the prediction */
branchPredictor.squash(inst->id.fetchSeqNum,
    *branch.target, false, inst->id.threadId);
DPRINTF(Branch, "Branch mis-predicted target inst: %s target: %s\n",
    *inst, *branch.target);
branchPredictor.squash(inst->id.fetchSeqNum,
    *branch.target, true, inst->id.threadId);
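The fragments above come from Fetch2's handling of branch feedback arriving from Execute (updateBranchPrediction): taken branches that were never predicted, and predictions with the wrong target, squash the predictor with the correct target, while a correct prediction is simply confirmed. The mapping of cases to calls below is my reading of the reason names and of the true/false squash arguments, not the complete gem5 logic; the predictor type is a toy stand-in for BPredUnit.

#include <cstdint>
#include <cstdio>

enum class Reason { UnpredictedBranch, CorrectlyPredictedBranch,
                    BadlyPredictedBranch, BadlyPredictedBranchTarget };

// Stand-in for the branchPredictor.squash()/update() calls seen above.
struct ToyPredictor
{
    void squash(uint64_t seq, uint64_t target, bool taken, int tid)
    { std::printf("squash seq=%llu target=%#llx taken=%d tid=%d\n",
                  (unsigned long long)seq, (unsigned long long)target,
                  taken, tid); }
    void update(uint64_t seq, int tid)
    { std::printf("update seq=%llu tid=%d\n", (unsigned long long)seq, tid); }
};

// One call per piece of Execute feedback, mirroring the per-reason handling
// suggested by the fragments above.
void
onBranchFeedback(ToyPredictor &bp, Reason reason, uint64_t fetch_seq,
                 uint64_t target, int tid)
{
    switch (reason) {
      case Reason::UnpredictedBranch:          // taken branch never predicted
        bp.squash(fetch_seq, target, true, tid);
        break;
      case Reason::CorrectlyPredictedBranch:   // predicted taken, was taken
        bp.update(fetch_seq, tid);
        break;
      case Reason::BadlyPredictedBranch:       // predicted taken, not taken
        bp.squash(fetch_seq, target, false, tid);
        break;
      case Reason::BadlyPredictedBranchTarget: // taken, but to another target
        bp.squash(fetch_seq, target, true, tid);
        break;
    }
}

int main()
{
    ToyPredictor bp;
    onBranchFeedback(bp, Reason::BadlyPredictedBranchTarget, 42, 0x1234, 0);
}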
assert(!inst->predictedTaken);
if (inst->staticInst->isControl() || inst->staticInst->isSyscall()) {
    std::unique_ptr<PCStateBase> inst_pc(inst->pc->clone());
inst->triedToPredict = true;
if (branchPredictor.predict(inst->staticInst,
    inst->id.fetchSeqNum, *inst_pc, inst->id.threadId)) {
inst->predictedTaken = true;
set(inst->predictedTarget, inst_pc);
DPRINTF(Branch, "Not attempting prediction for inst: %s\n", *inst);
if (inst->predictedTaken) {
    *inst->predictedTarget, inst);

    " new predictionSeqNum: %d\n",
if (!inp.outputWire->isBubble())
DPRINTF(Fetch, "Dumping all input as a stream changing branch"
for (ThreadID tid = 0; tid < cpu.numThreads; tid++) {
DPRINTF(Fetch, "Discarding line %s"
    " due to predictionSeqNum mismatch (expected: %d)\n",
DPRINTF(Fetch, "Scheduled Thread: %d\n", tid);
unsigned int output_index = 0;
if (!discard_line && (!fetch_info.havePC || set_pc)) {
fetch_info.inputIndex =
    (line_in->pc->instAddr() & decoder->pcMask()) -
    line_in->lineBaseAddr;
DPRINTF(Fetch, "Setting new PC value: %s inputIndex: 0x%x"
    " lineBaseAddr: 0x%x lineWidth: 0x%x\n",
set(fetch_info.pc, line_in->pc);
DPRINTF(Fetch, "Discarding line %s (from inputIndex: %d)"
    " due to predictionSeqNum mismatch (expected: %d)\n",
} else if (line_in->isFault()) {
assert(dyn_inst->id.execSeqNum == 0);
set(dyn_inst->pc, fetch_info.pc);
dyn_inst->fault = line_in->fault;
DPRINTF(Fetch, "Fault being passed output_index: "
    "%d: %s\n", output_index, dyn_inst->fault->name());
uint8_t *line = line_in->line;
DPRINTF(Fetch, "Offering MachInst to decoder addr: 0x%x\n",
assert(dyn_inst->id.execSeqNum == 0);
set(dyn_inst->pc, fetch_info.pc);
DPRINTF(Fetch, "decoder inst %s\n", *dyn_inst);
if (decoded_inst->isLoad())
    stats.loadInstructions++;
else if (decoded_inst->isStore())
    stats.storeInstructions++;
else if (decoded_inst->isAtomic())
    stats.amoInstructions++;
else if (decoded_inst->isVector())
    stats.vecInstructions++;
else if (decoded_inst->isFloating())
    stats.fpInstructions++;
else if (decoded_inst->isInteger())
    stats.intInstructions++;
DPRINTF(Fetch, "Instruction extracted from line %s"
    " lineWidth: %d output_index: %d inputIndex: %d"
    " pc: %s inst: %s\n",
    *fetch_info.pc, *dyn_inst);
fetch_info.pc->uReset();
DPRINTF(Fetch, "Inst not ready yet\n");
if (decoder->needMoreBytes()) {
DPRINTF(Fetch, "Updated inputIndex value PC: %s"
    " inputIndex: 0x%x lineBaseAddr: 0x%x lineWidth: 0x%x\n",
if (output_index == 0) {
insts_out.insts[output_index] = dyn_inst;
if (debug::MinorTrace && !dyn_inst->isFault() &&
    dyn_inst->staticInst->isMacroop()) {
    dyn_inst->minorTraceInst(*this);
DPRINTF(Fetch, "Discarding all input on branch/fault\n");
fetch_info.havePC = false;
} else if (discard_line) {
fetch_info.havePC = false;
cpu.activityRecorder->activity();
if (!inp.outputWire->isBubble())
switch (cpu.threadPolicy) {
  case enums::SingleThreaded:
    priority_list.push_back(0);
    break;
  case enums::RoundRobin:
    priority_list = cpu.roundRobinPriority(threadPriority);
    break;
  case enums::Random:
    priority_list = cpu.randomPriority();
    break;
  default:
    panic("Unknown fetch policy");
}
for (auto tid : priority_list) {

return (*inp.outputWire).isBubble() &&
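getScheduledThread() (the switch above) builds a priority list according to the CPU's thread policy and returns the first thread that both has buffered input and is not blocked. A self-contained sketch of that selection, with a plain round-robin rotation standing in for cpu.roundRobinPriority(); the helper names here are illustrative, not gem5 API:

#include <vector>

constexpr int kInvalidThreadID = -1;

// Rotate thread ids so the search starts just after the last scheduled one,
// mimicking a round-robin priority list.
std::vector<int>
roundRobinPriority(int last, int num_threads)
{
    std::vector<int> order;
    for (int i = 1; i <= num_threads; i++)
        order.push_back((last + i) % num_threads);
    return order;
}

// Pick the first thread in priority order that has input and isn't blocked.
int
getScheduledThread(int &thread_priority, int num_threads,
                   const std::vector<bool> &has_input,
                   const std::vector<bool> &blocked)
{
    for (int tid : roundRobinPriority(thread_priority, num_threads)) {
        if (has_input[tid] && !blocked[tid]) {
            thread_priority = tid;   // remember for the next rotation
            return tid;
        }
    }
    return kInvalidThreadID;
}

int main()
{
    int prio = 0;
    std::vector<bool> has_input = {true, true}, blocked = {false, false};
    return getScheduledThread(prio, 2, has_input, blocked) == 1 ? 0 : 1;
}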
604 "Number of integer instructions successfully decoded"),
606 "Number of floating point instructions successfully decoded"),
608 "Number of SIMD instructions successfully decoded"),
610 "Number of memory load instructions successfully decoded"),
612 "Number of memory store instructions successfully decoded"),
614 "Number of memory atomic instructions successfully decoded")
std::ostringstream data;

(*out.inputWire).reportData(data);
MinorCPU is an in-order CPU model with four fixed pipeline stages:
Named(std::string_view name_)
virtual std::string name() const
Base class for branch operations.
virtual void advancePC(PCStateBase &pc_state) const =0
ThreadContext is the external interface to all thread state for anything outside of the CPU.
virtual InstDecoder * getDecoderPtr()=0
Forward data between Execute and Fetch1 carrying change-of-address/stream information.
MinorDynInstPtr inst
Instruction which caused this branch.
static bool isStreamChange(const BranchData::Reason reason)
Is a request with this reason actually a request to change the PC rather than a bubble or branch prediction information.
BadlyPredictedBranchTarget
CorrectlyPredictedBranch
Reason reason
Explanation for this branch.
ThreadID threadId
ThreadID associated with branch.
std::unique_ptr< PCStateBase > target
Starting PC of that stream.
gem5::minor::Fetch2::Fetch2Stats stats
Fetch2(const std::string &name, MinorCPU &cpu_, const BaseMinorCPUParams &params, Latch< ForwardLineData >::Output inp_, Latch< BranchData >::Output branchInp_, Latch< BranchData >::Input predictionOut_, Latch< ForwardInstData >::Input out_, std::vector< InputBuffer< ForwardInstData > > &next_stage_input_buffer)
void evaluate()
Pass on input/buffer data to the output if you can.
void updateBranchPrediction(const BranchData &branch)
Update local branch prediction structures from feedback from Execute.
const ForwardLineData * getInput(ThreadID tid)
Get a piece of data to work on from the inputBuffer, or 0 if there is no data.
std::vector< InputBuffer< ForwardLineData > > inputBuffer
void popInput(ThreadID tid)
Pop an element off the input buffer, if there are any.
MinorCPU & cpu
Pointer back to the containing CPU.
branch_prediction::BPredUnit & branchPredictor
Branch predictor passed from Python configuration.
Latch< ForwardLineData >::Output inp
Input port carrying lines from Fetch1.
bool isDrained()
Is this stage drained?
Latch< BranchData >::Input predictionOut
Output port carrying predictions back to Fetch1.
ThreadID getScheduledThread()
Use the current threading policy to determine the next thread to fetch from.
Latch< ForwardInstData >::Input out
Output port carrying instructions into Decode.
void dumpAllInput(ThreadID tid)
Dump the whole contents of the input buffer.
void predictBranch(MinorDynInstPtr inst, BranchData &branch)
Predicts branches for the given instruction.
std::vector< InputBuffer< ForwardInstData > > & nextStageReserve
Interface to reserve space in the next stage.
bool processMoreThanOneInput
If true, more than one input word can be processed each cycle if there is room in the output to contain its expansion.
std::vector< Fetch2ThreadInfo > fetchInfo
unsigned int outputWidth
Width of output of this stage/input of next in instructions.
Latch< BranchData >::Output branchInp
Input port carrying branches from Execute.
Forward flowing data between Fetch2, Decode, Execute carrying a packet of instructions of a width appropriate to the configured stage widths.
ThreadID threadId
Thread associated with these instructions.
void resize(unsigned int width)
Resize a bubble/empty ForwardInstData and fill with bubbles.
bool isBubble() const
BubbleIF interface.
MinorDynInstPtr insts[MAX_FORWARD_INSTS]
Array of carried insts, ref counted.
Line fetch data in the forward direction.
unsigned int lineWidth
Explicit line width, don't rely on data.size.
InstId id
Thread, stream, prediction ... id of this line.
std::unique_ptr< PCStateBase > pc
PC of the first inst within this sequence.
bool isFault() const
This is a fault, not a line.
Fault fault
This line has a fault.
Addr lineBaseAddr
First byte address in the line.
ThreadID threadId
The thread to which this line/instruction belongs.
InstSeqNum streamSeqNum
The 'stream' this instruction belongs to.
InstSeqNum predictionSeqNum
The predicted qualifier to stream, attached by Fetch2 as a consequence of branch prediction.
Dynamic instruction for Minor.
Fetch2 receives lines of data from Fetch1, separates them into instructions and passes them to Decode.
#define ADD_STAT(n,...)
Convenience macro to add a stat to a statistics group.
#define panic(...)
This implements a cprintf based panic() function.
#define fatal(...)
This implements a cprintf based fatal() function.
RefCountingPtr< MinorDynInst > MinorDynInstPtr
MinorDynInsts are currently reference counted.
void minorTrace(const char *fmt, Args ...args)
DPRINTFN for MinorTrace reporting.
const FlagsType total
Print the total.
int16_t ThreadID
Thread index/ID type.
const ThreadID InvalidThreadID
RefCountingPtr< StaticInst > StaticInstPtr
const StaticInstPtr nullStaticInstPtr
Statically allocated null StaticInstPtr.
The constructed pipeline.
statistics::Scalar loadInstructions
statistics::Scalar fpInstructions
Fetch2Stats(MinorCPU *cpu)
statistics::Scalar intInstructions
Stats.
statistics::Scalar storeInstructions
statistics::Scalar amoInstructions
statistics::Scalar vecInstructions
Data members after this line are cycle-to-cycle state.
InstSeqNum expectedStreamSeqNum
Stream sequence number remembered from last time the predictionSeqNum changed.
InstSeqNum fetchSeqNum
Fetch2 is the source of fetch sequence numbers.
bool havePC
PC is currently valid.
InstSeqNum lastStreamSeqNum
Stream sequence number of the last seen line used to identify changes of instruction stream.
std::unique_ptr< PCStateBase > pc
Remembered program counter value.
unsigned int inputIndex
Index into an incompletely processed input line that instructions are to be extracted from.
InstSeqNum predictionSeqNum
Fetch2 is the source of prediction sequence numbers.
bool blocked
Blocked indication for report.
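The cycle-to-cycle state listed above (inputIndex, havePC, pc, expectedStreamSeqNum, predictionSeqNum) is what lets Fetch2 drop lines that were fetched under a prediction that has since been superseded: a line belonging to the expected stream is only accepted if its prediction id matches the thread's current one. A minimal sketch of that filter, assuming each line carries the stream/prediction ids described for InstId above; the real stage also tracks lastStreamSeqNum and reacts to stream changes, which are omitted here:

#include <cstdint>

// Ids carried by each fetched line (see InstId above).
struct LineId {
    uint64_t streamSeqNum;
    uint64_t predictionSeqNum;
};

// The slice of Fetch2ThreadInfo relevant to line filtering.
struct ThreadState {
    uint64_t expectedStreamSeqNum = 1;
    uint64_t predictionSeqNum = 1;
    bool havePC = false;
};

// A line is stale if it belongs to the expected stream but was fetched under
// an older prediction id; stale lines are discarded and the PC must be
// re-established from the next good line.
bool
acceptLine(ThreadState &t, const LineId &id)
{
    bool discard = id.streamSeqNum == t.expectedStreamSeqNum &&
                   id.predictionSeqNum != t.predictionSeqNum;
    if (discard)
        t.havePC = false;   // force a new PC to be set from a later line
    return !discard;
}

int main()
{
    ThreadState t;
    t.predictionSeqNum = 3;                  // a newer prediction epoch
    LineId stale{1, 2}, fresh{1, 3};
    return (!acceptLine(t, stale) && acceptLine(t, fresh)) ? 0 : 1;
}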