Go to the documentation of this file.
42 #include "arch/decoder.hh"
43 #include "arch/utility.hh"
46 #include "debug/Branch.hh"
47 #include "debug/Fetch.hh"
48 #include "debug/MinorTrace.hh"
55 MinorCPUParams ¶ms,
64 branchInp(branchInp_),
65 predictionOut(predictionOut_),
67 nextStageReserve(next_stage_input_buffer),
68 outputWidth(params.decodeInputWidth),
69 processMoreThanOneInput(params.fetch2CycleInput),
70 branchPredictor(*params.branchPred),
71 fetchInfo(params.numThreads),
72 threadPriority(0), stats(&cpu_)
77 if (params.fetch2InputBufferSize < 1) {
78 fatal(
"%s: fetch2InputBufferSize must be >= 1 (%d)\n",
name,
79 params.fetch2InputBufferSize);
83 for (
ThreadID tid = 0; tid < params.numThreads; tid++) {
87 params.fetch2InputBufferSize));
116 DPRINTF(Fetch,
"Dumping whole input buffer\n");
129 if (inst->isFault() || !inst->triedToPredict)
151 DPRINTF(Branch,
"Unpredicted branch seen inst: %s\n", *inst);
153 branch.
target,
true, inst->id.threadId);
161 DPRINTF(Branch,
"Branch predicted correctly inst: %s\n", *inst);
167 DPRINTF(Branch,
"Branch mis-predicted inst: %s\n", *inst);
169 branch.
target ,
false, inst->id.threadId);
177 DPRINTF(Branch,
"Branch mis-predicted target inst: %s target: %s\n",
180 branch.
target,
true, inst->id.threadId);
191 assert(!inst->predictedTaken);
194 if (inst->staticInst->isControl() ||
195 inst->staticInst->isSyscall())
198 inst->triedToPredict =
true;
200 DPRINTF(Branch,
"Trying to predict for inst: %s\n", *inst);
203 inst->id.fetchSeqNum, inst_pc,
206 inst->predictedTaken =
true;
207 inst->predictedTarget = inst_pc;
211 DPRINTF(Branch,
"Not attempting prediction for inst: %s\n", *inst);
215 if (inst->predictedTaken) {
223 inst->predictedTarget, inst);
230 DPRINTF(Branch,
"Branch predicted taken inst: %s target: %s"
231 " new predictionSeqNum: %d\n",
240 if (!
inp.outputWire->isBubble())
256 DPRINTF(Fetch,
"Dumping all input as a stream changing branch"
276 DPRINTF(Fetch,
"Discarding line %s"
277 " due to predictionSeqNum mismatch (expected: %d)\n",
293 DPRINTF(Fetch,
"Scheduled Thread: %d\n", tid);
301 unsigned int output_index = 0;
326 if (!discard_line && (!fetch_info.
havePC || set_pc)) {
332 DPRINTF(Fetch,
"Setting new PC value: %s inputIndex: 0x%x"
333 " lineBaseAddr: 0x%x lineWidth: 0x%x\n",
336 fetch_info.
pc = line_in->
pc;
348 DPRINTF(Fetch,
"Discarding line %s (from inputIndex: %d)"
349 " due to predictionSeqNum mismatch (expected: %d)\n",
352 }
else if (line_in->
isFault()) {
364 assert(dyn_inst->id.execSeqNum == 0);
366 dyn_inst->pc = fetch_info.
pc;
371 dyn_inst->fault = line_in->
fault;
372 DPRINTF(Fetch,
"Fault being passed output_index: "
373 "%d: %s\n", output_index, dyn_inst->fault->name());
375 uint8_t *line = line_in->
line;
386 DPRINTF(Fetch,
"Offering MachInst to decoder addr: 0x%x\n",
403 assert(dyn_inst->id.execSeqNum == 0);
409 dyn_inst->staticInst = decoded_inst;
411 dyn_inst->pc = fetch_info.
pc;
412 DPRINTF(Fetch,
"decoder inst %s\n", *dyn_inst);
415 if (decoded_inst->
isLoad())
417 else if (decoded_inst->
isStore())
428 DPRINTF(Fetch,
"Instruction extracted from line %s"
429 " lineWidth: %d output_index: %d inputIndex: %d"
430 " pc: %s inst: %s\n",
433 fetch_info.
pc, *dyn_inst);
435 #if THE_ISA == X86_ISA || THE_ISA == ARM_ISA
450 fetch_info.
pc.upc(0);
451 fetch_info.
pc.nupc(1);
461 DPRINTF(Fetch,
"Inst not ready yet\n");
466 if (
decoder->needMoreBytes()) {
469 DPRINTF(Fetch,
"Updated inputIndex value PC: %s"
470 " inputIndex: 0x%x lineBaseAddr: 0x%x lineWidth: 0x%x\n",
481 if (output_index == 0) {
485 insts_out.
insts[output_index] = dyn_inst;
490 if (
DTRACE(MinorTrace) && !dyn_inst->isFault() &&
491 dyn_inst->staticInst->isMacroop())
493 dyn_inst->minorTraceInst(*
this);
506 DPRINTF(Fetch,
"Discarding all input on branch/fault\n");
508 fetch_info.
havePC =
false;
510 }
else if (discard_line) {
515 fetch_info.
havePC =
false;
559 if (!
inp.outputWire->isBubble())
570 case Enums::SingleThreaded:
571 priority_list.push_back(0);
573 case Enums::RoundRobin:
580 panic(
"Unknown fetch policy");
583 for (
auto tid : priority_list) {
601 return (*
inp.outputWire).isBubble() &&
606 :
Stats::Group(cpu,
"fetch2"),
608 "Number of integer instructions successfully decoded"),
610 "Number of floating point instructions successfully decoded"),
612 "Number of SIMD instructions successfully decoded"),
614 "Number of memory load instructions successfully decoded"),
616 "Number of memory store instructions successfully decoded"),
618 "Number of memory atomic instructions successfully decoded")
637 std::ostringstream
data;
642 (*
out.inputWire).reportData(
data);
644 MINORTRACE(
"inputIndex=%d havePC=%d predictionSeqNum=%d insts=%s\n",
const ThreadID InvalidThreadID
#define fatal(...)
This implements a cprintf based fatal() function.
ThreadID threadId
ThreadID associated with branch.
TheISA::PCState target
Starting PC of that stream.
ThreadID threadId
Thread associated with these instructions.
InstSeqNum predictionSeqNum
Fetch2 is the source of prediction sequence numbers.
std::vector< Fetch2ThreadInfo > fetchInfo
MinorDynInstPtr inst
Instruction which caused this branch.
Forward flowing data between Fetch2,Decode,Execute carrying a packet of instructions of a width appro...
MinorCPU & cpu
Pointer back to the containing CPU.
int16_t ThreadID
Thread index/ID type.
ThreadID getScheduledThread()
Use the current threading policy to determine the next thread to fetch from.
Minor::MinorActivityRecorder * activityRecorder
Activity recording for pipeline.
InstSeqNum lastStreamSeqNum
Stream sequence number of the last seen line used to identify changes of instruction stream.
InstSeqNum fetchSeqNum
Fetch2 is the source of fetch sequence numbers.
BPredUnit & branchPredictor
Branch predictor passed from Python configuration.
Line fetch data in the forward direction.
const std::string to_string(sc_enc enc)
Latch< ForwardInstData >::Input out
Output port carrying instructions into Decode.
Enums::ThreadPolicy threadPolicy
Thread Scheduling Policy (RoundRobin, Random, etc)
Reason reason
Explanation for this branch.
#define MINORTRACE(...)
DPRINTFN for MinorTrace reporting.
Data members after this line are cycle-to-cycle state.
InstId id
Thread, stream, prediction ...
virtual ThreadContext * getContext(int tn)
Given a thread number, get the thread context for it.
static bool isStreamChange(const BranchData::Reason reason)
Is a request with this reason actually a request to change the PC rather than a bubble or branch pred...
void predictBranch(MinorDynInstPtr inst, BranchData &branch)
Predicts branches for the given instruction.
@ BadlyPredictedBranchTarget
Stats::Scalar loadInstructions
void update(const InstSeqNum &done_sn, ThreadID tid)
Tells the branch predictor to commit any updates until the given sequence number.
void advancePC(PCState &pc, const StaticInstPtr &inst)
@ CorrectlyPredictedBranch
unsigned int lineWidth
Explicit line width, don't rely on data.size.
Derived & flags(Flags _flags)
Set the flags and marks this stat to print at the end of simulation.
void evaluate()
Pass on input/buffer data to the output if you can.
Latch< ForwardLineData >::Output inp
Input port carrying lines from Fetch1.
Latch< BranchData >::Output branchInp
Input port carrying branches from Execute.
void popInput(ThreadID tid)
Pop an element off the input buffer, if there are any.
Fault fault
This line has a fault.
bool havePC
PC is currently valid.
const ForwardLineData * getInput(ThreadID tid)
Get a piece of data to work on from the inputBuffer, or 0 if there is no data.
ThreadContext is the external interface to all thread state for anything outside of the CPU.
InstSeqNum streamSeqNum
The 'stream' this instruction belongs to.
Dynamic instruction for Minor.
MinorCPU is an in-order CPU model with four fixed pipeline stages:
bool blocked
Blocked indication for report.
bool processMoreThanOneInput
If true, more than one input word can be processed each cycle if there is room in the output to conta...
Addr lineBaseAddr
First byte address in the line.
#define ADD_STAT(n,...)
Convenience macro to add a stat to a statistics group.
unsigned int outputWidth
Width of output of this stage/input of next in instructions.
InstSeqNum expectedStreamSeqNum
Stream sequence number remembered from last time the predictionSeqNum changed.
Fetch2(const std::string &name, MinorCPU &cpu_, MinorCPUParams ¶ms, Latch< ForwardLineData >::Output inp_, Latch< BranchData >::Output branchInp_, Latch< BranchData >::Input predictionOut_, Latch< ForwardInstData >::Input out_, std::vector< InputBuffer< ForwardInstData >> &next_stage_input_buffer)
Stats::Scalar storeInstructions
Stats::Scalar vecInstructions
bool isBubble() const
BubbleIF interface.
void updateBranchPrediction(const BranchData &branch)
Update local branch prediction structures from feedback from Execute.
MinorDynInstPtr insts[MAX_FORWARD_INSTS]
Array of carried insts, ref counted.
Minor::Fetch2::Fetch2Stats stats
void dumpAllInput(ThreadID tid)
Dump the whole contents of the input buffer.
ThreadID threadId
The thread to which this line/instruction belongs.
const std::string & name()
bool isFault() const
This is a fault, not a line.
Fetch2Stats(MinorCPU *cpu)
Latch< BranchData >::Input predictionOut
Output port carrying predictions back to Fetch1.
Forward data between Execute and Fetch1 carrying change-of-address/stream information.
Stats::Scalar amoInstructions
GenericISA::DelaySlotPCState< MachInst > PCState
void activity()
Records that there is activity this cycle.
std::vector< ThreadID > roundRobinPriority(ThreadID priority)
Thread scheduling utility functions.
std::vector< InputBuffer< ForwardLineData > > inputBuffer
TheISA::PCState pc
PC of the first requested inst within this line.
TheISA::PCState pc
Remembered program counter value.
unsigned int inputIndex
Index into an incompletely processed input line that instructions are to be extracted from.
bool predict(const StaticInstPtr &inst, const InstSeqNum &seqNum, TheISA::PCState &pc, ThreadID tid)
Predicts whether or not the instruction is a taken branch, and the target of the branch if it is take...
ThreadID numThreads
Number of threads we're actually simulating (<= SMT_MAX_THREADS).
bool isDrained()
Is this stage drained? For Fetch2, draining is initiated by Execute halting Fetch1 causing Fetch2 to ...
Stats::Scalar intInstructions
Stats.
virtual TheISA::Decoder * getDecoderPtr()=0
void activateStage(const int idx)
Marks a stage as active.
std::vector< ThreadID > randomPriority()
void resize(unsigned int width)
Resize a bubble/empty ForwardInstData and fill with bubbles.
const std::string & name() const
InstSeqNum predictionSeqNum
The predicted qualifier to stream, attached by Fetch2 as a consequence of branch prediction.
const FlagsType total
Print the total.
void squash(const InstSeqNum &squashed_sn, ThreadID tid)
Squashes all outstanding updates until a given sequence number.
Stats::Scalar fpInstructions
std::vector< InputBuffer< ForwardInstData > > & nextStageReserve
Interface to reserve space in the next stage.
#define panic(...)
This implements a cprintf based panic() function.
Generated on Wed Sep 30 2020 14:02:08 for gem5 by doxygen 1.8.17