// xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision a8f296e332c4476e2e13d601d1df0af1136204c2)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val bankBytes = PredictWidth
  val nBanks = groupBytes / bankBytes
  val bankWidth = bankBytes / 2
  val bankOffsetBits = log2Ceil(bankBytes)
  // (0, nBanks-1)
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1, bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanks-1).U
  // (0, bankBytes/2-1)
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1, 1)
  def bankAligned(pc: UInt)  = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each 1 bit in mask stands for 2 Bytes
  // bankWidth bits; the lowest bit is always set, so at most the upper bankWidth-1 bits can be 0
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1,0)
  def maskLastHalf(pc: UInt): UInt = Mux(isInLastBank(pc), 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt): UInt = Cat(maskFirstHalf(pc), maskLastHalf(pc))
  def snpc(pc: UInt): UInt = pc + (PopCount(mask(pc)) << 1)
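  // A worked example (illustrative only; assumes FetchWidth = 8 and
  // PredictWidth = 16, i.e. groupBytes = 64, bankBytes = 16, nBanks = 4,
  // bankWidth = 8): for a pc with offsetInBank(pc) = 2 that is not in the
  // last bank,
  //   maskFirstHalf(pc) = 0b00111111 (0xff >> 2)
  //   maskLastHalf(pc)  = 0b11111111
  //   mask(pc)          = 0b0011111111111111 (14 valid halfwords)
  //   snpc(pc)          = pc + 28, the start of the bank after the next one.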

  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt) = Mux(shifted, ptr - 1.U, ptr)

  final def === (that: GlobalHistoryInfo): Bool = {
    this.shifted === that.shifted &&
    this.takenOnBr === that.takenOnBr
  }

  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d\n", sawNTBr, takenOnBr, saveHalfRVI)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}
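// Note on the class above: the global history grows toward lower indices,
// so seeing any branch (taken or not) shifts the history and newPtr moves
// the pointer down by one; otherwise the pointer passes through unchanged.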

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)


  val if1_histPtr, if2_histPtr, if3_histPtr, if4_histPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val shiftPtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val ptr = Mux(shiftPtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }
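  // hist is a HistoryLength-wide sliding window into the circular extHist
  // buffer starting at ptr: e.g., with ptr = 5, hist(0) reads extHist(5),
  // hist(1) reads extHist(6), and so on. The UInt index arithmetic wraps
  // modulo 2^log2Up(ExtHistoryLength), which equals ExtHistoryLength as long
  // as it is a power of two (assumed here).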

  shiftPtr := false.B
  newPtr := if1_histPtr

  def wrapGHInfo(bp: BranchPrediction) = {
    val ghi = Wire(new GlobalHistoryInfo())
    ghi.sawNTBr     := bp.hasNotTakenBrs
    ghi.takenOnBr   := bp.takenOnBr
    ghi.saveHalfRVI := bp.saveHalfRVI
    ghi
  }

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }

  val if2_bp = bpu.io.out(0)
  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && if2_bp.redirect && !if2_bp.saveHalfRVI
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }

  val if2_GHInfo = wrapGHInfo(if2_bp)

  when (if2_fire && if2_GHInfo.shifted) {
    shiftPtr := true.B
    newPtr := if2_newPtr
  }
  when (if2_GHInfo.shifted && if2_newPtr >= ptr) {
    hist(if2_newPtr-ptr) := if2_GHInfo.takenOnBr.asUInt
  }

  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)

  val if3_GHInfo = wrapGHInfo(if3_bp)

  class PrevHalfInstr extends Bundle {
    val valid = Bool()
    val taken = Bool()
    val ghInfo = new GlobalHistoryInfo()
    val fetchpc = UInt(VAddrBits.W) // only for debug
    val idx = UInt(VAddrBits.W) // only for debug
    val pc = UInt(VAddrBits.W)
    val target = UInt(VAddrBits.W)
    val instr = UInt(16.W)
    val ipf = Bool()
    val newPtr = UInt(log2Up(ExtHistoryLength).W)
  }
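  // Illustrative scenario for PrevHalfInstr: a 32-bit (RVI) instruction
  // starts at the last halfword of a fetch range, so only its lower 16 bits
  // arrive this cycle. They are saved here, and when the next packet arrives
  // with if3_pc == pc + 2, the two halves are spliced back together in
  // predecode via pd.io.prev.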

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)
  when (if4_prevHalfInstr.valid) {
    if3_prevHalfInstr := if4_prevHalfInstr
  }
  val prevHalfInstr = Mux(if4_prevHalfInstr.valid, if4_prevHalfInstr, if3_prevHalfInstr)

  // the previous half of an RVI instruction waits until it meets its second half
  val if3_hasPrevHalfInstr = prevHalfInstr.valid && (prevHalfInstr.pc + 2.U) === if3_pc
  // set to invalid once consumed or on a redirect from the backend
  val prevHalfConsumed = if3_hasPrevHalfInstr && if3_fire || if4_flush
  when (prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }

  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target
  if3_redirect := if3_fire && (if3_hasPrevHalfInstr && prevHalfInstr.taken || if3_bp.redirect && (if3_bp.taken && !if3_bp.saveHalfRVI || !if3_bp.taken) )

  when (if3_redirect) {
    when (!(if3_hasPrevHalfInstr && prevHalfInstr.taken)) {
      if1_npc := if3_bp.target
      when (if3_GHInfo.shifted) {
        shiftPtr := true.B
        newPtr := if3_newPtr
      }
    }
  }

  // even when it does not redirect, we still need to patch the hist wire
  when(if3_GHInfo.shifted && if3_newPtr >= ptr) {
    hist(if3_newPtr-ptr) := if3_GHInfo.takenOnBr
  }
  when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted && prevHalfInstr.newPtr >= ptr) {
    hist(prevHalfInstr.newPtr-ptr) := prevHalfInstr.ghInfo.takenOnBr
  }
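  // Note that these hist-wire patches rely on Chisel's last-connect
  // semantics: the if3 patches above come after the if2 patch (and before
  // the if4 patch below), so if several stages target the same entry in one
  // cycle, the assignment that is latest in source order wins.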

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_hasPrevHalfInstr && prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)

  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  if4_ready := (if4_fire || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  val if4_GHInfo = wrapGHInfo(if4_bp)

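  // The block below recomputes the target of a predicted-taken JAL/C.J
  // directly from the instruction bits, presumably to override a stale
  // predictor target. The immediate reassembly follows the RISC-V encodings:
  //   C.J: imm[11|4|9:8|10|6|7|3:1|5] taken from inst[12:2]
  //   JAL: imm[20|10:1|11|19:12]      taken from inst[31:12]
  // each shifted left by one (the trailing 0.U(1.W)) and sign-extended to XLEN.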
  val if4_cfi_jal = if4_pd.instrs(if4_bp.jmpIdx)
  val if4_cfi_jal_tgt = if4_pd.pc(if4_bp.jmpIdx) + Mux(if4_pd.pd(if4_bp.jmpIdx).isRVC,
    SignExt(Cat(if4_cfi_jal(12), if4_cfi_jal(8), if4_cfi_jal(10, 9), if4_cfi_jal(6), if4_cfi_jal(7), if4_cfi_jal(2), if4_cfi_jal(11), if4_cfi_jal(5, 3), 0.U(1.W)), XLEN),
    SignExt(Cat(if4_cfi_jal(31), if4_cfi_jal(19, 12), if4_cfi_jal(20), if4_cfi_jal(30, 21), 0.U(1.W)), XLEN))
  if4_bp.target := Mux(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, if4_cfi_jal_tgt, bpu.io.out(2).target)
  if4_bp.redirect := bpu.io.out(2).redirect || if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken && if4_cfi_jal_tgt =/= bpu.io.out(2).target

  if4_prevHalfInstr := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    if4_prevHalfInstr.valid := true.B
    if4_prevHalfInstr.taken := if4_bp.taken
    if4_prevHalfInstr.ghInfo := if4_GHInfo
    // clear saveHalfRVI so that shifted can work as intended
    if4_prevHalfInstr.ghInfo.saveHalfRVI := false.B
    if4_prevHalfInstr.newPtr := if4_newPtr
    if4_prevHalfInstr.fetchpc := if4_pc
    if4_prevHalfInstr.idx := PopCount(mask(if4_pc)) - 1.U
    if4_prevHalfInstr.pc := if4_pd.pc(if4_prevHalfInstr.idx)
    if4_prevHalfInstr.target := if4_bp.target
    if4_prevHalfInstr.instr := if4_pd.instrs(if4_prevHalfInstr.idx)(15, 0)
    if4_prevHalfInstr.ipf := if4_ipf
  }

  // Redirect and npc logic for if4
  when (if4_fire && if4_bp.redirect) {
    if4_redirect := true.B
    when (if4_bp.saveHalfRVI) {
      if1_npc := snpc(if4_pc)
    }.otherwise {
      if1_npc := if4_bp.target
    }
  }

  // This should cover the if4 redirect to snpc when saveHalfRVI
  when (if3_redirect) {
    when (if3_hasPrevHalfInstr && prevHalfInstr.taken) {
      if1_npc := prevHalfInstr.target
    }
  }

  // history logic for if4
  when (if4_fire && if4_bp.redirect) {
    shiftPtr := true.B
    newPtr := if4_newPtr
  }

  when (if4_GHInfo.shifted && if4_newPtr >= ptr) {
    hist(if4_newPtr-ptr) := if4_GHInfo.takenOnBr
  }

  when (if3_redirect) {
    // when redirecting with if3_hasPrevHalfInstr, this prevHalfInstr must have been taken
    when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted) {
      shiftPtr := true.B
      newPtr := prevHalfInstr.newPtr
      extHist(prevHalfInstr.newPtr) := prevHalfInstr.ghInfo.takenOnBr
    }
  }

  // modify GHR at the end of a prediction's lifetime
  when (if4_fire && if4_GHInfo.shifted) {
    extHist(if4_newPtr) := if4_GHInfo.takenOnBr
  }

  // This is a histPtr that is only updated when a prediction
  // is sent, so that it carries the final prediction info
  val finalPredHistPtr = RegInit(0.U(log2Up(ExtHistoryLength).W))
  if4_histPtr := finalPredHistPtr
  if4_newPtr  := if3_histPtr
  when (if4_fire && if4_GHInfo.shifted) {
    finalPredHistPtr := if4_newPtr
  }

  if3_histPtr := Mux(if4_GHInfo.shifted && if4_valid && !if4_flush, if4_histPtr - 1.U, if4_histPtr)
  if3_newPtr  := if2_histPtr

  if2_histPtr := Mux(if3_GHInfo.shifted && if3_valid && !if3_flush, if3_histPtr - 1.U, if3_histPtr)
  if2_newPtr  := if1_histPtr

  if1_histPtr := Mux(if2_GHInfo.shifted && if2_valid && !if2_flush, if2_histPtr - 1.U, if2_histPtr)
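  // Reading the pointer chain above: if4 sees the last committed pointer
  // (finalPredHistPtr), and each younger stage decrements it by one more for
  // every valid, unflushed older stage whose prediction shifts the history,
  // so if1 always fetches with a pointer that accounts for all in-flight shifts.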

  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    shiftPtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we should only recover the pointer to an unshifted state
      newPtr := oldPtr
      finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      finalPredHistPtr := oldPtr - 1.U
      hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }
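  // Recovery example for the block above (illustrative values): a taken
  // branch predicted with histPtr = 10 resolves as mispredicted. Since it is
  // a branch, the history was shifted, so newPtr = 9; both the speculative
  // window the BPU reads this cycle (hist(0), since ptr now follows newPtr)
  // and the committed extHist(9) are rewritten with the resolved direction.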

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }

  when(inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist here

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.stageValid(0) := if2_valid
  bpu.io.stageValid(1) := if3_valid
  bpu.io.stageValid(2) := if4_valid
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := hist.asUInt
  bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.isFetchpcEqualFirstpc := if4_pc === if4_pd.pc(0)

  pd.io.in := icacheResp
  when(inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask & mask(loopBuffer.io.out.bits.pc) // TODO: maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_hasPrevHalfInstr
  pd.io.prev.bits := prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the faulting instructions to nop
  when (!if3_hasPrevHalfInstr && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_hasPrevHalfInstr && (prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !prevHalfInstr.ipf) { crossPageIPF := true.B } // the higher 16 bits trigger a page fault
  }
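  // In the substitutions above, "b0010011".U zero-extended to 32 bits is
  // 0x00000013, i.e. addi x0, x0, 0, the canonical RISC-V nop. Faulting
  // slots become nops so the packet can still flow down the pipeline, while
  // the ipf/crossPageIPF flags carry the fault information along with it.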

  // Performance counters
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
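  // The mask below cuts off everything after a predicted-taken jump at
  // jmpIdx: assuming jmpIdx is log2(PredictWidth) bits wide, ~jmpIdx equals
  // (PredictWidth-1) - jmpIdx, so the all-ones vector shifted right by
  // ~jmpIdx keeps exactly the low jmpIdx+1 bits.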
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := finalPredHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp=%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d     fire=%d            flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_histPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_histPtr, crossPageIPF, if3_GHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_histPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d shiftPtr=%d newPtr=%d ptr=%d\n", if1_histPtr, shiftPtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.redirect, if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_GHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.redirect, if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][    prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstr.valid, prevHalfInstr.taken, prevHalfInstr.fetchpc, prevHalfInstr.idx, prevHalfInstr.pc, prevHalfInstr.target, prevHalfInstr.instr, prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    if3_GHInfo.debug

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.redirect, if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_cfi_jal, if4_cfi_jal_tgt)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    if4_GHInfo.debug
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}