xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision 5152a864d39dccf4203fa56b62f58b40e7ee64d7)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import chisel3.util.experimental.BoringUtils
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L // TODO: set reset vector
  val groupAlign = log2Up(FetchWidth * 4 * 2)
  def groupPC(pc: UInt): UInt = Cat(pc(VAddrBits-1, groupAlign), 0.U(groupAlign.W))
  // each bit in the mask stands for 2 bytes (one RVC slot)
  def mask(pc: UInt): UInt = (Fill(PredictWidth * 2, 1.U(1.W)) >> pc(groupAlign - 1, 1))(PredictWidth - 1, 0)
  def snpc(pc: UInt): UInt = pc + (PopCount(mask(pc)) << 1)
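  // A worked example (assuming FetchWidth = 8 and PredictWidth = 16, so a
  // fetch group is 64 bytes and a packet holds up to 16 two-byte slots):
  //   pc = 0x80000028  =>  pc(groupAlign-1, 1) = 20, i.e. 20 slots into the group
  //   mask(pc) = low 12 bits set: the packet is cut at the group boundary
  //   snpc(pc) = pc + 12 * 2 = 0x80000040, the start of the next group
  // For offsets of 16 slots or less, all PredictWidth bits are set and
  // snpc(pc) = pc + 32.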

  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt) = Mux(shifted, ptr - 1.U, ptr)
  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d\n", sawNTBr, takenOnBr, saveHalfRVI)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}
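
// A note on the convention above (a sketch of the intended semantics): the
// global history lives in a circular buffer (extHist below) indexed by a
// pointer that *decreases* by one whenever a new history bit is shifted in,
// hence newPtr(ptr) = ptr - 1.U when this packet saw a branch (taken or
// not). The newest bit always sits at the current pointer, with older bits
// at increasing indices.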

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  val LBFetch = Flipped(new IFUFetchIO)
  val LBredirect = Flipped(ValidIO(UInt(VAddrBits.W)))
  val tgtpc = Output(UInt(VAddrBits.W))
  val inLoop = Input(Bool())
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val icacheResp = WireInit(Mux(io.inLoop, io.LBFetch.LBResp, io.icacheResp.bits))

  if4_flush := io.redirect.valid || io.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect
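  // The flush chain propagates backwards: a redirect discovered in a later
  // stage kills everything younger in the earlier stages. E.g. an if3
  // redirect flushes if2 and if1, and a backend redirect flushes all four.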

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (io.inLoop || io.icacheReq.ready)


  val if1_histPtr, if2_histPtr, if3_histPtr, if4_histPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val shiftPtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val ptr = Mux(shiftPtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }
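  // hist is a HistoryLength-wide window into the ExtHistoryLength-entry
  // circular buffer, starting at the (possibly just-shifted) pointer. The
  // index arithmetic `ptr + i.U` wraps around naturally because the pointer
  // is log2Up(ExtHistoryLength) bits wide. A minimal sketch, assuming
  // ExtHistoryLength = 64 and HistoryLength = 16:
  //   ptr = 60  =>  hist = extHist(60..63) followed by extHist(0..11)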

  shiftPtr := false.B
  newPtr := if1_histPtr



  val if1_GHInfo = Wire(new GlobalHistoryInfo())
  if1_GHInfo := 0.U.asTypeOf(new GlobalHistoryInfo)

  //********************** IF2 ****************************//
  val if2_valid = RegEnable(next = if1_valid, init = false.B, enable = if1_fire)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc)
  val if2_GHInfo = RegEnable(if1_GHInfo, if1_fire)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if2_flush) { if2_valid := if1_fire }
  .elsewhen (if1_fire) { if2_valid := if1_valid }
  .elsewhen (if2_fire) { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }
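  // Note on Chisel semantics here: if1_npc is a wire, and this when-chain
  // only provides its default value (the sequential next pc, or the held
  // previous value). The redirect assignments further down the file
  // (if2/if3/if4 redirects, loop-buffer redirect, backend redirect)
  // override it via last-connect semantics, so priority increases towards
  // the end of the module body.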

  val if2_bp = bpu.io.out(0).bits
  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && bpu.io.out(0).valid && if2_bp.redirect && !if2_bp.saveHalfRVI
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }

  val if2_realGHInfo = Wire(new GlobalHistoryInfo())
  if2_realGHInfo.sawNTBr     := if2_bp.hasNotTakenBrs
  if2_realGHInfo.takenOnBr   := if2_bp.takenOnBr
  if2_realGHInfo.saveHalfRVI := if2_bp.saveHalfRVI

  when (if2_fire && if2_realGHInfo.shifted) {
    shiftPtr := true.B
    newPtr := if2_newPtr
  }
  when (if2_realGHInfo.shifted && if2_newPtr >= ptr) {
    hist(if2_newPtr-ptr) := if2_realGHInfo.takenOnBr.asUInt
  }
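  // The second when-block patches the freshly produced history bit into the
  // hist wire that the BPU reads in this very cycle: as long as the new
  // pointer has not wrapped below ptr, hist(if2_newPtr - ptr) is exactly
  // the slot the just-shifted bit will occupy, so the in-flight prediction
  // already sees it before extHist is written.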



  //********************** IF3 ****************************//
  val if3_valid = RegEnable(next = if2_valid, init = false.B, enable = if2_fire)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (io.inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_GHInfo = RegEnable(if2_realGHInfo, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush) { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := if2_valid }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1).bits

  val if3_realGHInfo = Wire(new GlobalHistoryInfo())
  if3_realGHInfo.sawNTBr     := if3_bp.hasNotTakenBrs
  if3_realGHInfo.takenOnBr   := if3_bp.takenOnBr
  if3_realGHInfo.saveHalfRVI := if3_bp.saveHalfRVI

  class PrevHalfInstr extends Bundle {
    val valid = Bool()
    val taken = Bool()
    val ghInfo = new GlobalHistoryInfo()
    val fetchpc = UInt(VAddrBits.W) // only for debug
    val idx = UInt(VAddrBits.W) // only for debug
    val pc = UInt(VAddrBits.W)
    val target = UInt(VAddrBits.W)
    val instr = UInt(16.W)
    val ipf = Bool()
    val newPtr = UInt(log2Up(ExtHistoryLength).W)
  }

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instruction crosses 2 pages, and the upper 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)
  when (if4_prevHalfInstr.valid) {
    if3_prevHalfInstr := if4_prevHalfInstr
  }
  val prevHalfInstr = Mux(if4_prevHalfInstr.valid, if4_prevHalfInstr, if3_prevHalfInstr)

  // the first half of an RVI instruction waits until it meets its second half
  val if3_hasPrevHalfInstr = prevHalfInstr.valid && (prevHalfInstr.pc + 2.U) === if3_pc
  // set to invalid once consumed
  val prevHalfConsumed = if3_hasPrevHalfInstr && if3_fire
  when (prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
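  // Lifetime of a previous-half record, roughly: when if4 sees a packet
  // whose last 2-byte slot is the first half of an RVI instruction
  // (saveHalfRVI), it publishes if4_prevHalfInstr for one cycle; that copy
  // is also latched into if3_prevHalfInstr. When the next packet (at
  // prevHalfInstr.pc + 2) reaches if3 and fires, the halves are stitched
  // together in predecode and the record is invalidated.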

  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target
  if3_redirect := if3_fire && bpu.io.out(1).valid && (if3_hasPrevHalfInstr && prevHalfInstr.taken || if3_bp.redirect && (if3_bp.taken && !if3_bp.saveHalfRVI || !if3_bp.taken) )

  when (if3_redirect) {
    when (!(if3_hasPrevHalfInstr && prevHalfInstr.taken)) {
      if1_npc := if3_bp.target
      when (if3_realGHInfo.shifted){
        shiftPtr := true.B
        newPtr := if3_newPtr
      }
    }
  }

  // even when if3 does not redirect, we still need to update the hist wire
  when(if3_realGHInfo.shifted && if3_newPtr >= ptr) {
    hist(if3_newPtr-ptr) := if3_realGHInfo.takenOnBr
  }
  when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted && prevHalfInstr.newPtr >= ptr) {
    hist(prevHalfInstr.newPtr-ptr) := prevHalfInstr.ghInfo.takenOnBr
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_hasPrevHalfInstr && prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)

  val if4_GHInfo = RegEnable(if3_realGHInfo, if3_fire)
  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  if4_ready := (if4_fire || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := if3_valid }
  .elsewhen(if4_fire)  { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2).bits

  val if4_realGHInfo = Wire(new GlobalHistoryInfo())
  if4_realGHInfo.sawNTBr     := if4_bp.hasNotTakenBrs
  if4_realGHInfo.takenOnBr   := if4_bp.takenOnBr
  if4_realGHInfo.saveHalfRVI := if4_bp.saveHalfRVI


  val if4_cfi_jal = if4_pd.instrs(if4_bp.jmpIdx)
  val if4_cfi_jal_tgt = if4_pd.pc(if4_bp.jmpIdx) + Mux(if4_pd.pd(if4_bp.jmpIdx).isRVC,
    SignExt(Cat(if4_cfi_jal(12), if4_cfi_jal(8), if4_cfi_jal(10, 9), if4_cfi_jal(6), if4_cfi_jal(7), if4_cfi_jal(2), if4_cfi_jal(11), if4_cfi_jal(5, 3), 0.U(1.W)), XLEN),
    SignExt(Cat(if4_cfi_jal(31), if4_cfi_jal(19, 12), if4_cfi_jal(20), if4_cfi_jal(30, 21), 0.U(1.W)), XLEN))
  if4_bp.target := Mux(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, if4_cfi_jal_tgt, bpu.io.out(2).bits.target)
  if4_bp.redirect := bpu.io.out(2).bits.redirect || if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken && if4_cfi_jal_tgt =/= bpu.io.out(2).bits.target
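  // The two SignExt(Cat(...)) expressions above reassemble the standard
  // RISC-V jump immediates from their scrambled encoding order:
  //   C.J (CJ format): inst[12:2]  = imm[11|4|9:8|10|6|7|3:1|5]
  //   JAL (J  format): inst[31:12] = imm[20|10:1|11|19:12]
  // Since a direct jump's target can be decoded exactly from the
  // instruction bits, if4 recomputes it and overrides the BTB prediction,
  // forcing a redirect whenever the predicted target disagrees.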

  if4_prevHalfInstr := 0.U.asTypeOf(new PrevHalfInstr)
  when (bpu.io.out(2).valid && if4_fire && if4_bp.saveHalfRVI) {
    if4_prevHalfInstr.valid := true.B
    if4_prevHalfInstr.taken := if4_bp.taken
    if4_prevHalfInstr.ghInfo := if4_realGHInfo
    // Make sure shifted can work
    if4_prevHalfInstr.ghInfo.saveHalfRVI := false.B
    if4_prevHalfInstr.newPtr := if4_newPtr
    if4_prevHalfInstr.fetchpc := if4_pc
    if4_prevHalfInstr.idx := PopCount(mask(if4_pc)) - 1.U
    if4_prevHalfInstr.pc := if4_pd.pc(if4_prevHalfInstr.idx)
    if4_prevHalfInstr.target := if4_bp.target
    if4_prevHalfInstr.instr := if4_pd.instrs(if4_prevHalfInstr.idx)(15, 0)
    if4_prevHalfInstr.ipf := if4_ipf
  }

  // Redirect and npc logic for if4
  when (bpu.io.out(2).valid && if4_fire && if4_bp.redirect) {
    if4_redirect := true.B
    when (if4_bp.saveHalfRVI) {
      if1_npc := snpc(if4_pc)
    }.otherwise {
      if1_npc := if4_bp.target
    }
  }
  // }.elsewhen (bpu.io.out(2).valid && if4_fire/* && !if4_bp.redirect*/) {
  //   // We redirect the pipeline to the next fetch packet,
  //   // which contains the last half of the RVI instruction
  //   when (if4_bp.saveHalfRVI && if4_bp.taken) {
  //     if4_redirect := true.B
  //     if1_npc := snpc(if4_pc)
  //   }
  // }

  // This should cover the if4 redirect to snpc when saveHalfRVI
  when (if3_redirect) {
    when (if3_hasPrevHalfInstr && prevHalfInstr.taken) {
      if1_npc := prevHalfInstr.target
    }
  }

  // history logic for if4
  when (bpu.io.out(2).valid && if4_fire && if4_bp.redirect) {
    shiftPtr := true.B
    newPtr := if4_newPtr
  // }.elsewhen (bpu.io.out(2).valid && if4_fire/* && !if4_bp.redirect*/) {
  //   // only if we haven't seen any not-taken branches and
  //   // see a not-taken branch in if4 should we tell
  //   // if3 and if4 to update histptr
  //   // We do not shift the global history pointer unless we have the full
  //   // RVI instruction
  //   when (if4_newSawNTBrs && !if4_bp.takenOnBr) {
  //     shiftPtr := true.B
  //     // newPtr := if4_realGHInfo.newPtr
  //   }
  }

  when (if4_realGHInfo.shifted && if4_newPtr >= ptr) {
    hist(if4_newPtr-ptr) := if4_realGHInfo.takenOnBr
  }

  when (if3_redirect) {
    // when redirecting with if3_hasPrevHalfInstr, this prevHalfInstr should only be taken
    when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted) {
      shiftPtr := true.B
      newPtr := prevHalfInstr.newPtr
      extHist(prevHalfInstr.newPtr) := prevHalfInstr.ghInfo.takenOnBr
    }
  }

  // update the GHR at the end of a prediction's lifetime
  when (if4_fire && if4_realGHInfo.shifted) {
    extHist(if4_newPtr) := if4_realGHInfo.takenOnBr
  }
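  // Two levels of history update are at play here: the hist *wire* patches
  // are purely speculative and only shape what the BPU sees this cycle,
  // while writes to the extHist *register* commit a history bit, and only
  // happen once a prediction's lifetime ends (if4_fire) or when a half-RVI
  // record is replayed on an if3 redirect.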

  // This histPtr is only updated when a prediction is actually sent,
  // so that it reflects the final prediction info
  val finalPredHistPtr = RegInit(0.U(log2Up(ExtHistoryLength).W))
  if4_histPtr := finalPredHistPtr
  if4_newPtr  := if3_histPtr
  when (if4_fire && if4_realGHInfo.shifted) {
    finalPredHistPtr := if4_newPtr
  }

  if3_histPtr := Mux(if4_realGHInfo.shifted && if4_valid && !if4_flush, if4_histPtr - 1.U, if4_histPtr)
  if3_newPtr  := if2_histPtr

  if2_histPtr := Mux(if3_realGHInfo.shifted && if3_valid && !if3_flush, if3_histPtr - 1.U, if3_histPtr)
  if2_newPtr  := if1_histPtr

  if1_histPtr := Mux(if2_realGHInfo.shifted && if2_valid && !if2_flush, if2_histPtr - 1.U, if2_histPtr)
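  // The per-stage pointers thus chain combinationally from if4 back to if1,
  // each stage subtracting one more if the packet it holds shifted the
  // history. A sketch: with finalPredHistPtr = 10, a shifting packet in if4
  // and another in if2 (none in if3), we get if4_histPtr = 10,
  // if3_histPtr = 9, if2_histPtr = 9, if1_histPtr = 8, so if1's lookup
  // accounts for both in-flight history bits.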




  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    shiftPtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted CFI is not a branch,
      // and there wasn't any not-taken branch before it,
      // we only recover the pointer to an unshifted state
      newPtr := oldPtr
      finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      finalPredHistPtr := oldPtr - 1.U
      hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }
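  // Misprediction recovery, in short: the pointer is rolled back to the
  // value captured when the mispredicted packet was predicted. If that
  // packet shifted the history (it contained a branch, or not-taken
  // branches before the mispredicted CFI), the pointer moves one slot past
  // oldPtr and the correct outcome (the taken bit for a branch, 0
  // otherwise) is rewritten at the new head; otherwise the pointer is
  // simply restored unshifted.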

  when (io.LBredirect.valid) {
    if1_npc := io.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }

  when(io.inLoop) {
    io.icacheReq.valid := if4_flush
    io.icacheResp.ready := false.B
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
    // io.icacheResp.ready := if3_ready
    io.icacheResp.ready := if4_ready
    // io.icacheResp.ready := if3_valid
  }
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.LBFetch.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.LBFetch.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.LBFetch.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  io.LBFetch.LBReq := if3_pc
  io.tgtpc := if4_bp.target

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.cacheValid := (io.inLoop || io.icacheResp.valid)
  bpu.io.in.valid := if1_fire
  bpu.io.in.bits.pc := if1_npc
  bpu.io.in.bits.hist := hist.asUInt
  bpu.io.in.bits.histPtr := ptr
  bpu.io.in.bits.inMask := mask(if1_npc)
  bpu.io.out(0).ready := if2_fire
  bpu.io.out(1).ready := if3_fire
  bpu.io.out(2).ready := if4_fire
  bpu.io.predecode.valid := if4_valid
  bpu.io.predecode.bits.mask := if4_pd.mask
  bpu.io.predecode.bits.pd := if4_pd.pd
  bpu.io.predecode.bits.isFetchpcEqualFirstpc := if4_pc === if4_pd.pc(0)
  bpu.io.branchInfo.ready := if4_fire

  when(io.inLoop) {
    pd.io.in := io.LBFetch.LBResp
    pd.io.in.mask := io.LBFetch.LBResp.mask & mask(io.LBFetch.LBResp.pc)
    XSDebug("Fetch from LB\n")
    XSDebug(p"pc=${Hexadecimal(io.LBFetch.LBResp.pc)}\n")
    XSDebug(p"data=${Hexadecimal(io.LBFetch.LBResp.data)}\n")
    XSDebug(p"mask=${Hexadecimal(io.LBFetch.LBResp.mask)}\n")
  }.otherwise {
    pd.io.in := icacheResp
  }
  pd.io.prev.valid := if3_hasPrevHalfInstr
  pd.io.prev.bits := prevHalfInstr.instr
  // if a fetch packet triggers a page fault, replace the faulting instructions with nop
  when (!if3_hasPrevHalfInstr && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_hasPrevHalfInstr && (prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !prevHalfInstr.ipf) { crossPageIPF := true.B } // the upper 16 bits trigger the page fault
  }
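  // Note: ZeroExt("b0010011".U, 32) yields 0x00000013, which decodes as
  // `addi x0, x0, 0`, the canonical RISC-V nop. The faulting packet is thus
  // replaced by nops so that predecode stays well-formed, while the ipf /
  // crossPageIPF flags carry the exception on to the backend.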

  io.fetchPacket.valid := if4_valid && !io.redirect.valid
  io.fetchPacket.bits.instrs := if4_pd.instrs
  io.fetchPacket.bits.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
  io.fetchPacket.bits.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => io.fetchPacket.bits.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    io.fetchPacket.bits.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
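  // The mask expression truncates the packet after a taken branch:
  // assuming jmpIdx is log2(PredictWidth) bits wide, ~jmpIdx equals
  // (PredictWidth - 1) - jmpIdx, so `ones >> ~jmpIdx` keeps exactly slots
  // 0 through jmpIdx set. When no branch is taken, the Fill(..., !taken)
  // term makes the filter all-ones. The taken slot's pnpc is likewise
  // overridden with the predicted target instead of the fall-through pc.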
  io.fetchPacket.bits.brInfo := bpu.io.branchInfo.bits
  (0 until PredictWidth).foreach(i => io.fetchPacket.bits.brInfo(i).histPtr := finalPredHistPtr)
  (0 until PredictWidth).foreach(i => io.fetchPacket.bits.brInfo(i).predHistPtr := if4_predHistPtr)
  io.fetchPacket.bits.pd := if4_pd.pd
  io.fetchPacket.bits.ipf := if4_ipf
  io.fetchPacket.bits.crossPageIPFFix := if4_crossPageIPF

  // predicted-taken flag for the packet
  io.fetchPacket.bits.predTaken := if4_bp.taken

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFlushPipe=%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d     fire=%d            flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_histPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_histPtr, crossPageIPF, if3_realGHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_histPtr, if4_crossPageIPF, if4_realGHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d shiftPtr=%d newPtr=%d ptr=%d\n", if1_histPtr, shiftPtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.redirect, if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    // XSDebug("[IF2][GHInfo]: %s\n", if2_realGHInfo)
    if2_realGHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.redirect, if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][    prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstr.valid, prevHalfInstr.taken, prevHalfInstr.fetchpc, prevHalfInstr.idx, prevHalfInstr.pc, prevHalfInstr.target, prevHalfInstr.instr, prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    // XSDebug("[IF3][GHInfo]: %s\n", if3_realGHInfo)
    if3_realGHInfo.debug

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.redirect, if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_cfi_jal, if4_cfi_jal_tgt)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    // XSDebug("[IF4][GHInfo]: %s\n", if4_realGHInfo)
    if4_realGHInfo.debug
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}