xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision 79370ea62a964d3e81a07ea2ed9b06e766082198)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x80000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val nBanksInPacket = 2
  val bankBytes = PredictWidth * 2 / nBanksInPacket
  val nBanksInGroup = groupBytes / bankBytes
  val bankWidth = PredictWidth / nBanksInPacket
  val bankOffsetBits = log2Ceil(bankBytes)
  // (0, nBanksInGroup-1)
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1,bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanksInGroup-1).U
  // (0, bankBytes/2-1)
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1,1)
  def bankAligned(pc: UInt)  = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each 1 bit in the mask stands for 2 bytes
  // 8 bits, in which only the first 7 bits could be 0
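  // e.g. with bankWidth = 8 and offsetInBank(pc) = 3, maskFirstHalf(pc) = 0b00011111,
  // i.e. 5 halfword slots are left in the first bank starting from pc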
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1,0)
  // when fetching from the loop buffer, we need to make use of the full packet
  // and get the real mask in iCacheResp from the loop buffer
  // we may make predictions on more instructions than we could get from the loop buffer,
  // and this is handled in if4
  def maskLastHalf(pc: UInt, inLoop: Bool = false.B): UInt = Mux(isInLastBank(pc) && !inLoop, 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt, inLoop: Bool = false.B): UInt = Reverse(Cat(maskFirstHalf(pc), maskLastHalf(pc, inLoop)))
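  // sequential next pc: step over every valid halfword (2 bytes each) in the fetch mask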
  def snpc(pc: UInt, inLoop: Bool = false.B): UInt = pc + (PopCount(mask(pc, inLoop)) << 1)

  val enableGhistRepair = true
  val IFUDebug = true
}

class GlobalHistory extends XSBundle {
  val predHist = UInt(HistoryLength.W)
  // val sawNTBr = Bool()
  // val takenOnBr = Bool()
  // val saveHalfRVI = Bool()
  // def shifted = takenOnBr || sawNTBr
  // def newPtr(ptr: UInt = nowPtr): UInt = Mux(shifted, ptr - 1.U, ptr)
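  // shift in takenOnBr when this packet either takes a branch or sees a not-taken branch,
  // otherwise leave the history unchanged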
  def update(sawNTBr: Bool, takenOnBr: Bool, hist: UInt = predHist): GlobalHistory = {
    val g = Wire(new GlobalHistory)
    val shifted = takenOnBr || sawNTBr
    g.predHist := Mux(shifted, (hist << 1) | takenOnBr.asUInt, hist)
    g
  }

  final def === (that: GlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: GlobalHistory): Bool = !(this === that)

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GlobalHistory] hist=${Binary(predHist)}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}


class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(UInt(VAddrBits.W)))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class PrevHalfInstr extends XSBundle {
  val valid = Bool()
  val taken = Bool()
  val ghInfo = new GlobalHistory()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val newPtr = UInt(log2Up(ExtHistoryLength).W)
}

@chiselName
class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

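  // flushes propagate backwards: a backend or loop buffer redirect flushes all stages,
  // and a redirect from ifN flushes every stage before it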
  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
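  // if1 sends a new fetch pc down the pipeline only when if2 can take it (or is being
  // flushed) and, unless we are fetching from the loop buffer, the icache can accept a request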
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)


  // val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val if1_gh, if2_gh, if3_gh, if4_gh = Wire(new GlobalHistory)
  val if2_predicted_gh, if3_predicted_gh, if4_predicted_gh = Wire(new GlobalHistory)
  val final_gh = RegInit(0.U.asTypeOf(new GlobalHistory))
  val final_gh_bypass = WireInit(0.U.asTypeOf(new GlobalHistory))
  val flush_final_gh = WireInit(false.B)

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc, inLoop)
  val if2_predHist = RegEnable(if1_gh.predHist, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

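  // default next-pc selection; redirects from if2/if3/if4, the loop buffer and the
  // backend override this below (the last connection wins)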
  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }

  val if2_bp = bpu.io.out(0)

  // val if2_GHInfo = wrapGHInfo(if2_bp, if2_predHist)
  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && if2_bp.taken
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }

  if2_predicted_gh := if2_gh.update(if2_bp.hasNotTakenBrs, if2_bp.takenOnBr)

  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHist = RegEnable(if2_predHist, enable=if2_fire)
  // val if3_nextValidPC = Mux(if2_valid)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)
  if3_predicted_gh := if3_gh.update(if3_bp.hasNotTakenBrs, if3_bp.takenOnBr)


  val prevHalfInstrReq = Wire(new PrevHalfInstr)
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  // val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid

  // the previous half of an RVI instruction waits until it meets its latter half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && (if3_prevHalfInstr.pc + 2.U) === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr := prevHalfInstrReq
  }.elsewhen (if3_prevHalfConsumed || if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }

  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

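  // the pc of the upcoming fetch packet, if any, is currently in if2; we need a redirect
  // if if2 holds no packet or holds one with a pc other than the expected one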
  def if3_nextValidPCNotEquals(pc: UInt) = !if2_valid || if2_valid && if2_pc =/= pc
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.pc + 2.U)
  val if3_predTakenRedirect    = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(snpc(if3_pc, inLoop))
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  // val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_fire && (
                    // prevHalf is consumed but the next packet is not where it was meant to be
                    // we do not handle this condition because of the burden of building a correct GHInfo
                    // prevHalfMetRedirect ||
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect ||
                    // pred taken and the next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and the next fetch packet is not snpc
                    if3_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if3_ghInfoNotIdenticalRedirect
                  )

  val if3_target = WireInit(snpc(if3_pc))

  /* when (prevHalfMetRedirect) {
    if1_npc := if3_prevHalfInstr.target
  }.else */
  when (if3_prevHalfNotMetRedirect) {
    if3_target := if3_prevHalfInstr.pc + 2.U
  }.elsewhen (if3_predTakenRedirect) {
    if3_target := if3_bp.target
  }.elsewhen (if3_predNotTakenRedirect) {
    if3_target := snpc(if3_pc)
  }
  // }.elsewhen (if3_ghInfoNotIdenticalRedirect) {
  //   if3_target := Mux(if3_bp.taken, if3_bp.target, snpc(if3_pc))
  // }

  when (if3_redirect) {
    if1_npc := if3_target
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.ipf, if3_fire)
  val if4_acf = RegEnable(icacheResp.acf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  // This is the real mask given by the icache or the loop buffer
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)
  val if4_snpc = Mux(inLoop, if4_pc + (PopCount(if4_mask) << 1), snpc(if4_pc))


  val if4_predHist = RegEnable(if3_predHist, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (if4_fire && !hasPrevHalfInstrReq || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

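  // restrict the if4 prediction (bpu.io.out(2)) to the slots actually fetched (if4_mask),
  // so takens/brMask/jalMask never refer to instructions outside this packet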
  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)
  if4_bp.takens  := bpu.io.out(2).takens & if4_mask
  if4_bp.brMask  := bpu.io.out(2).brMask & if4_mask
  if4_bp.jalMask := bpu.io.out(2).jalMask & if4_mask

  if4_predicted_gh := if4_gh.update(if4_bp.hasNotTakenBrs, if4_bp.takenOnBr)

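  // recover the sign-extended jump offset straight from the instruction bits:
  // C.J/C.JAL immediate layout when rvc, JAL immediate layout otherwise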
  def cal_jal_tgt(inst: UInt, rvc: Bool): UInt = {
    Mux(rvc,
      SignExt(Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN),
      SignExt(Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)), XLEN)
    )
  }
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => if4_pd.pc(i) + cal_jal_tgt(if4_instrs(i), if4_pd.pd(i).isRVC)))

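  // direct jump targets can be computed exactly here, so override the predicted
  // target of every jal slot with the decoded one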
  (0 until PredictWidth).foreach {i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell the BPU the prediction of the prev half,
  // because the prediction is associated with the start of each inst
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && (if4_prevHalfInstr.pc + 2.U) === if4_pc && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.taken)
  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr := if3_prevHalfInstr
  }.elsewhen (if4_prevHalfConsumed || if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }

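  // when the packet ends with the first half of an RVI instruction (saveHalfRVI),
  // save its pc, data and prediction so the next packet can complete and predecode it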
  prevHalfInstrReq := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    val idx = if4_bp.lastHalfRVIIdx
    prevHalfInstrReq.valid := true.B
    // this is the result of the last-half RVI
    prevHalfInstrReq.taken := if4_bp.lastHalfRVITaken
    prevHalfInstrReq.ghInfo := if4_gh
    prevHalfInstrReq.newPtr := DontCare
    prevHalfInstrReq.fetchpc := if4_pc
    prevHalfInstrReq.idx := idx
    prevHalfInstrReq.pc := if4_pd.pc(idx)
    prevHalfInstrReq.target := if4_bp.lastHalfRVITarget
    prevHalfInstrReq.instr := if4_pd.instrs(idx)(15, 0)
    prevHalfInstrReq.ipf := if4_ipf
  }

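  // the upcoming fetch packet is the one in if3 if valid, otherwise the one in if2;
  // we need a redirect if its pc is not the expected one, or if neither stage holds a packet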
  def if4_nextValidPCNotEquals(pc: UInt) = if3_valid  && if3_pc =/= pc ||
                                           !if3_valid && (if2_valid && if2_pc =/= pc) ||
                                           !if3_valid && !if2_valid

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.pc+2.U)
  val if4_predTakenRedirect = !hasPrevHalfInstrReq && if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target)
  val if4_predNotTakenRedirect = !hasPrevHalfInstrReq && !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  // val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_fire && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    if4_prevHalfNextNotMet ||
                    // when if4 preds taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if4_ghInfoNotIdenticalRedirect
                  )

  val if4_target = WireInit(if4_snpc)

  when (if4_prevHalfNextNotMet) {
    if4_target := prevHalfInstrReq.pc+2.U
  }.elsewhen (if4_predTakenRedirect) {
    if4_target := if4_bp.target
  }.elsewhen (if4_predNotTakenRedirect) {
    if4_target := if4_snpc
  }
  // }.elsewhen (if4_ghInfoNotIdenticalRedirect) {
  //   if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)
  // }
  when (if4_redirect) {
    if1_npc := if4_target
  }

  when (if4_fire) {
    final_gh := if4_predicted_gh
  }
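  // if4 sees the committed history (bypassed on a misprediction flush); if3/if2/if1 see
  // the history predicted by the packet one stage further down when that packet is valid
  // and not flushed, and fall through to the older history otherwise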
  if4_gh := Mux(flush_final_gh, final_gh_bypass, final_gh)
  if3_gh := Mux(if4_valid && !if4_flush, if4_predicted_gh, if4_gh)
  if2_gh := Mux(if3_valid && !if3_flush, if3_predicted_gh, if3_gh)
  if1_gh := Mux(if2_valid && !if2_flush, if2_predicted_gh, if2_gh)

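  // on a misprediction reported by the backend, rebuild the global history from the
  // checkpoint carried with the mispredicted branch and flush the speculative history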
  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldGh = b.brInfo.hist
    val sawNTBr = b.brInfo.sawNotTakenBranch
    val isBr = b.pd.isBr
    val taken = b.taken
    val updatedGh = oldGh.update(sawNTBr, isBr && taken)
    final_gh := updatedGh
    final_gh_bypass := updatedGh
    flush_final_gh := true.B
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits
  }

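  // while serving fetches from the loop buffer, only issue an icache request again when
  // if4 is flushed (presumably because we are about to leave the loop)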
  when(inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = io.inOrderBrInfo.bits.brInfo.predHist
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // Don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := if1_gh.asUInt
  // bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_pc =/= if4_pd.pc(0)
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr

  pd.io.in := icacheResp
  when(inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask // TODO: Maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_prevHalfInstrMet
  pd.io.prev.bits := if3_prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the faulting instructions to nop
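  // if only the half fetched in this packet (the higher 16 bits of an instruction that
  // crosses the page boundary) faults, crossPageIPF is raised below as well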
  when (!if3_prevHalfInstrMet && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_prevHalfInstrMet && (if3_prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !if3_prevHalfInstr.ipf) { crossPageIPF := true.B } // page fault on the higher 16 bits
  }

  // Performance Counter
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
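  // when the prediction is taken, keep only the instructions up to and including jmpIdx:
  // assuming jmpIdx is log2(PredictWidth) bits wide, ~jmpIdx equals PredictWidth-1-jmpIdx,
  // so the right shift leaves exactly the low jmpIdx+1 bits set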
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))

  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).hist := final_gh)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHist := if4_predHist.asTypeOf(new GlobalHistory))
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.acf := if4_acf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits)}\n")

    XSDebug("[IF1] v=%d     fire=%d            flush=%d pc=%x mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, crossPageIPF, if3_bp.hasNotTakenBrs)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_crossPageIPF, if4_bp.hasNotTakenBrs)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] hist=%b\n", if1_gh.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", if1_gh.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_gh.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][    prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    if3_gh.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    if4_gh.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d acf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.acf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}