xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision a700653711734f2f3a3033dbee6c7a0abe56d021)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x80000000L // TODO: set reset vector
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val nBanksInPacket = 2
  val bankBytes = PredictWidth * 2 / nBanksInPacket
  val nBanksInGroup = groupBytes / bankBytes
  val bankWidth = PredictWidth / nBanksInPacket
  val bankOffsetBits = log2Ceil(bankBytes)
  // ranges over [0, nBanksInGroup-1]
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1,bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanksInGroup-1).U
  // ranges over [0, bankBytes/2-1]
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1,1)
  def bankAligned(pc: UInt)  = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each 1 bit in the mask stands for 2 bytes;
  // bankWidth bits in total (8 under the default parameters), of which only the first bankWidth-1 could be 0
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1,0)
  // When fetching from the loop buffer, we need to make use of the full packet
  // and get the real mask in iCacheResp from the loop buffer.
  // We may make predictions on more instructions than we could get from the loop buffer;
  // this is handled in if4.
  def maskLastHalf(pc: UInt, inLoop: Bool = false.B): UInt = Mux(isInLastBank(pc) && !inLoop, 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt, inLoop: Bool = false.B): UInt = Reverse(Cat(maskFirstHalf(pc), maskLastHalf(pc, inLoop)))
  def snpc(pc: UInt, inLoop: Bool = false.B): UInt = pc + (PopCount(mask(pc, inLoop)) << 1)
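
  // Worked example (a sketch; assumes the default FetchWidth = 8 and
  // PredictWidth = 16, giving groupBytes = 64, bankBytes = 16, bankWidth = 8):
  // for pc = 0x...24, offsetInBank(pc) = 2, so
  //   maskFirstHalf = b00111111, maskLastHalf = b11111111,
  //   mask = Reverse(b0011111111111111) = b1111111111111100,
  // i.e. the two 2-byte slots below pc are masked off and the packet extends
  // to the end of the next bank, so snpc = bankAligned(pc) + 32.
  // If pc is in the last bank of a group (and we are not fetching from the
  // loop buffer), maskLastHalf is all zeros and the packet stops at the
  // cache-line boundary instead.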

  val enableGhistRepair = true
  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val nowPtr = UInt(log2Ceil(ExtHistoryLength).W)
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  // val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt = nowPtr): UInt = Mux(shifted, ptr - 1.U, ptr)
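
  // nowPtr points at the most recent history bit in extHist, and the history
  // grows downward: when this packet shifts the history (a taken branch or at
  // least one not-taken branch was seen), the new bit is written at
  // nowPtr - 1, which is what newPtr() returns.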

  final def === (that: GlobalHistoryInfo): Bool = {
    shifted === that.shifted &&
    takenOnBr === that.takenOnBr &&
    nowPtr === that.nowPtr
  }

  final def =/= (that: GlobalHistoryInfo): Bool = !(this === that)

  // def update(): GlobalHistoryInfo = {
  //   val g = WireInit(this)
  //   g.nowPtr := nowPtr - Mux(shifted, 1.U, 0.U)
  //   g.sawNTBr := Mux(saveHalfRVI, sawNTBr, false.B)
  //   g.takenOnBr := Mux(saveHalfRVI, takenOnBr, false.B)
  //   // g.saveHalfRVI := false.B
  //   g
  // }

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GHInfo] sawNTBr=${sawNTBr}, takenOnBr=${takenOnBr}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(UInt(VAddrBits.W)))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class PrevHalfInstr extends XSBundle {
  val valid = Bool()
  val taken = Bool()
  val ghInfo = new GlobalHistoryInfo()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val newPtr = UInt(log2Up(ExtHistoryLength).W)
}

@chiselName
class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)


  // val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val updatePtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if1_histPtr = RegEnable(next=newPtr, init=0.U(log2Up(ExtHistoryLength).W), enable=updatePtr)
  val ptr = Mux(updatePtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }
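
  // hist is a HistoryLength-bit window into extHist starting at ptr. Since
  // the index arithmetic is done on a log2Up(ExtHistoryLength)-bit UInt, the
  // window wraps around (cleanly, assuming ExtHistoryLength is a power of
  // two), so speculative shifts only move the pointer and older history
  // remains recoverable for repair.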

  updatePtr := false.B
  newPtr := if1_histPtr



  def wrapGHInfo(bp: BranchPrediction, ptr: UInt) = {
    val ghi = Wire(new GlobalHistoryInfo())
    ghi.sawNTBr     := bp.hasNotTakenBrs
    ghi.takenOnBr   := bp.takenOnBr
    // ghi.saveHalfRVI := bp.saveHalfRVI
    ghi.nowPtr      := ptr
    ghi
  }

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc, inLoop)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }
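
  // if1_npc is also driven by the redirect logic below. Chisel's last-connect
  // semantics make the textually later assignments win, which yields the
  // priority: backend redirect > loop buffer redirect > if4 > if3 > if2 >
  // the sequential next-pc above.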

  val if2_bp = bpu.io.out(0)

  val if2_GHInfo = wrapGHInfo(if2_bp, if2_predHistPtr)
  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && if2_bp.taken
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }
  when (if2_fire && if2_GHInfo.shifted) {
    val if2_newPtr = if2_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if2_newPtr
    extHist(if2_newPtr) := if2_GHInfo.takenOnBr.asUInt
  }



  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  val if3_lastGHInfo = RegEnable(if2_GHInfo, enable=if2_fire)
  // val if3_nextValidPC = Mux(if2_valid)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)

  val if3_GHInfo = wrapGHInfo(if3_bp, if3_predHistPtr)

  val prevHalfInstrReq = Wire(new PrevHalfInstr)
  // only valid when if4 fires
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  // val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and its higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid

  // the first half of an RVI instruction waits until it meets its other half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && (if3_prevHalfInstr.pc + 2.U) === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr := prevHalfInstrReq
  }.elsewhen (if3_prevHalfConsumed || if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }
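
  // Example of the prev-half flow: a 4-byte RVI occupies the last 2 bytes of
  // a packet. if4 records its first half in prevHalfInstrReq, which is
  // latched here; it is "met" once the packet entering if3 starts exactly at
  // pc + 2, supplying the remaining 16 bits to predecode.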

  // when the BPU signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

  def if3_nextValidPCNotEquals(pc: UInt) = !if2_valid || if2_valid && if2_pc =/= pc
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.pc + 2.U)
  val if3_predTakenRedirect    = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(snpc(if3_pc, inLoop))
  // when a prev half instr is pending, if3_GHInfo holds the info of the last prev half instr
  val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_fire && (
                    // prevHalf is consumed but the next packet is not where it was meant to be
                    // we do not handle this condition because of the burden of building a correct GHInfo
                    // prevHalfMetRedirect ||
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect ||
                    // pred taken and next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and next fetch packet is not snpc
                    if3_predNotTakenRedirect ||
                    // GHInfo from the last pred does not correspond with this packet
                    if3_ghInfoNotIdenticalRedirect
                  )

  val if3_target = WireInit(snpc(if3_pc))

  /* when (prevHalfMetRedirect) {
    if1_npc := if3_prevHalfInstr.target
  }.else */
  when (if3_prevHalfNotMetRedirect) {
    if3_target := if3_prevHalfInstr.pc + 2.U
  }.elsewhen (if3_predTakenRedirect) {
    if3_target := if3_bp.target
  }.elsewhen (if3_predNotTakenRedirect) {
    if3_target := snpc(if3_pc)
  }.elsewhen (if3_ghInfoNotIdenticalRedirect) {
    if3_target := Mux(if3_bp.taken, if3_bp.target, snpc(if3_pc))
  }

  when (if3_redirect) {
    if1_npc := if3_target
    val if3_newPtr = if3_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if3_newPtr
    extHist(if3_newPtr) := if3_GHInfo.takenOnBr.asUInt
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_lastGHInfo = RegEnable(if3_GHInfo, if3_fire)
  // this is the real mask given by the icache or the loop buffer
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)
  val if4_snpc = Mux(inLoop, if4_pc + (PopCount(if4_mask) << 1), snpc(if4_pc))


  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (if4_fire && !hasPrevHalfInstrReq || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)
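  // The BPU may predict on more slots than the icache / loop buffer actually
  // delivered (see maskLastHalf above), so the prediction vectors are
  // re-masked with the real fetch mask before they can cause redirects.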
  if4_bp.takens  := bpu.io.out(2).takens & if4_mask
  if4_bp.brMask  := bpu.io.out(2).brMask & if4_mask
  if4_bp.jalMask := bpu.io.out(2).jalMask & if4_mask

  val if4_GHInfo = wrapGHInfo(if4_bp, if4_predHistPtr)

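  // cal_jal_tgt reassembles the standard RISC-V jump immediates from the raw
  // instruction bits:
  //   C.J / C.JAL (rvc): imm[11|4|9:8|10|6|7|3:1|5] taken from inst[12:2]
  //   JAL:               imm[20|10:1|11|19:12]      taken from inst[31:12]
  // each with the implicit zero LSB appended and sign-extended to XLEN.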
  def cal_jal_tgt(inst: UInt, rvc: Bool): UInt = {
    Mux(rvc,
      SignExt(Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN),
      SignExt(Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)), XLEN)
    )
  }
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => if4_pd.pc(i) + cal_jal_tgt(if4_instrs(i), if4_pd.pd(i).isRVC)))

  (0 until PredictWidth).foreach {i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell the BPU the prediction of the prev half,
  // because the prediction is associated with the start of each instruction
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && (if4_prevHalfInstr.pc + 2.U) === if4_pc && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.taken)
  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr := if3_prevHalfInstr
  }.elsewhen (if4_prevHalfConsumed || if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }

  prevHalfInstrReq := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    val idx = if4_bp.lastHalfRVIIdx
    prevHalfInstrReq.valid := true.B
    // this is the result of the last half RVI
    prevHalfInstrReq.taken := if4_bp.lastHalfRVITaken
    prevHalfInstrReq.ghInfo := if4_GHInfo
    prevHalfInstrReq.newPtr := if4_GHInfo.newPtr()
    prevHalfInstrReq.fetchpc := if4_pc
    prevHalfInstrReq.idx := idx
    prevHalfInstrReq.pc := if4_pd.pc(idx)
    prevHalfInstrReq.target := if4_bp.lastHalfRVITarget
    prevHalfInstrReq.instr := if4_pd.instrs(idx)(15, 0)
    prevHalfInstrReq.ipf := if4_ipf
  }

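  // Check the pc of the next valid packet already in the pipeline: if3 takes
  // priority over if2; if neither stage holds a packet, the next pc is
  // unknown and is conservatively treated as not matching.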
  def if4_nextValidPCNotEquals(pc: UInt) = if3_valid  && if3_pc =/= pc ||
                                           !if3_valid && (if2_valid && if2_pc =/= pc) ||
                                           !if3_valid && !if2_valid

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.pc+2.U)
  val if4_predTakenRedirect = !hasPrevHalfInstrReq && if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target)
  val if4_predNotTakenRedirect = !hasPrevHalfInstrReq && !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_fire && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    if4_prevHalfNextNotMet ||
                    // when if4 preds taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect ||
                    // GHInfo from the last pred does not correspond with this packet
                    if4_ghInfoNotIdenticalRedirect
                  )

  val if4_target = WireInit(if4_snpc)

  when (if4_prevHalfNextNotMet) {
    if4_target := prevHalfInstrReq.pc+2.U
  }.elsewhen (if4_predTakenRedirect) {
    if4_target := if4_bp.target
  }.elsewhen (if4_predNotTakenRedirect) {
    if4_target := if4_snpc
  }.elsewhen (if4_ghInfoNotIdenticalRedirect) {
    if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)
  }
  when (if4_redirect) {
    if1_npc := if4_target
    val if4_newPtr = if4_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if4_newPtr
    extHist(if4_newPtr) := if4_GHInfo.takenOnBr.asUInt
  }


  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    updatePtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we should only recover the pointer to an unshifted state
      newPtr := oldPtr
      // finalPredHistPtr := oldPtr
    }.otherwise {
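      // The history did shift (the cfi is a branch, or a not-taken branch
      // preceded it): move the pointer one step down from oldPtr and rewrite
      // the newest bit with the resolved direction (taken only if the
      // mispredicted cfi is itself a branch).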
      newPtr := oldPtr - 1.U
      // finalPredHistPtr := oldPtr - 1.U
      // hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits
  }

  when(inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := hist.asUInt
  bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_pc =/= if4_pd.pc(0)
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr

  pd.io.in := icacheResp
  when(inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask // TODO: Maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_prevHalfInstrMet
  pd.io.prev.bits := if3_prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the faulting instructions to nop
  when (!if3_prevHalfInstrMet && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_prevHalfInstrMet && (if3_prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !if3_prevHalfInstr.ipf) { crossPageIPF := true.B } // the higher 16 bits trigger a page fault
  }

  // Performance Counter
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
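  // If not taken, Fill(PredictWidth, !taken) keeps the whole predecode mask.
  // If taken, ~jmpIdx equals PredictWidth - 1 - jmpIdx (assuming jmpIdx is
  // log2(PredictWidth) bits wide), so the right shift leaves jmpIdx + 1 ones
  // in the low bits, keeping only the instructions up to and including the
  // taken jump. E.g., with PredictWidth = 16 and jmpIdx = 3, the shift
  // produces b1111: slots 0..3 survive.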
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))

  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := if4_predHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits)}\n")

    XSDebug("[IF1] v=%d     fire=%d            flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_predHistPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_predHistPtr, crossPageIPF, if3_GHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_predHistPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d updatePtr=%d newPtr=%d ptr=%d\n", if1_histPtr, updatePtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_GHInfo.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect, if3_ghInfoNotIdenticalRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][    prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    if3_GHInfo.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect, if4_ghInfoNotIdenticalRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    if4_GHInfo.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}