xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision 04fb04ef0050f7ec094471626295309b281d4e66)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x80000000L // TODO: set reset vector
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val nBanksInPacket = 2
  val bankBytes = PredictWidth * 2 / nBanksInPacket
  val nBanksInGroup = groupBytes / bankBytes
  val bankWidth = PredictWidth / nBanksInPacket
  val bankOffsetBits = log2Ceil(bankBytes)
  // ranges over [0, nBanksInGroup-1]
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1,bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanksInGroup-1).U
  // ranges over [0, bankBytes/2-1]
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1,1)
  def bankAligned(pc: UInt)  = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each bit in the mask stands for 2 bytes
  // bankWidth bits in total, of which at most the first bankWidth-1 can be 0
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1,0)
  // when fetching from the loop buffer, we need to make use of the full packet
  // and get the real mask in iCacheResp from the loop buffer;
  // we may make predictions on more instructions than the loop buffer can supply,
  // and this is handled in if4
  def maskLastHalf(pc: UInt, inLoop: Bool = false.B): UInt = Mux(isInLastBank(pc) && !inLoop, 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt, inLoop: Bool = false.B): UInt = Reverse(Cat(maskFirstHalf(pc), maskLastHalf(pc, inLoop)))
  def snpc(pc: UInt, inLoop: Bool = false.B): UInt = pc + (PopCount(mask(pc, inLoop)) << 1)

  val IFUDebug = true
}

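// A worked example of the mask arithmetic above (a sketch only; the concrete
// numbers assume FetchWidth = 8 and PredictWidth = 16, values this trait does
// not itself fix):
//   groupBytes = 8*4*2 = 64, bankBytes = 16*2/2 = 16, nBanksInGroup = 4, bankWidth = 8
//   for a pc whose low bits are 0x6 (offsetInBank = 3, not in the last bank):
//     maskFirstHalf = 0b11111111 >> 3 = 0b00011111   (5 halfwords left in this bank)
//     maskLastHalf  = 0b11111111                     (all 8 halfwords of the next bank)
//     mask          = Reverse(Cat(firstHalf, lastHalf)) = 0b1111111111111000
//                     (LSB-first: slots 0..2 before pc are masked off)
//     snpc          = pc + PopCount(mask)*2 = pc + 26, the start of the bank after next
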
class GlobalHistoryInfo() extends XSBundle {
  val nowPtr = UInt(log2Ceil(ExtHistoryLength).W)
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  // val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt = nowPtr): UInt = Mux(shifted, ptr - 1.U, ptr)

  final def === (that: GlobalHistoryInfo): Bool = {
    shifted === that.shifted &&
    takenOnBr === that.takenOnBr &&
    nowPtr === that.nowPtr
  }

  final def =/= (that: GlobalHistoryInfo): Bool = !(this === that)

  // def update(): GlobalHistoryInfo = {
  //   val g = WireInit(this)
  //   g.nowPtr := nowPtr - Mux(shifted, 1.U, 0.U)
  //   g.sawNTBr := Mux(saveHalfRVI, sawNTBr, false.B)
  //   g.takenOnBr := Mux(saveHalfRVI, takenOnBr, false.B)
  //   // g.saveHalfRVI := false.B
  //   g
  // }

  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d\n", sawNTBr, takenOnBr)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}
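
// Note: the global history is stored in a circular buffer (extHist in IFU
// below) indexed by a pointer that moves downward: whenever a packet's
// prediction "shifts" the history (a taken branch, or at least one not-taken
// branch was seen), the pointer is decremented and the newest outcome is
// written at the new position.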

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class PrevHalfInstr extends XSBundle {
  val valid = Bool()
  val taken = Bool()
  val ghInfo = new GlobalHistoryInfo()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val newPtr = UInt(log2Up(ExtHistoryLength).W)
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)


  // val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val updatePtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if1_histPtr = RegEnable(next=newPtr, init=0.U(log2Up(ExtHistoryLength).W), enable=updatePtr)
  val ptr = Mux(updatePtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }

  updatePtr := false.B
  newPtr := if1_histPtr
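  // These are only defaults: the when blocks in IF2/IF3/IF4 and the
  // misprediction-recovery block below may override updatePtr/newPtr in the
  // same cycle, and Chisel's last-connection-wins semantics gives the later
  // blocks priority.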



  def wrapGHInfo(bp: BranchPrediction, ptr: UInt) = {
    val ghi = Wire(new GlobalHistoryInfo())
    ghi.sawNTBr     := bp.hasNotTakenBrs
    ghi.takenOnBr   := bp.takenOnBr
    // ghi.saveHalfRVI := bp.saveHalfRVI
    ghi.nowPtr      := ptr
    ghi
  }

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc, inLoop)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }
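  // The stages use the usual valid/ready handshake: a stage fires when it is
  // valid and the next stage is ready, and it is ready when it will be empty
  // next cycle (it fires, is already empty, or is flushed). IF3 and IF4 below
  // follow the same pattern.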

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }
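  // if1_npc is a Wire, so it keeps no state of its own; the otherwise branch
  // feeds last cycle's value back through RegNext to hold the PC when IF2
  // does not fire. The redirect sources further down override these
  // connections (last connection wins).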

  val if2_bp = bpu.io.out(0)

  val if2_GHInfo = wrapGHInfo(if2_bp, if2_predHistPtr)
  // if the prediction is taken, bp_redirect should be true;
  // when taken on a half RVI, this redirect signal is suppressed
  if2_redirect := if2_fire && if2_bp.taken
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }
  when (if2_fire && if2_GHInfo.shifted) {
    val if2_newPtr = if2_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if2_newPtr
    extHist(if2_newPtr) := if2_GHInfo.takenOnBr.asUInt
  }



  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  val if3_lastGHInfo = RegEnable(if2_GHInfo, enable=if2_fire)
  // val if3_nextValidPC = Mux(if2_valid)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)

  val if3_GHInfo = wrapGHInfo(if3_bp, if3_predHistPtr)

  val prevHalfInstrReq = Wire(new PrevHalfInstr)
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  // val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instruction crosses two pages, and its higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid

  // the first half of an RVI instruction waits here until it meets its second half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && (if3_prevHalfInstr.pc + 2.U) === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr := prevHalfInstrReq
  }.elsewhen (if3_prevHalfConsumed || if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }

  // when bp signals a redirect, we distinguish between taken and not taken:
  // if taken and saveHalfRVI is true, we do not redirect to the target

  def if3_nextValidPCNotEquals(pc: UInt) = !if2_valid || if2_valid && if2_pc =/= pc
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.pc + 2.U)
  val if3_predTakenRedirect    = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(snpc(if3_pc, inLoop))
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo

  if3_redirect := if3_fire && (
                    // prevHalf is consumed but the next packet is not where it was meant to be;
                    // we do not handle this condition because of the burden of building a correct GHInfo
                    // prevHalfMetRedirect ||
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect ||
                    // pred taken and next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and next fetch packet is not snpc
                    if3_predNotTakenRedirect ||
                    // GHInfo from the last prediction does not correspond with this packet
                    if3_ghInfoNotIdenticalRedirect
                  )

  when (if3_redirect) {
    /* when (prevHalfMetRedirect) {
      if1_npc := if3_prevHalfInstr.target
    }.else */
    when (if3_prevHalfNotMetRedirect) {
      if1_npc := if3_prevHalfInstr.pc + 2.U
    }.elsewhen (if3_predTakenRedirect) {
      if1_npc := if3_bp.target
    }.elsewhen (if3_predNotTakenRedirect) {
      if1_npc := snpc(if3_pc)
    }.elsewhen (if3_ghInfoNotIdenticalRedirect) {
      if1_npc := Mux(if3_bp.taken, if3_bp.target, snpc(if3_pc))
    }
    val if3_newPtr = if3_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if3_newPtr
    extHist(if3_newPtr) := if3_GHInfo.takenOnBr.asUInt
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_lastGHInfo = RegEnable(if3_GHInfo, if3_fire)
  // This is the real mask given by the icache or the loop buffer
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)
  val if4_snpc = Mux(inLoop, if4_pc + (PopCount(if4_mask) << 1), snpc(if4_pc))


  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (if4_fire && !hasPrevHalfInstrReq || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)
  if4_bp.takens  := bpu.io.out(2).takens & if4_mask
  if4_bp.brMask  := bpu.io.out(2).brMask & if4_mask
  if4_bp.jalMask := bpu.io.out(2).jalMask & if4_mask

  val if4_GHInfo = wrapGHInfo(if4_bp, if4_predHistPtr)

  def cal_jal_tgt(inst: UInt, rvc: Bool): UInt = {
    Mux(rvc,
      SignExt(Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN),
      SignExt(Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)), XLEN)
    )
  }
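  // cal_jal_tgt reassembles the immediate bits scrambled by the RISC-V
  // encodings: for C.J/C.JAL, inst[12|8|10:9|6|7|2|11|5:3] carries
  // imm[11|10|9:8|7|6|5|4|3:1]; for JAL, inst[31|30:21|20|19:12] carries
  // imm[20|10:1|11|19:12]. The Cat above rebuilds the immediate MSB-first,
  // appends the implicit 0 bit, and sign-extends it.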
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => if4_pd.pc(i) + cal_jal_tgt(if4_instrs(i), if4_pd.pd(i).isRVC)))

  (0 until PredictWidth).foreach {i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell BPU the prediction of the prev half,
  // because predictions are attached to the start of each instruction
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && (if4_prevHalfInstr.pc + 2.U) === if4_pc && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.taken)
  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr := if3_prevHalfInstr
  }.elsewhen (if4_prevHalfConsumed || if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }

  prevHalfInstrReq := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    val idx = if4_bp.lastHalfRVIIdx
    prevHalfInstrReq.valid := true.B
    // this is the prediction result of the last-half RVI
    prevHalfInstrReq.taken := if4_bp.lastHalfRVITaken
    prevHalfInstrReq.ghInfo := if4_GHInfo
    prevHalfInstrReq.newPtr := if4_GHInfo.newPtr()
    prevHalfInstrReq.fetchpc := if4_pc
    prevHalfInstrReq.idx := idx
    prevHalfInstrReq.pc := if4_pd.pc(idx)
    prevHalfInstrReq.target := if4_bp.lastHalfRVITarget
    prevHalfInstrReq.instr := if4_pd.instrs(idx)(15, 0)
    prevHalfInstrReq.ipf := if4_ipf
  }
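  // In short: when a packet ends with the first half of a 32-bit (RVI)
  // instruction (saveHalfRVI), its lower 16 bits and prediction state are
  // requested into if3_prevHalfInstr, promoted to if4_prevHalfInstr once
  // consumed, and merged with the upper half of the next packet by PreDecode
  // (see pd.io.prev below).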

  def if4_nextValidPCNotEquals(pc: UInt) = if3_valid  && if3_pc =/= pc ||
                                           !if3_valid && (if2_valid && if2_pc =/= pc) ||
                                           !if3_valid && !if2_valid

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.pc+2.U)
  val if4_predTakenRedirect = !hasPrevHalfInstrReq && if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target)
  val if4_predNotTakenRedirect = !hasPrevHalfInstrReq && !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo

  if4_redirect := if4_fire && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    if4_prevHalfNextNotMet ||
                    // when if4 preds taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect ||
                    // GHInfo from the last prediction does not correspond with this packet
                    if4_ghInfoNotIdenticalRedirect
                  )

  when (if4_redirect) {
    when (if4_prevHalfNextNotMet) {
      if1_npc := prevHalfInstrReq.pc+2.U
    }.elsewhen (if4_predTakenRedirect) {
      if1_npc := if4_bp.target
    }.elsewhen (if4_predNotTakenRedirect) {
      if1_npc := if4_snpc
    }.elsewhen (if4_ghInfoNotIdenticalRedirect) {
      if1_npc := Mux(if4_bp.taken, if4_bp.target, if4_snpc)
    }
    val if4_newPtr = if4_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if4_newPtr
    extHist(if4_newPtr) := if4_GHInfo.takenOnBr.asUInt
  }


  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    updatePtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we only recover the pointer to an unshifted state
      newPtr := oldPtr
      // finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      // finalPredHistPtr := oldPtr - 1.U
      // hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }
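  // With last-connection-wins semantics, these two redirects override all
  // earlier assignments to if1_npc, and the backend redirect (connected last)
  // takes priority over the loop buffer redirect.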

  when(inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)
  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := hist.asUInt
  bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.endMask := if4_pd.endMask
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_pc =/= if4_pd.pc(0)
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr

  pd.io.in := icacheResp
  when(inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask // TODO: Maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_prevHalfInstrMet
  pd.io.prev.bits := if3_prevHalfInstr.instr
  // if a fetch packet triggers a page fault, replace the faulting instructions with NOPs
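  // ("b0010011" zero-extended to 32 bits is 0x00000013, i.e. addi x0, x0, 0,
  // the canonical RISC-V NOP)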
  when (!if3_prevHalfInstrMet && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_prevHalfInstrMet && (if3_prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !if3_prevHalfInstr.ipf) { crossPageIPF := true.B } // the higher 16 bits page-faulted
  }

  // Performance Counter
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
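  // Slots after a predicted-taken jump must not be issued: the left Fill term
  // is all ones when not taken (keep the whole mask) and all zeros when taken,
  // in which case the right term keeps only slots 0..jmpIdx, because for a
  // log2(PredictWidth)-bit index ~jmpIdx equals PredictWidth-1-jmpIdx and the
  // shift leaves exactly jmpIdx+1 low bits set.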
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))

  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := if4_predHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp:%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d     fire=%d            flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_predHistPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_predHistPtr, crossPageIPF, if3_GHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_predHistPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d updatePtr=%d newPtr=%d ptr=%d\n", if1_histPtr, updatePtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_GHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect, if3_ghInfoNotIdenticalRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
542    XSDebug("[IF3][    prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
543      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
544    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
545      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
546    if3_GHInfo.debug
547
548    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
549    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
550    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
551    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect, if4_ghInfoNotIdenticalRedirect)
552    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
553    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
554      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
555    if4_GHInfo.debug
556    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
557      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
558    for (i <- 0 until PredictWidth) {
559      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
560        io.fetchPacket.bits.mask(i),
561        io.fetchPacket.bits.instrs(i),
562        io.fetchPacket.bits.pc(i),
563        io.fetchPacket.bits.pnpc(i),
564        io.fetchPacket.bits.pd(i).isRVC,
565        io.fetchPacket.bits.pd(i).brType,
566        io.fetchPacket.bits.pd(i).isCall,
567        io.fetchPacket.bits.pd(i).isRet
568      )
569    }
570  }
571}