xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision d479a3a838f93713e8d569af098b6da7fc3c5905)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName
import freechips.rocketchip.tile.HasLazyRoCC
import chisel3.ExcitingUtils._
import xiangshan.backend.ftq.FtqPtr
import xiangshan.backend.decode.WaitTableParameters

trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst {
  def mmioBusWidth = 64
  def mmioBusBytes = mmioBusWidth / 8
  def mmioBeats = FetchWidth * 4 * 8 / mmioBusWidth
  def mmioMask  = VecInit(List.fill(PredictWidth)(true.B)).asUInt
  def mmioBusAligned(pc: UInt): UInt = align(pc, mmioBusBytes)
}

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x10000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = 64 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val groupWidth = groupBytes / instBytes
  val packetBytes = PredictWidth * instBytes
  val packetOffsetBits = log2Ceil(packetBytes)
  def offsetInPacket(pc: UInt) = pc(packetOffsetBits-1, instOffsetBits)
  def packetIdx(pc: UInt) = pc(VAddrBits-1, log2Ceil(packetBytes))
  def groupAligned(pc: UInt)  = align(pc, groupBytes)
  def packetAligned(pc: UInt) = align(pc, packetBytes)
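  // instruction valid mask for a fetch starting at pc: ones from pc's slot
  // within the packet up to the packet end, zeros below it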
  def mask(pc: UInt): UInt = ((~(0.U(PredictWidth.W))) << offsetInPacket(pc))(PredictWidth-1,0)
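  // sequential next pc: the start address of the packet following the one containing pc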
  def snpc(pc: UInt): UInt = packetAligned(pc) + packetBytes.U

  val enableGhistRepair = true
  val IFUDebug = true
}

class GlobalHistory extends XSBundle {
  val predHist = UInt(HistoryLength.W)
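  // speculatively update the history: shift in the taken bit whenever the packet
  // contains a branch (either taken, or an observed not-taken branch)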
  def update(sawNTBr: Bool, takenOnBr: Bool, hist: UInt = predHist): GlobalHistory = {
    val g = Wire(new GlobalHistory)
    val shifted = takenOnBr || sawNTBr
    g.predHist := Mux(shifted, (hist << 1) | takenOnBr.asUInt, hist)
    g
  }

  final def === (that: GlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: GlobalHistory): Bool = !(this === that)

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GlobalHistory] hist=${Binary(predHist)}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}


class IFUIO extends XSBundle
{
  // to ibuffer
  val fetchPacket = DecoupledIO(new FetchPacket)
  // from backend
  val redirect = Flipped(ValidIO(new Redirect))
  val bp_ctrl = Input(new BPUCtrl)
  val commitUpdate = Flipped(ValidIO(new FtqEntry))
  val ftqEnqPtr = Input(new FtqPtr)
  val ftqLeftOne = Input(Bool())
  // to backend
  val toFtq = DecoupledIO(new FtqEntry)
  // to icache
  val icacheMemGrant = Flipped(DecoupledIO(new L1plusCacheResp))
  val fencei = Input(Bool())
  // from icache
  val icacheMemAcq = DecoupledIO(new L1plusCacheReq)
  val l1plusFlush = Output(Bool())
  val prefetchTrainReq = ValidIO(new IcacheMissReq)
  // to tlb
  val sfence = Input(new SfenceBundle)
  val tlbCsr = Input(new TlbCsrBundle)
  // from tlb
  val ptw = new TlbPtwIO
  // icache uncache
  val mmio_acquire = DecoupledIO(new InsUncacheReq)
  val mmio_grant  = Flipped(DecoupledIO(new InsUncacheResp))
  val mmio_flush = Output(Bool())
}

class PrevHalfInstr extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val npc = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
}

@chiselName
class IFU extends XSModule with HasIFUConst with HasCircularQueuePtrHelper with WaitTableParameters
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val icache = Module(new ICache)

  io.ptw <> TLB(
    in = Seq(icache.io.tlb),
    sfence = io.sfence,
    csr = io.tlbCsr,
    width = 1,
    isDtlb = false,
    shouldBlock = true
  )

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val icacheResp = icache.io.resp.bits

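  // flushes propagate backwards: a redirect at a later stage flushes all earlier stages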
  if4_flush := io.redirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  //********************** IF1 ****************************//
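  // fetch is held off for the first 500 cycles after reset, presumably to let
  // memories and predictor tables initialize before the first request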
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if2_valid = RegInit(init = false.B)
  val if2_allReady = WireInit(if2_ready && icache.io.req.ready)
  val if1_fire = if1_valid && if2_allReady

  val if1_gh, if2_gh, if3_gh, if4_gh = Wire(new GlobalHistory)
  val if2_predicted_gh, if3_predicted_gh, if4_predicted_gh = Wire(new GlobalHistory)
  val final_gh = RegInit(0.U.asTypeOf(new GlobalHistory))

  //********************** IF2 ****************************//
  val if2_allValid = if2_valid && icache.io.tlb.resp.valid
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_allValid && if3_ready
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc)
  val if2_predHist = RegEnable(if1_gh.predHist, enable=if1_fire)
  if2_ready := if3_ready && icache.io.tlb.resp.valid || !if2_valid
  when (if1_fire)       { if2_valid := true.B }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

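  // next-pc generator: a priority mux over all redirect sources; registration
  // order appears to set priority, with later sources (backend redirect, reset
  // vector) overriding earlier ones, while stallPC, always valid, is the fallback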
  val npcGen = new PriorityMuxGenerator[UInt]
  npcGen.register(true.B, RegNext(if1_npc), Some("stallPC"))
  val if2_bp = bpu.io.out(0)

  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal

  npcGen.register(if2_valid, Mux(if2_bp.taken, if2_bp.target, if2_snpc), Some("if2_target"))

  if2_predicted_gh := if2_gh.update(if2_bp.hasNotTakenBrs, if2_bp.takenOnBr)

  //********************** IF3 ****************************//
  // if3 should wait for the instruction resp to arrive
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_allValid = if3_valid && icache.io.resp.valid
  val if3_fire = if3_allValid && if4_ready
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_snpc = RegEnable(if2_snpc, if2_fire)
  val if3_predHist = RegEnable(if2_predHist, enable=if2_fire)
  if3_ready := if4_ready && icache.io.resp.valid || !if3_valid
  when (if3_flush) {
    if3_valid := false.B
  }.elsewhen (if2_fire && !if2_flush) {
    if3_valid := true.B
  }.elsewhen (if3_fire) {
    if3_valid := false.B
  }

  val if3_bp = bpu.io.out(1)
  if3_predicted_gh := if3_gh.update(if3_bp.hasNotTakenBrs, if3_bp.takenOnBr)


  val prevHalfInstrReq = WireInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid && HasCExtension.B

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))

  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid && HasCExtension.B

  // the first half of an RVI instruction waits until it meets its other half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && if3_prevHalfInstr.bits.npc === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }.elsewhen (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.valid := true.B
  }.elsewhen (if3_prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.bits := prevHalfInstrReq.bits
  }
  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

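  // the pc comparison is wrapped in a dedicated submodule, presumably to keep
  // the comparator as a distinct unit for synthesis/timing analysis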
  class IF3_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := !io.if2_valid || io.if2_valid && io.if2_pc =/= io.pc
  }
  def if3_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF3_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.res
  }

  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.npc)
  val if3_predTakenRedirect    = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(if3_snpc)
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  // val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_valid && (
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect && HasCExtension.B ||
                    // pred taken and the next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and the next fetch packet is not snpc
                    if3_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if3_ghInfoNotIdenticalRedirect
                  )

  val if3_target = WireInit(if3_snpc)

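  // the three redirect causes are mutually exclusive by construction, so Mux1H is safe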
  if3_target := Mux1H(Seq((if3_prevHalfNotMetRedirect -> if3_prevHalfInstr.bits.npc),
                          (if3_predTakenRedirect      -> if3_bp.target),
                          (if3_predNotTakenRedirect   -> if3_snpc)))

  npcGen.register(if3_redirect, if3_target, Some("if3_target"))


  //********************** IF4 ****************************//
  val ftqEnqBuf_ready = Wire(Bool())
  val if4_ftqEnqPtr = Wire(new FtqPtr)
  val if4_pd = RegEnable(icache.io.pd_out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.bits.ipf, if3_fire)
  val if4_acf = RegEnable(icacheResp.acf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready && ftqEnqBuf_ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_snpc = RegEnable(if3_snpc, if3_fire)
  // This is the real mask given from icache
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)


  val if4_predHist = RegEnable(if3_predHist, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (io.fetchPacket.ready && !hasPrevHalfInstrReq && ftqEnqBuf_ready || !if4_valid) && GTimer() > 500.U
  when (if4_flush) {
    if4_valid := false.B
  }.elsewhen (if3_fire && !if3_flush) {
    if4_valid := Mux(if3_pendingPrevHalfInstr, if3_prevHalfInstrMet, true.B)
  }.elsewhen (if4_fire) {
    if4_valid := false.B
  }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  if4_predicted_gh := if4_gh.update(if4_bp.hasNotTakenBrs, if4_bp.takenOnBr)

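  // reconstruct the sign-extended jump offset from the raw instruction bits
  // (C.J/C.JAL immediate layout for RVC, J-type immediate layout otherwise)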
  def jal_offset(inst: UInt, rvc: Bool): SInt = {
    Mux(rvc,
      Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)).asSInt(),
      Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)).asSInt()
    )
  }
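  // same for conditional branches (C.BEQZ/C.BNEZ immediate layout vs. B-type layout)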
  def br_offset(inst: UInt, rvc: Bool): SInt = {
    Mux(rvc,
      Cat(inst(12), inst(6, 5), inst(2), inst(11, 10), inst(4, 3), 0.U(1.W)).asSInt,
      Cat(inst(31), inst(7), inst(30, 25), inst(11, 8), 0.U(1.W)).asSInt()
    )
  }
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => (if4_pd.pc(i).asSInt + jal_offset(if4_instrs(i), if4_pd.pd(i).isRVC)).asUInt))
  val if4_brs = if4_bp.brMask
  val if4_br_tgts = VecInit((0 until PredictWidth).map(i => (if4_pd.pc(i).asSInt + br_offset(if4_instrs(i), if4_pd.pd(i).isRVC)).asUInt))
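  // override the BPU's predicted targets with the exact targets decoded
  // from the instruction bits for direct jumps and branches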
  (0 until PredictWidth).foreach { i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }.elsewhen (if4_brs(i)) {
      if4_bp.targets(i) := if4_br_tgts(i)
    }
  }

  // we need this to tell BPU the prediction of the prev half,
  // because the prediction is associated with the start of each inst
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid && HasCExtension.B
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  when (if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }.elsewhen (if3_prevHalfConsumed) {
    if4_prevHalfInstr.valid := if3_prevHalfInstr.valid
  }.elsewhen (if4_prevHalfConsumed) {
    if4_prevHalfInstr.valid := false.B
  }

  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr.bits := if3_prevHalfInstr.bits
  }

  prevHalfInstrReq.valid := if4_fire && if4_bp.saveHalfRVI && HasCExtension.B

  // this is the result of the last half RVI
  prevHalfInstrReq.bits.pc := if4_pd.pc(PredictWidth-1)
  prevHalfInstrReq.bits.npc := snpc(if4_pc)
  prevHalfInstrReq.bits.instr := if4_pd.instrs(PredictWidth-1)(15, 0)
  prevHalfInstrReq.bits.ipf := if4_ipf

  class IF4_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val if3_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val if3_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := io.if3_valid  && io.if3_pc =/= io.pc ||
              !io.if3_valid && (io.if2_valid && io.if2_pc =/= io.pc) ||
              !io.if3_valid && !io.if2_valid
  }
  def if4_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF4_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.if3_pc := if3_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.if3_valid := if3_valid
    comp.io.res
  }

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.bits.pc+2.U)
  val if4_predTakenRedirect = if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target)
  val if4_predNotTakenRedirect = !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  // val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_valid && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    // if4_prevHalfNextNotMet ||
                    // when if4 preds taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if4_ghInfoNotIdenticalRedirect
                  )

  val if4_target = WireInit(if4_snpc)

  if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)

  npcGen.register(if4_redirect, if4_target, Some("if4_target"))

  when (if4_fire) {
    final_gh := if4_predicted_gh
  }
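  // forward the freshest speculative history backwards: each stage sees the
  // prediction-updated history of the stage ahead of it, falling back to that
  // stage's own history when it is invalid or flushed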
  if4_gh := final_gh
  if3_gh := Mux(if4_valid, if4_predicted_gh, if4_gh)
  if2_gh := Mux(if3_valid && !if3_flush, if3_predicted_gh, if3_gh)
  if1_gh := Mux(if2_valid && !if2_flush, if2_predicted_gh, if2_gh)

  // ***************** Ftq enq buffer ********************
  val toFtqBuf = Wire(new FtqEntry)
  val ftqEnqBuf = RegEnable(toFtqBuf, enable=if4_fire)
  val ftqEnqBuf_valid = RegInit(false.B)
  val ftqLeftOne = WireInit(false.B) // TODO: to be replaced
  ftqEnqBuf_ready := io.toFtq.ready && !(io.ftqLeftOne && ftqEnqBuf_valid)
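  // if the buffer already holds an entry that is about to enqueue,
  // this packet will be written to the next ftq slot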
  if4_ftqEnqPtr := Mux(ftqEnqBuf_valid, io.ftqEnqPtr+1.U, io.ftqEnqPtr)
  when (io.redirect.valid)  { ftqEnqBuf_valid := false.B }
  .elsewhen (if4_fire)      { ftqEnqBuf_valid := true.B }
  .elsewhen (io.toFtq.fire) { ftqEnqBuf_valid := false.B }

  io.toFtq.valid := ftqEnqBuf_valid
  io.toFtq.bits  := ftqEnqBuf

  toFtqBuf := DontCare
  toFtqBuf.ftqPC    := if4_pc
  toFtqBuf.lastPacketPC.valid := if4_pendingPrevHalfInstr
  toFtqBuf.lastPacketPC.bits  := if4_prevHalfInstr.bits.pc

  toFtqBuf.hist     := final_gh
  toFtqBuf.predHist := if4_predHist.asTypeOf(new GlobalHistory)
  toFtqBuf.rasSp    := bpu.io.brInfo.rasSp
  toFtqBuf.rasTop   := bpu.io.brInfo.rasTop
  toFtqBuf.specCnt  := bpu.io.brInfo.specCnt
  toFtqBuf.metas    := bpu.io.brInfo.metas

  // For perf counters
  toFtqBuf.pd    := if4_pd.pd


  val if4_jmpIdx = WireInit(if4_bp.jmpIdx)
  val if4_taken = WireInit(if4_bp.taken)
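  // if taken, mask off all slots after the jump: shifting all-ones right by
  // ~jmpIdx (i.e. PredictWidth-1-jmpIdx) keeps exactly slots 0..jmpIdx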
  val if4_real_valids = if4_pd.mask &
    (Fill(PredictWidth, !if4_taken) |
      (Fill(PredictWidth, 1.U(1.W)) >> (~if4_jmpIdx)))

  val cfiIsCall = if4_pd.pd(if4_jmpIdx).isCall
  val cfiIsRet  = if4_pd.pd(if4_jmpIdx).isRet
  val cfiIsRVC  = if4_pd.pd(if4_jmpIdx).isRVC
  toFtqBuf.cfiIsCall := cfiIsCall
  toFtqBuf.cfiIsRet  := cfiIsRet
  toFtqBuf.cfiIsRVC  := cfiIsRVC
  toFtqBuf.cfiIndex.valid := if4_taken
  toFtqBuf.cfiIndex.bits  := if4_jmpIdx

  toFtqBuf.br_mask   := if4_bp.brMask.asTypeOf(Vec(PredictWidth, Bool()))
  toFtqBuf.rvc_mask  := VecInit(if4_pd.pd.map(_.isRVC))
  toFtqBuf.valids    := if4_real_valids.asTypeOf(Vec(PredictWidth, Bool()))
  toFtqBuf.target := Mux(if4_taken, if4_target, if4_snpc)

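  // on a backend redirect, restore the global history from the checkpoint
  // carried with the redirect and re-apply the resolved branch outcome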
  val r = io.redirect
  val cfiUpdate = io.redirect.bits.cfiUpdate
  when (r.valid) {
    val isMisPred = r.bits.level === 0.U
    val b = cfiUpdate
    val oldGh = b.hist
    val sawNTBr = b.sawNotTakenBranch
    val isBr = b.pd.isBr
    val taken = Mux(isMisPred, b.taken, b.predTaken)
    val updatedGh = oldGh.update(sawNTBr, isBr && taken)
    final_gh := updatedGh
    if1_gh := updatedGh
  }

  npcGen.register(io.redirect.valid, io.redirect.bits.cfiUpdate.target, Some("backend_redirect"))
  npcGen.register(RegNext(reset.asBool) && !reset.asBool, resetVector.U(VAddrBits.W), Some("reset_vector"))

  if1_npc := npcGen()


  icache.io.req.valid := if1_fire
  icache.io.resp.ready := if4_ready
  icache.io.req.bits.addr := if1_npc
  icache.io.req.bits.mask := mask(if1_npc)
  icache.io.flush := Cat(if3_flush, if2_flush)
  icache.io.mem_grant <> io.icacheMemGrant
  icache.io.fencei := io.fencei
  icache.io.prev.valid := if3_prevHalfInstrMet
  icache.io.prev.bits := if3_prevHalfInstr.bits.instr
  icache.io.prev_ipf := if3_prevHalfInstr.bits.ipf
  icache.io.prev_pc := if3_prevHalfInstr.bits.pc
  icache.io.mmio_acquire <> io.mmio_acquire
  icache.io.mmio_grant <> io.mmio_grant
  icache.io.mmio_flush <> io.mmio_flush
  io.icacheMemAcq <> icache.io.mem_acquire
  io.l1plusFlush := icache.io.l1plusflush
  io.prefetchTrainReq := icache.io.prefetchTrainReq

  bpu.io.ctrl := RegNext(io.bp_ctrl)
  bpu.io.commit <> io.commitUpdate
  bpu.io.redirect <> io.redirect

  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := if1_gh.asUInt
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_prevHalfInstrMet


  when (if3_prevHalfInstrMet && icacheResp.ipf && !if3_prevHalfInstr.bits.ipf) {
    crossPageIPF := true.B // higher 16 bits page fault
  }

  val fetchPacketValid = if4_valid && !io.redirect.valid && ftqEnqBuf_ready
  val fetchPacketWire = Wire(new FetchPacket)

  fetchPacketWire.mask := if4_real_valids
  // RVC expand
  val expandedInstrs = Wire(Vec(PredictWidth, UInt(32.W)))
  for (i <- 0 until PredictWidth) {
    val expander = Module(new RVCExpander)
    expander.io.in := if4_pd.instrs(i)
    expandedInstrs(i) := expander.io.out.bits
  }
  fetchPacketWire.instrs := expandedInstrs

  fetchPacketWire.pc := if4_pd.pc
  fetchPacketWire.foldpc := if4_pd.pc.map(i => XORFold(i(VAddrBits-1,1), WaitTableAddrWidth))

  fetchPacketWire.pdmask := if4_pd.mask
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.acf := if4_acf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF
  fetchPacketWire.ftqPtr := if4_ftqEnqPtr

  // predTaken Vec
  fetchPacketWire.pred_taken := if4_bp.takens

  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid

  if (!env.FPGAPlatform && env.EnablePerfDebug) {
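    // record which stage corrected this packet's prediction:
    // 2 if if4 redirected, else 1 if if3 did, else 0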
    val predictor_s3 = RegEnable(Mux(if3_redirect, 1.U(log2Up(4).W), 0.U(log2Up(4).W)), if3_fire)
    val predictor_s4 = Mux(if4_redirect, 2.U, predictor_s3)
    val predictor = predictor_s4
    toFtqBuf.metas.map(_.predictor := predictor)

    toFtqBuf.metas.zipWithIndex.foreach { case (x, i) =>
      x.predictor := predictor

      x.ubtbAns := bpu.io.brInfo.metas(i).ubtbAns
      x.btbAns := bpu.io.brInfo.metas(i).btbAns
      x.tageAns := bpu.io.brInfo.metas(i).tageAns
      x.rasAns := bpu.io.brInfo.metas(i).rasAns // Is this right?
      x.loopAns := bpu.io.brInfo.metas(i).loopAns
    }
  }

  // TODO: perfs
  // frontend redirect from each stage
  XSPerf("if2_redirect", if2_valid && if2_bp.taken && !if2_flush)
  XSPerf("if2_redirect_fired", if2_fire && if2_bp.taken && !if2_flush)
  XSPerf("if3_redirect", if3_valid && if3_redirect && !if3_flush)
  XSPerf("if3_redirect_fired", if3_fire && if3_redirect && !if3_flush)
  XSPerf("if4_redirect", if4_valid && if4_redirect && !if4_flush)
  XSPerf("if4_redirect_fired", if4_fire && if4_redirect && !if4_flush)

  XSPerf("if1_total_stall", !if2_allReady && if1_valid)
  XSPerf("if1_stall_from_icache_req", !icache.io.req.ready && if1_valid)
  XSPerf("if1_stall_from_if2", !if2_ready && if1_valid)
  XSPerf("itlb_stall", if2_valid && if3_ready && !icache.io.tlb.resp.valid)
  XSPerf("icache_resp_stall", if3_valid && if4_ready && !icache.io.resp.valid)
  XSPerf("if4_stall", if4_valid && !if4_fire)
  XSPerf("if4_stall_ibuffer", if4_valid && !io.fetchPacket.ready && ftqEnqBuf_ready)
  XSPerf("if4_stall_ftq", if4_valid && io.fetchPacket.ready && !ftqEnqBuf_ready)

  XSPerf("if3_prevHalfConsumed", if3_prevHalfConsumed)
  XSPerf("if4_prevHalfConsumed", if4_prevHalfConsumed)


  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(icache.io.flush(0).asBool, "Flush icache stage2...\n")
    XSDebug(icache.io.flush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.cfiUpdate.target)}\n")

    XSDebug("[IF1] v=%d      fire=%d             flush=%d pc=%x mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, crossPageIPF, if3_bp.hasNotTakenBrs)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_crossPageIPF, if4_bp.hasNotTakenBrs)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", icache.io.req.valid, icache.io.req.ready, icache.io.req.bits.addr)
    XSDebug("[IF1][ghr] hist=%b\n", if1_gh.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_gh.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", icache.io.resp.valid, icache.io.resp.ready, icache.io.resp.bits.pc, icache.io.resp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevNMet=%d, predT=%d, predNT=%d\n", if3_redirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d pc=%x npc=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.bits.pc, if3_prevHalfInstr.bits.npc, if3_prevHalfInstr.bits.instr, if3_prevHalfInstr.bits.ipf)
    if3_gh.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][prevHalfInstrReq] v=%d pc=%x npc=%x instr=%x ipf=%d\n",
      prevHalfInstrReq.valid, prevHalfInstrReq.bits.pc, prevHalfInstrReq.bits.npc, prevHalfInstrReq.bits.instr, prevHalfInstrReq.bits.ipf)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d pc=%x npc=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.bits.pc, if4_prevHalfInstr.bits.npc, if4_prevHalfInstr.bits.instr, if4_prevHalfInstr.bits.ipf)
    if4_gh.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d acf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.acf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
    val b = ftqEnqBuf
    XSDebug("[FtqEnqBuf] v=%d r=%d pc=%x cfiIndex(%d)=%d cfiIsCall=%d cfiIsRet=%d cfiIsRVC=%d\n",
      ftqEnqBuf_valid, ftqEnqBuf_ready, b.ftqPC, b.cfiIndex.valid, b.cfiIndex.bits, b.cfiIsCall, b.cfiIsRet, b.cfiIsRVC)
    XSDebug("[FtqEnqBuf] valids=%b br_mask=%b rvc_mask=%b hist=%x predHist=%x rasSp=%d rasTopAddr=%x rasTopCtr=%d\n",
      b.valids.asUInt, b.br_mask.asUInt, b.rvc_mask.asUInt, b.hist.asUInt, b.predHist.asUInt, b.rasSp, b.rasTop.retAddr, b.rasTop.ctr)
    XSDebug("[ToFTQ] v=%d r=%d leftOne=%d ptr=%d\n", io.toFtq.valid, io.toFtq.ready, io.ftqLeftOne, io.ftqEnqPtr.value)
  }

}