// xref: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision f99debe2bd49391f072afeb198e68845b4abf6f1)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName
import freechips.rocketchip.tile.HasLazyRoCC
import chisel3.ExcitingUtils._
import xiangshan.backend.ftq.FtqPtr

trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst {
  def mmioBusWidth = 64
  def mmioBusBytes = mmioBusWidth / 8
  def mmioBeats = FetchWidth * 4 * 8 / mmioBusWidth
  def mmioMask  = VecInit(List.fill(PredictWidth)(true.B)).asUInt
  def mmioBusAligned(pc: UInt): UInt = align(pc, mmioBusBytes)
}
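
// Worked numbers for the constants above (illustrative only; FetchWidth = 8 is
// an example configuration, not something asserted by this trait):
//   mmioBusBytes = 64 / 8          = 8 bytes per bus beat
//   mmioBeats    = 8 * 4 * 8 / 64  = 4 beats for one full fetch packet
//   mmioBusAligned(0x8000000a.U)   = 0x80000008 (low log2(8) = 3 bits cleared)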

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x10000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = 64 // corresponds to cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val groupWidth = groupBytes / instBytes
  val packetBytes = PredictWidth * instBytes
  val packetOffsetBits = log2Ceil(packetBytes)
  def offsetInPacket(pc: UInt) = pc(packetOffsetBits-1, instOffsetBits)
  def packetIdx(pc: UInt) = pc(VAddrBits-1, log2Ceil(packetBytes))
  def groupAligned(pc: UInt)  = align(pc, groupBytes)
  def packetAligned(pc: UInt) = align(pc, packetBytes)
  def mask(pc: UInt): UInt = ((~(0.U(PredictWidth.W))) << offsetInPacket(pc))(PredictWidth-1,0)
  def snpc(pc: UInt): UInt = packetAligned(pc) + packetBytes.U

  val enableGhistRepair = true
  val IFUDebug = true
}
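
// A quick worked example of mask()/snpc(), assuming PredictWidth = 16 and
// instBytes = 2 (illustrative values from a typical RVC-enabled configuration,
// not asserted by this trait):
//   packetBytes = 32, packetOffsetBits = 5, instOffsetBits = 1
//   pc = 0x80000008 -> offsetInPacket = pc(4,1) = 4
//   mask(pc) = 0xffff << 4 (truncated to 16 bits) = 0xfff0
//     i.e. the 4 instruction slots before pc in this packet are masked off
//   snpc(pc) = packetAligned(0x80000008) + 32 = 0x80000020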

class GlobalHistory extends XSBundle {
  val predHist = UInt(HistoryLength.W)
  def update(sawNTBr: Bool, takenOnBr: Bool, hist: UInt = predHist): GlobalHistory = {
    val g = Wire(new GlobalHistory)
    val shifted = takenOnBr || sawNTBr
    g.predHist := Mux(shifted, (hist << 1) | takenOnBr.asUInt, hist)
    g
  }

  final def === (that: GlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: GlobalHistory): Bool = !(this === that)

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GlobalHistory] hist=${Binary(predHist)}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}
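
// update() semantics, sketched: the history only shifts when the packet is
// predicted to contain at least one branch (taken, or seen but not taken),
// and the shifted-in bit records whether a branch was taken.
//   hist = b0101, sawNTBr = 1, takenOnBr = 0  ->  b1010
//   hist = b0101, sawNTBr = 0, takenOnBr = 1  ->  b1011
//   hist = b0101, sawNTBr = 0, takenOnBr = 0  ->  b0101 (unchanged)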


class IFUIO extends XSBundle
{
  // to ibuffer
  val fetchPacket = DecoupledIO(new FetchPacket)
  // from backend
  val redirect = Flipped(ValidIO(new Redirect))
  val commitUpdate = Flipped(ValidIO(new FtqEntry))
  val ftqEnqPtr = Input(new FtqPtr)
  val ftqLeftOne = Input(Bool())
  // to backend
  val toFtq = DecoupledIO(new FtqEntry)
  // to icache
  val icacheMemGrant = Flipped(DecoupledIO(new L1plusCacheResp))
  val fencei = Input(Bool())
  // from icache
  val icacheMemAcq = DecoupledIO(new L1plusCacheReq)
  val l1plusFlush = Output(Bool())
  val prefetchTrainReq = ValidIO(new IcacheMissReq)
  // to tlb
  val sfence = Input(new SfenceBundle)
  val tlbCsr = Input(new TlbCsrBundle)
  // from tlb
  val ptw = new TlbPtwIO
  // icache uncache
  val mmio_acquire = DecoupledIO(new InsUncacheReq)
  val mmio_grant  = Flipped(DecoupledIO(new InsUncacheResp))
  val mmio_flush = Output(Bool())
}

class PrevHalfInstr extends XSBundle {
  val taken = Bool()
  val ghInfo = new GlobalHistory()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val npc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val meta = new BpuMeta
}

@chiselName
class IFU extends XSModule with HasIFUConst with HasCircularQueuePtrHelper
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val icache = Module(new ICache)

  io.ptw <> TLB(
    in = Seq(icache.io.tlb),
    sfence = io.sfence,
    csr = io.tlbCsr,
    width = 1,
    isDtlb = false,
    shouldBlock = true
  )

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val icacheResp = icache.io.resp.bits

  if4_flush := io.redirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect
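
  // Flush cascade, for reference: a backend redirect flushes IF4, and a flush
  // or local redirect in stage n also flushes every younger stage below it.
  // For example, if3_redirect kills the packets currently in IF2 and IF1 but
  // leaves IF3's own packet (and IF4) alone.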

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if2_valid = RegInit(init = false.B)
  val if2_allReady = WireInit(if2_ready && icache.io.req.ready)
  val if1_fire = (if1_valid && if2_allReady) && (icache.io.tlb.resp.valid || !if2_valid)
  val if1_can_go = if1_fire || if2_flush

  val if1_gh, if2_gh, if3_gh, if4_gh = Wire(new GlobalHistory)
  val if2_predicted_gh, if3_predicted_gh, if4_predicted_gh = Wire(new GlobalHistory)
  val final_gh = RegInit(0.U.asTypeOf(new GlobalHistory))
  val final_gh_bypass = WireInit(0.U.asTypeOf(new GlobalHistory))
  val flush_final_gh = WireInit(false.B)

  //********************** IF2 ****************************//
  val if2_allValid = if2_valid && icache.io.tlb.resp.valid
  val if3_ready = WireInit(false.B)
  val if2_fire = (if2_valid && if3_ready) && icache.io.tlb.resp.valid
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_can_go)
  val if2_snpc = snpc(if2_pc)
  val if2_predHist = RegEnable(if1_gh.predHist, enable=if1_can_go)
  if2_ready := if3_ready || !if2_valid
  when (if1_can_go)       { if2_valid := true.B }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  val npcGen = new PriorityMuxGenerator[UInt]
  npcGen.register(true.B, RegNext(if1_npc), Some("stallPC"))
  val if2_bp = bpu.io.out(0)

  // if taken, bp_redirect should be true
  // when taken on half RVI, we suppress this redirect signal

  npcGen.register(if2_valid, Mux(if2_bp.taken, if2_bp.target, if2_snpc), Some("if2_target"))

  if2_predicted_gh := if2_gh.update(if2_bp.hasNotTakenBrs, if2_bp.takenOnBr)

  //********************** IF3 ****************************//
  // if3 should wait for the instruction resp to arrive
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_allValid = if3_valid && icache.io.resp.valid
  val if3_fire = if3_allValid && if4_ready
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_snpc = RegEnable(if2_snpc, if2_fire)
  val if3_predHist = RegEnable(if2_predHist, enable=if2_fire)
  if3_ready := if4_ready && icache.io.resp.valid || !if3_valid
  when (if3_flush) {
    if3_valid := false.B
  }.elsewhen (if2_fire && !if2_flush) {
    if3_valid := true.B
  }.elsewhen (if3_fire) {
    if3_valid := false.B
  }

  val if3_bp = bpu.io.out(1)
  if3_predicted_gh := if3_gh.update(if3_bp.hasNotTakenBrs, if3_bp.takenOnBr)


  val prevHalfInstrReq = WireInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid && HasCExtension.B

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))

  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid && HasCExtension.B

  // the previous half of an RVI instruction waits until it meets its other half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && if3_prevHalfInstr.bits.npc === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }.elsewhen (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.valid := true.B
  }.elsewhen (if3_prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.bits := prevHalfInstrReq.bits
  }
  // when the BP signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

  class IF3_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := !io.if2_valid || io.if2_valid && io.if2_pc =/= io.pc
  }
  def if3_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF3_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.res
  }

  val if3_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => !if3_pendingPrevHalfInstr && if3_bp.realTakens(i) && if3_nextValidPCNotEquals(if3_bp.targets(i))))
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.bits.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.npc)
  val if3_predTakenRedirect    = ParallelOR(if3_predTakenRedirectVec)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(if3_snpc)
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  // val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_valid && (
                    // prevHalf is consumed but the next packet is not where it was meant to be
                    // we do not handle this condition because of the burden of building a correct GHInfo
                    // prevHalfMetRedirect ||
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect && HasCExtension.B ||
                    // pred taken and next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and next fetch packet is not snpc
                    if3_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond with this packet
                    // if3_ghInfoNotIdenticalRedirect
                  )

  val if3_target = WireInit(if3_snpc)

  if3_target := Mux1H(Seq((if3_prevHalfNotMetRedirect -> if3_prevHalfInstr.bits.npc),
                          (if3_predTakenRedirect      -> if3_bp.target),
                          (if3_predNotTakenRedirect   -> if3_snpc)))

  npcGen.register(if3_redirect, if3_target, Some("if3_target"))


  //********************** IF4 ****************************//
  val ftqEnqBuf_ready = Wire(Bool())
  val if4_ftqEnqPtr = Wire(new FtqPtr)
  val if4_pd = RegEnable(icache.io.pd_out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.bits.ipf, if3_fire)
  val if4_acf = RegEnable(icacheResp.acf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready && ftqEnqBuf_ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_snpc = RegEnable(if3_snpc, if3_fire)
  // This is the real mask given by the icache
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)


  val if4_predHist = RegEnable(if3_predHist, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (io.fetchPacket.ready && !hasPrevHalfInstrReq || !if4_valid) && GTimer() > 500.U
  when (if4_flush) {
    if4_valid := false.B
  }.elsewhen (if3_fire && !if3_flush) {
    if4_valid := Mux(if3_pendingPrevHalfInstr, if3_prevHalfInstrMet, true.B)
  }.elsewhen (if4_fire) {
    if4_valid := false.B
  }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  if4_predicted_gh := if4_gh.update(if4_bp.hasNotTakenBrs, if4_bp.takenOnBr)

  def jal_offset(inst: UInt, rvc: Bool): SInt = {
    Mux(rvc,
      Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)).asSInt(),
      Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)).asSInt()
    )
  }
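  // Bit-shuffle reference for jal_offset (standard RISC-V immediate encodings):
  //   RVI JAL    : imm[20|10:1|11|19:12] lives in inst[31:12]; the Cat above
  //                rebuilds {imm[20], imm[19:12], imm[11], imm[10:1], 1'b0}.
  //   RVC C.J(AL): imm[11|4|9:8|10|6|7|3:1|5] lives in inst[12:2]; the Cat above
  //                rebuilds {imm[11], imm[10], imm[9:8], imm[7], imm[6], imm[5],
  //                imm[4], imm[3:1], 1'b0}.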
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => (if4_pd.pc(i).asSInt + jal_offset(if4_instrs(i), if4_pd.pd(i).isRVC)).asUInt))

  (0 until PredictWidth).foreach { i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell the BPU the prediction of the prev half,
  // because predictions are associated with the start of each inst
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid && HasCExtension.B
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.bits.taken)
  when (if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }.elsewhen (if3_prevHalfConsumed) {
    if4_prevHalfInstr.valid := if3_prevHalfInstr.valid
  }.elsewhen (if4_prevHalfConsumed) {
    if4_prevHalfInstr.valid := false.B
  }

  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr.bits := if3_prevHalfInstr.bits
  }

  prevHalfInstrReq.valid := if4_fire && if4_bp.saveHalfRVI && HasCExtension.B
  val idx = if4_bp.lastHalfRVIIdx

  // this is the result of the last half RVI
  prevHalfInstrReq.bits.taken := if4_bp.lastHalfRVITaken
  prevHalfInstrReq.bits.ghInfo := if4_gh
  prevHalfInstrReq.bits.fetchpc := if4_pc
  prevHalfInstrReq.bits.idx := idx
  prevHalfInstrReq.bits.pc := if4_pd.pc(idx)
  prevHalfInstrReq.bits.npc := if4_pd.pc(idx) + 2.U
  prevHalfInstrReq.bits.target := if4_bp.lastHalfRVITarget
  prevHalfInstrReq.bits.instr := if4_pd.instrs(idx)(15, 0)
  prevHalfInstrReq.bits.ipf := if4_ipf
  prevHalfInstrReq.bits.meta := bpu.io.brInfo.metas(idx)

  class IF4_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val if3_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val if3_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := io.if3_valid  && io.if3_pc =/= io.pc ||
              !io.if3_valid && (io.if2_valid && io.if2_pc =/= io.pc) ||
              !io.if3_valid && !io.if2_valid
  }
  def if4_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF4_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.if3_pc := if3_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.if3_valid := if3_valid
    comp.io.res
  }

  val if4_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => if4_bp.realTakens(i) && if4_nextValidPCNotEquals(if4_bp.targets(i))))

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.bits.pc + 2.U)
  val if4_predTakenRedirect = ParallelORR(if4_predTakenRedirectVec)
  val if4_predNotTakenRedirect = !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  // val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_valid && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    // if4_prevHalfNextNotMet ||
                    // when if4 predicts taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 predicts not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond with this packet
                    // if4_ghInfoNotIdenticalRedirect
                  )

  val if4_target = WireInit(if4_snpc)

  if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)

  npcGen.register(if4_redirect, if4_target, Some("if4_target"))

  when (if4_fire) {
    final_gh := if4_predicted_gh
  }
  if4_gh := Mux(flush_final_gh, final_gh_bypass, final_gh)
  if3_gh := Mux(if4_valid && !if4_flush, if4_predicted_gh, if4_gh)
  if2_gh := Mux(if3_valid && !if3_flush, if3_predicted_gh, if3_gh)
  if1_gh := Mux(if2_valid && !if2_flush, if2_predicted_gh, if2_gh)
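  // Speculative history chaining, for reference: each stage uses the youngest
  // unflushed predicted history ahead of it, falling back stage by stage to
  // if4_gh, which is either the committed final_gh or the bypassed value
  // recovered from a backend redirect (flush_final_gh). final_gh only advances
  // on if4_fire or on such a redirect.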

  // ***************** Ftq enq buffer ********************
  val toFtqBuf = Wire(new FtqEntry)
  val ftqEnqBuf = RegEnable(toFtqBuf, enable=if4_fire)
  val ftqEnqBuf_valid = RegInit(false.B)
  val ftqLeftOne = WireInit(false.B) // TODO: to be replaced
  ftqEnqBuf_ready := io.toFtq.ready && !(io.ftqLeftOne && ftqEnqBuf_valid)
  if4_ftqEnqPtr := Mux(ftqEnqBuf_valid, io.ftqEnqPtr + 1.U, io.ftqEnqPtr)
  when (io.redirect.valid)  { ftqEnqBuf_valid := false.B }
  .elsewhen (if4_fire)      { ftqEnqBuf_valid := true.B }
  .elsewhen (io.toFtq.fire) { ftqEnqBuf_valid := false.B }

  io.toFtq.valid := ftqEnqBuf_valid
  io.toFtq.bits  := ftqEnqBuf

  toFtqBuf := DontCare
  toFtqBuf.ftqPC    := if4_pc
  toFtqBuf.hist     := final_gh
  toFtqBuf.predHist := if4_predHist.asTypeOf(new GlobalHistory)
  toFtqBuf.rasSp    := bpu.io.brInfo.rasSp
  toFtqBuf.rasTop   := bpu.io.brInfo.rasTop
  toFtqBuf.specCnt  := bpu.io.brInfo.specCnt
  toFtqBuf.metas    := bpu.io.brInfo.metas

  // save it for update
  when (if4_pendingPrevHalfInstr) {
    toFtqBuf.metas(0) := if4_prevHalfInstr.bits.meta
  }
  val cfiIsCall = if4_pd.pd(if4_bp.jmpIdx).isCall
  val cfiIsRet  = if4_pd.pd(if4_bp.jmpIdx).isRet
  val cfiIsRVC  = if4_pd.pd(if4_bp.jmpIdx).isRVC
  toFtqBuf.cfiIsCall := cfiIsCall
  toFtqBuf.cfiIsRet  := cfiIsRet
  toFtqBuf.cfiIsRVC  := cfiIsRVC
  toFtqBuf.cfiIndex.valid := if4_bp.taken
  toFtqBuf.cfiIndex.bits  := Mux(cfiIsRVC, if4_bp.jmpIdx, if4_bp.jmpIdx - 1.U)

  toFtqBuf.br_mask   := if4_bp.brMask.asTypeOf(Vec(PredictWidth, Bool()))
  toFtqBuf.rvc_mask  := VecInit(if4_pd.pd.map(_.isRVC))
  toFtqBuf.valids    := if4_pd.mask.asTypeOf(Vec(PredictWidth, Bool()))



  val r = io.redirect
  val cfiUpdate = io.redirect.bits.cfiUpdate
  when (r.valid) {
    val isMisPred = r.bits.level === 0.U
    val b = cfiUpdate
    val oldGh = b.hist
    val sawNTBr = b.sawNotTakenBranch
    val isBr = b.pd.isBr
    val taken = Mux(isMisPred, b.taken, b.predTaken)
    val updatedGh = oldGh.update(sawNTBr, isBr && taken)
    final_gh := updatedGh
    final_gh_bypass := updatedGh
    flush_final_gh := true.B
  }

  npcGen.register(io.redirect.valid, io.redirect.bits.cfiUpdate.target, Some("backend_redirect"))
  npcGen.register(RegNext(reset.asBool) && !reset.asBool, resetVector.U(VAddrBits.W), Some("reset_vector"))

  if1_npc := npcGen()
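  // Next-PC selection, sketched (assuming PriorityMuxGenerator gives sources
  // registered later a higher priority, which matches how it is used here):
  //   reset_vector > backend_redirect > if4_target > if3_target > if2_target
  //   > stallPC (the always-valid fallback registered first).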


  icache.io.req.valid := if1_can_go
  icache.io.resp.ready := if4_ready
  icache.io.req.bits.addr := if1_npc
  icache.io.req.bits.mask := mask(if1_npc)
  icache.io.flush := Cat(if3_flush, if2_flush)
  icache.io.mem_grant <> io.icacheMemGrant
  icache.io.fencei := io.fencei
  icache.io.prev.valid := if3_prevHalfInstrMet
  icache.io.prev.bits := if3_prevHalfInstr.bits.instr
  icache.io.prev_ipf := if3_prevHalfInstr.bits.ipf
  icache.io.prev_pc := if3_prevHalfInstr.bits.pc
  icache.io.mmio_acquire <> io.mmio_acquire
  icache.io.mmio_grant <> io.mmio_grant
  icache.io.mmio_flush <> io.mmio_flush
  io.icacheMemAcq <> icache.io.mem_acquire
  io.l1plusFlush := icache.io.l1plusflush
  io.prefetchTrainReq := icache.io.prefetchTrainReq

  bpu.io.commit <> io.commitUpdate
  bpu.io.redirect <> io.redirect

  bpu.io.inFire(0) := if1_can_go
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := if1_gh.asUInt
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_prevHalfInstrMet
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr


  when (if3_prevHalfInstrMet && icacheResp.ipf && !if3_prevHalfInstr.bits.ipf) {
    crossPageIPF := true.B // higher 16 bits page fault
  }

  val fetchPacketValid = if4_valid && !io.redirect.valid && ftqEnqBuf_ready
  val fetchPacketWire = Wire(new FetchPacket)


  fetchPacketWire.instrs := if4_pd.instrs
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
  fetchPacketWire.pdmask := if4_pd.mask

  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
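  // Mask computation above, sketched with PredictWidth = 8 for illustration:
  // ~jmpIdx is the bitwise complement of a log2(PredictWidth)-bit index, i.e.
  // (PredictWidth - 1) - jmpIdx, so "all-ones >> ~jmpIdx" keeps the low
  // (jmpIdx + 1) bits. E.g. taken with jmpIdx = 2: 0b11111111 >> 5 = 0b00000111,
  // which invalidates every slot after the taken jump; when not taken, the
  // Fill(!taken) term keeps the whole predecode mask.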

  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.acf := if4_acf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF
  fetchPacketWire.ftqPtr := if4_ftqEnqPtr

  // predTaken Vec
  fetchPacketWire.pred_taken := if4_bp.realTakens

  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid

//  if(IFUDebug) {
  val predictor_s3 = RegEnable(Mux(if3_redirect, 1.U(log2Up(4).W), 0.U(log2Up(4).W)), if3_fire)
  val predictor_s4 = Mux(if4_redirect, 2.U, predictor_s3)
  val predictor = predictor_s4
  toFtqBuf.metas.foreach(_.predictor := predictor)
// }

  // val predRight = cfiUpdate.valid && !cfiUpdate.bits.isMisPred && !cfiUpdate.bits.isReplay
  // val predWrong = cfiUpdate.valid && cfiUpdate.bits.isMisPred && !cfiUpdate.bits.isReplay

  // val ubtbRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 0.U
  // val ubtbWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 0.U
  // val btbRight  = predRight && cfiUpdate.bits.bpuMeta.predictor === 1.U
  // val btbWrong  = predWrong && cfiUpdate.bits.bpuMeta.predictor === 1.U
  // val tageRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 2.U
  // val tageWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 2.U
  // val loopRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 3.U
  // val loopWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 3.U

  // ExcitingUtils.addSource(ubtbRight, "perfCntubtbRight", Perf)
  // ExcitingUtils.addSource(ubtbWrong, "perfCntubtbWrong", Perf)
  // ExcitingUtils.addSource(btbRight, "perfCntbtbRight", Perf)
  // ExcitingUtils.addSource(btbWrong, "perfCntbtbWrong", Perf)
  // ExcitingUtils.addSource(tageRight, "perfCnttageRight", Perf)
  // ExcitingUtils.addSource(tageWrong, "perfCnttageWrong", Perf)
  // ExcitingUtils.addSource(loopRight, "perfCntloopRight", Perf)
  // ExcitingUtils.addSource(loopWrong, "perfCntloopWrong", Perf)

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(icache.io.flush(0).asBool, "Flush icache stage2...\n")
    XSDebug(icache.io.flush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.cfiUpdate.target)}\n")

    XSDebug("[IF1] v=%d     fire=%d  cango=%d          flush=%d pc=%x mask=%b\n", if1_valid, if1_fire, if1_can_go, if1_flush, if1_npc, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, crossPageIPF, if3_bp.hasNotTakenBrs)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_crossPageIPF, if4_bp.hasNotTakenBrs)
    XSDebug("[predictor] predictor_s3=%d, predictor_s4=%d, predictor=%d\n", predictor_s3, predictor_s4, predictor)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", icache.io.req.valid, icache.io.req.ready, icache.io.req.bits.addr)
    XSDebug("[IF1][ghr] hist=%b\n", if1_gh.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", if1_gh.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_gh.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", icache.io.resp.valid, icache.io.resp.ready, icache.io.resp.bits.pc, icache.io.resp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.bits.taken, if3_prevHalfInstr.bits.fetchpc, if3_prevHalfInstr.bits.idx, if3_prevHalfInstr.bits.pc, if3_prevHalfInstr.bits.npc, if3_prevHalfInstr.bits.target, if3_prevHalfInstr.bits.instr, if3_prevHalfInstr.bits.ipf)
    if3_gh.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][ prevHalfInstrReq] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstrReq.valid, prevHalfInstrReq.bits.taken, prevHalfInstrReq.bits.fetchpc, prevHalfInstrReq.bits.idx, prevHalfInstrReq.bits.pc, prevHalfInstrReq.bits.npc, prevHalfInstrReq.bits.target, prevHalfInstrReq.bits.instr, prevHalfInstrReq.bits.ipf)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.bits.taken, if4_prevHalfInstr.bits.fetchpc, if4_prevHalfInstr.bits.idx, if4_prevHalfInstr.bits.pc, if4_prevHalfInstr.bits.npc, if4_prevHalfInstr.bits.target, if4_prevHalfInstr.bits.instr, if4_prevHalfInstr.bits.ipf)
    if4_gh.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d acf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.acf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
    val b = ftqEnqBuf
    XSDebug("[FtqEnqBuf] v=%d r=%d pc=%x cfiIndex(%d)=%d cfiIsCall=%d cfiIsRet=%d cfiIsRVC=%d\n",
      ftqEnqBuf_valid, ftqEnqBuf_ready, b.ftqPC, b.cfiIndex.valid, b.cfiIndex.bits, b.cfiIsCall, b.cfiIsRet, b.cfiIsRVC)
    XSDebug("[FtqEnqBuf] valids=%b br_mask=%b rvc_mask=%b hist=%x predHist=%x rasSp=%d rasTopAddr=%x rasTopCtr=%d\n",
      b.valids.asUInt, b.br_mask.asUInt, b.rvc_mask.asUInt, b.hist.asUInt, b.predHist.asUInt, b.rasSp, b.rasTop.retAddr, b.rasTop.ctr)
    XSDebug("[ToFTQ] v=%d r=%d leftOne=%d ptr=%d\n", io.toFtq.valid, io.toFtq.ready, io.ftqLeftOne, io.ftqEnqPtr.value)
  }

}