xref: /XiangShan/src/main/scala/xiangshan/frontend/NewFtq.scala (revision 57bb43b5f11c3f1e89ac52f232fe73056b35d9bd)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.frontend.icache._
import xiangshan.backend.CtrlToFtqIO
import xiangshan.backend.decode.ImmUnion

class FtqPtr(implicit p: Parameters) extends CircularQueuePtr[FtqPtr](
  p => p(XSCoreParamsKey).FtqSize
){
  override def cloneType = (new FtqPtr).asInstanceOf[this.type]
}

object FtqPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): FtqPtr = {
    val ptr = Wire(new FtqPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: FtqPtr)(implicit p: Parameters): FtqPtr = {
    apply(!ptr.flag, ptr.value)
  }
}
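
// A minimal usage sketch (the helpers are assumed from utils.CircularQueuePtr,
// as used elsewhere in this file): the extra flag bit distinguishes "full"
// from "empty" once the pointer wraps. Two pointers with equal value but
// opposite flags are exactly FtqSize entries apart:
//   val a = FtqPtr(false.B, 3.U)
//   val b = FtqPtr(true.B, 3.U) // b has wrapped once relative to a
//   // isAfter(b, a) === true.B; distanceBetween(b, a) === FtqSize.U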

class FtqNRSRAM[T <: Data](gen: T, numRead: Int)(implicit p: Parameters) extends XSModule {

  val io = IO(new Bundle() {
    val raddr = Input(Vec(numRead, UInt(log2Up(FtqSize).W)))
    val ren = Input(Vec(numRead, Bool()))
    val rdata = Output(Vec(numRead, gen))
    val waddr = Input(UInt(log2Up(FtqSize).W))
    val wen = Input(Bool())
    val wdata = Input(gen)
  })

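  // Each read port is backed by its own copy of a single-ported SRAM, with
  // every copy fed the same write stream: duplication is how an "N read,
  // 1 write" memory is built out of 1R1W SRAMTemplate instances here.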
  for(i <- 0 until numRead){
    val sram = Module(new SRAMTemplate(gen, FtqSize))
    sram.io.r.req.valid := io.ren(i)
    sram.io.r.req.bits.setIdx := io.raddr(i)
    io.rdata(i) := sram.io.r.resp.data(0)
    sram.io.w.req.valid := io.wen
    sram.io.w.req.bits.setIdx := io.waddr
    sram.io.w.req.bits.data := VecInit(io.wdata)
  }

}

class Ftq_RF_Components(implicit p: Parameters) extends XSBundle with BPUUtils {
  val startAddr = UInt(VAddrBits.W)
  val nextLineAddr = UInt(VAddrBits.W)
  val isNextMask = Vec(PredictWidth, Bool())
  val fallThruError = Bool()
  // val carry = Bool()
  def getPc(offset: UInt) = {
    def getHigher(pc: UInt) = pc(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
    def getOffset(pc: UInt) = pc(log2Ceil(PredictWidth)+instOffsetBits, instOffsetBits)
    Cat(getHigher(Mux(isNextMask(offset) && startAddr(log2Ceil(PredictWidth)+instOffsetBits), nextLineAddr, startAddr)),
        getOffset(startAddr)+offset, 0.U(instOffsetBits.W))
  }
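  // getPc rebuilds the PC of the instruction `offset` slots into this fetch
  // block as { higher bits | (start offset + offset) | instOffsetBits zeros }.
  // isNextMask(offset) marks offsets whose addition carries out of the low
  // bits; when that carry also propagates past the block-address bit, the
  // higher bits must be taken from nextLineAddr instead of startAddr.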
  def fromBranchPrediction(resp: BranchPredictionBundle) = {
    def carryPos(addr: UInt) = addr(instOffsetBits+log2Ceil(PredictWidth)+1)
    this.startAddr := resp.pc
    this.nextLineAddr := resp.pc + (FetchWidth * 4 * 2).U // may be broken on other configs
    this.isNextMask := VecInit((0 until PredictWidth).map(i =>
      (resp.pc(log2Ceil(PredictWidth), 1) +& i.U)(log2Ceil(PredictWidth)).asBool()
    ))
    this.fallThruError := resp.fallThruError
    this
  }
  override def toPrintable: Printable = {
    p"startAddr:${Hexadecimal(startAddr)}"
  }
}

class Ftq_pd_Entry(implicit p: Parameters) extends XSBundle {
  val brMask = Vec(PredictWidth, Bool())
  val jmpInfo = ValidUndirectioned(Vec(3, Bool()))
  val jmpOffset = UInt(log2Ceil(PredictWidth).W)
  val jalTarget = UInt(VAddrBits.W)
  val rvcMask = Vec(PredictWidth, Bool())
  def hasJal  = jmpInfo.valid && !jmpInfo.bits(0)
  def hasJalr = jmpInfo.valid && jmpInfo.bits(0)
  def hasCall = jmpInfo.valid && jmpInfo.bits(1)
  def hasRet  = jmpInfo.valid && jmpInfo.bits(2)

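  // jmpInfo packs the predecode flags of the (at most one) jump in the block:
  // bits(0)=isJalr, bits(1)=isCall, bits(2)=isRet. A plain JAL is encoded as
  // jmpInfo.valid with bits(0) clear, which is what hasJal relies on above.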
  def fromPdWb(pdWb: PredecodeWritebackBundle) = {
    val pds = pdWb.pd
    this.brMask := VecInit(pds.map(pd => pd.isBr && pd.valid))
    this.jmpInfo.valid := VecInit(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid)).asUInt.orR
    this.jmpInfo.bits := ParallelPriorityMux(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid),
                                             pds.map(pd => VecInit(pd.isJalr, pd.isCall, pd.isRet)))
    this.jmpOffset := ParallelPriorityEncoder(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid))
    this.rvcMask := VecInit(pds.map(pd => pd.isRVC))
    this.jalTarget := pdWb.jalTarget
  }

  def toPd(offset: UInt) = {
    require(offset.getWidth == log2Ceil(PredictWidth))
    val pd = Wire(new PreDecodeInfo)
    pd.valid := true.B
    pd.isRVC := rvcMask(offset)
    val isBr = brMask(offset)
    val isJalr = offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(0)
    pd.brType := Cat(offset === jmpOffset && jmpInfo.valid, isJalr || isBr)
    pd.isCall := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(1)
    pd.isRet  := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(2)
    pd
  }
}



class Ftq_Redirect_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasEntry = new RASEntry
  // val specCnt = Vec(numBr, UInt(10.W))
  // val ghist = new ShiftingGlobalHistory
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)

  val histPtr = new CGHPtr

  def fromBranchPrediction(resp: BranchPredictionBundle) = {
    assert(!resp.is_minimal)
    this.rasSp := resp.rasSp
    this.rasEntry := resp.rasTop
    this.folded_hist := resp.folded_hist
    this.afhob := resp.afhob
    this.lastBrNumOH := resp.lastBrNumOH
    this.histPtr := resp.histPtr
    this
  }
}

class Ftq_1R_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

class Ftq_Pred_Info(implicit p: Parameters) extends XSBundle {
  val target = UInt(VAddrBits.W)
  val cfiIndex = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
}

// class FtqEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
//   val startAddr = UInt(VAddrBits.W)
//   val fallThruAddr = UInt(VAddrBits.W)
//   val isNextMask = Vec(PredictWidth, Bool())

//   val meta = UInt(MaxMetaLength.W)

//   val rasSp = UInt(log2Ceil(RasSize).W)
//   val rasEntry = new RASEntry
//   val hist = new ShiftingGlobalHistory
//   val specCnt = Vec(numBr, UInt(10.W))

//   val valids = Vec(PredictWidth, Bool())
//   val brMask = Vec(PredictWidth, Bool())
//   // isJalr, isCall, isRet
//   val jmpInfo = ValidUndirectioned(Vec(3, Bool()))
//   val jmpOffset = UInt(log2Ceil(PredictWidth).W)

//   val mispredVec = Vec(PredictWidth, Bool())
//   val cfiIndex = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
//   val target = UInt(VAddrBits.W)
// }

class FtqRead[T <: Data](private val gen: T)(implicit p: Parameters) extends XSBundle {
  val ptr = Output(new FtqPtr)
  val offset = Output(UInt(log2Ceil(PredictWidth).W))
  val data = Input(gen)
  def apply(ptr: FtqPtr, offset: UInt) = {
    this.ptr := ptr
    this.offset := offset
    this.data
  }
  override def cloneType = (new FtqRead(gen)).asInstanceOf[this.type]
}
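
// Note on FtqRead: apply() only drives the request wires; the backing
// SyncDataModuleTemplate has a one-cycle read latency, so consumers of `data`
// must align the response themselves, e.g. the pc_reads hookup below applies
// RegNext to the offset before calling getPc on the returned bundle.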


class FtqToBpuIO(implicit p: Parameters) extends XSBundle {
  val redirect = Valid(new BranchPredictionRedirect)
  val update = Valid(new BranchPredictionUpdate)
  val enq_ptr = Output(new FtqPtr)
}

class FtqToIfuIO(implicit p: Parameters) extends XSBundle with HasCircularQueuePtrHelper {
  val req = Decoupled(new FetchRequestBundle)
  val redirect = Valid(new Redirect)
  val flushFromBpu = new Bundle {
    // when the ifu pipeline is not stalled,
    // a packet from bpu s3 can have reached f1 at most
    val s2 = Valid(new FtqPtr)
    val s3 = Valid(new FtqPtr)
    def shouldFlushBy(src: Valid[FtqPtr], idx_to_flush: FtqPtr) = {
      src.valid && !isAfter(src.bits, idx_to_flush)
    }
    def shouldFlushByStage2(idx: FtqPtr) = shouldFlushBy(s2, idx)
    def shouldFlushByStage3(idx: FtqPtr) = shouldFlushBy(s3, idx)
  }
}

trait HasBackendRedirectInfo extends HasXSParameter {
  def numRedirectPcRead = exuParameters.JmpCnt + exuParameters.AluCnt + 1
  def isLoadReplay(r: Valid[Redirect]) = r.bits.flushItself()
}

class FtqToCtrlIO(implicit p: Parameters) extends XSBundle with HasBackendRedirectInfo {
  // port layout: jumpPc | redirect reads | memPred | robFlush
  val pc_reads = Vec(1 + numRedirectPcRead + 1 + 1, Flipped(new FtqRead(UInt(VAddrBits.W))))
  val target_read = Flipped(new FtqRead(UInt(VAddrBits.W)))
  val redirect_s1_real_pc = Output(UInt(VAddrBits.W))
  def getJumpPcRead = pc_reads.head
  def getRedirectPcRead = VecInit(pc_reads.tail.dropRight(2))
  def getRedirectPcReadData = pc_reads.tail.dropRight(2).map(_.data)
  def getMemPredPcRead = pc_reads.init.last
  def getRobFlushPcRead = pc_reads.last
}


class FTBEntryGen(implicit p: Parameters) extends XSModule with HasBackendRedirectInfo with HasBPUParameter {
  val io = IO(new Bundle {
    val start_addr = Input(UInt(VAddrBits.W))
    val old_entry = Input(new FTBEntry)
    val pd = Input(new Ftq_pd_Entry)
    val cfiIndex = Flipped(Valid(UInt(log2Ceil(PredictWidth).W)))
    val target = Input(UInt(VAddrBits.W))
    val hit = Input(Bool())
    val mispredict_vec = Input(Vec(PredictWidth, Bool()))

    val new_entry = Output(new FTBEntry)
    val new_br_insert_pos = Output(Vec(numBr, Bool()))
    val taken_mask = Output(Vec(numBr, Bool()))
    val mispred_mask = Output(Vec(numBr+1, Bool()))

    // for perf counters
    val is_init_entry = Output(Bool())
    val is_old_entry = Output(Bool())
    val is_new_br = Output(Bool())
    val is_jalr_target_modified = Output(Bool())
    val is_always_taken_modified = Output(Bool())
    val is_br_full = Output(Bool())
  })

  // no mispredictions detected at predecode
  val hit = io.hit
  val pd = io.pd

  val init_entry = WireInit(0.U.asTypeOf(new FTBEntry))


  val cfi_is_br = pd.brMask(io.cfiIndex.bits) && io.cfiIndex.valid
  val entry_has_jmp = pd.jmpInfo.valid
  val new_jmp_is_jal  = entry_has_jmp && !pd.jmpInfo.bits(0) && io.cfiIndex.valid
  val new_jmp_is_jalr = entry_has_jmp &&  pd.jmpInfo.bits(0) && io.cfiIndex.valid
  val new_jmp_is_call = entry_has_jmp &&  pd.jmpInfo.bits(1) && io.cfiIndex.valid
  val new_jmp_is_ret  = entry_has_jmp &&  pd.jmpInfo.bits(2) && io.cfiIndex.valid
  val last_jmp_rvi = entry_has_jmp && pd.jmpOffset === (PredictWidth-1).U && !pd.rvcMask.last
  // val last_br_rvi = cfi_is_br && io.cfiIndex.bits === (PredictWidth-1).U && !pd.rvcMask.last

  val cfi_is_jal = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jal
  val cfi_is_jalr = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jalr

  def carryPos = log2Ceil(PredictWidth)+instOffsetBits
  def getLower(pc: UInt) = pc(carryPos-1, instOffsetBits)
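  // For example, with PredictWidth = 16 and instOffsetBits = 1, carryPos = 5
  // and getLower extracts pc(4, 1): the 2-byte-slot index of pc within its
  // 32-byte-aligned region, the granule in which pftAddr is stored.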
  // if not hit, establish a new entry
  init_entry.valid := true.B
  // tag is left for ftb to assign

  // case br
  val init_br_slot = init_entry.getSlotForBr(0)
  when (cfi_is_br) {
    init_br_slot.valid := true.B
    init_br_slot.offset := io.cfiIndex.bits
    init_br_slot.setLowerStatByTarget(io.start_addr, io.target, numBr == 1)
    init_entry.always_taken(0) := true.B // set to always taken on init
  }

  // case jmp
  when (entry_has_jmp) {
    init_entry.tailSlot.offset := pd.jmpOffset
    init_entry.tailSlot.valid := new_jmp_is_jal || new_jmp_is_jalr
    init_entry.tailSlot.setLowerStatByTarget(io.start_addr, Mux(cfi_is_jalr, io.target, pd.jalTarget), isShare=false)
  }

  val jmpPft = getLower(io.start_addr) +& pd.jmpOffset +& Mux(pd.rvcMask(pd.jmpOffset), 1.U, 2.U)
  init_entry.pftAddr := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft, getLower(io.start_addr))
  init_entry.carry   := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft(carryPos-instOffsetBits), true.B)
  init_entry.isJalr := new_jmp_is_jalr
  init_entry.isCall := new_jmp_is_call
  init_entry.isRet  := new_jmp_is_ret
  // that means fall thru points to the middle of an inst
  init_entry.last_may_be_rvi_call := io.cfiIndex.bits === (PredictWidth-1).U && !pd.rvcMask(pd.jmpOffset)
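  // pftAddr/carry together encode the partial fall-through address: the lower
  // bits of (start + jmpOffset + jump size), with carry set when it spills
  // into the next aligned region. With no jump, or with a 4-byte jump in the
  // last slot (last_jmp_rvi), the entry covers the whole block instead:
  // pftAddr = getLower(start) with carry forced to 1.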

  // if hit, check whether a new cfi(only br is possible) is detected
  val oe = io.old_entry
  val br_recorded_vec = oe.getBrRecordedVec(io.cfiIndex.bits)
  val br_recorded = br_recorded_vec.asUInt.orR
  val is_new_br = cfi_is_br && !br_recorded
  val new_br_offset = io.cfiIndex.bits
  // vec(i) means new br will be inserted BEFORE old br(i)
  val allBrSlotsVec = oe.allSlotsForBr
  val new_br_insert_onehot = VecInit((0 until numBr).map{
    i => i match {
      case 0 =>
        !allBrSlotsVec(0).valid || new_br_offset < allBrSlotsVec(0).offset
      case idx =>
        allBrSlotsVec(idx-1).valid && new_br_offset > allBrSlotsVec(idx-1).offset &&
        (!allBrSlotsVec(idx).valid || new_br_offset < allBrSlotsVec(idx).offset)
    }
  })
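  // i.e., branch slots stay sorted by offset: the new branch lands in slot i
  // when it lies between the valid branch in slot i-1 and whatever occupies
  // slot i. Branches at larger offsets are shifted one slot down in the loop
  // below, and the last one may be dropped when all slots are already full.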

  val old_entry_modified = WireInit(io.old_entry)
  for (i <- 0 until numBr) {
    val slot = old_entry_modified.allSlotsForBr(i)
    when (new_br_insert_onehot(i)) {
      slot.valid := true.B
      slot.offset := new_br_offset
      slot.setLowerStatByTarget(io.start_addr, io.target, i == numBr-1)
      old_entry_modified.always_taken(i) := true.B
    }.elsewhen (new_br_offset > oe.allSlotsForBr(i).offset) {
      old_entry_modified.always_taken(i) := false.B
      // all other fields remain unchanged
    }.otherwise {
      // case i == 0, remain unchanged
      if (i != 0) {
        val noNeedToMoveFromFormerSlot = (i == numBr-1).B && !oe.brSlots.last.valid
        when (!noNeedToMoveFromFormerSlot) {
          slot.fromAnotherSlot(oe.allSlotsForBr(i-1))
          old_entry_modified.always_taken(i) := oe.always_taken(i)
        }
      }
    }
  }

  // two circumstances:
  // 1. oe: | br | j  |, new br should be in front of j, thus addr of j should be new pft
  // 2. oe: | br | br |, new br could be anywhere between, thus new pft is the addr of either
  //        the previous last br or the new br
  val may_have_to_replace = oe.noEmptySlotForNewBr
  val pft_need_to_change = is_new_br && may_have_to_replace
  // it should either be the given last br or the new br
  when (pft_need_to_change) {
    val new_pft_offset =
      Mux(!new_br_insert_onehot.asUInt.orR,
        new_br_offset, oe.allSlotsForBr.last.offset)

    // set jmp to invalid
    old_entry_modified.pftAddr := getLower(io.start_addr) + new_pft_offset
    old_entry_modified.carry := (getLower(io.start_addr) +& new_pft_offset).head(1).asBool
    old_entry_modified.last_may_be_rvi_call := false.B
    old_entry_modified.isCall := false.B
    old_entry_modified.isRet := false.B
    old_entry_modified.isJalr := false.B
  }

  val old_entry_jmp_target_modified = WireInit(oe)
  val old_target = oe.tailSlot.getTarget(io.start_addr) // may be wrong because we store only 20 lowest bits
  val old_tail_is_jmp = !oe.tailSlot.sharing
  val jalr_target_modified = cfi_is_jalr && (old_target =/= io.target) && old_tail_is_jmp // TODO: pass full jalr target
  when (jalr_target_modified) {
    old_entry_jmp_target_modified.setByJmpTarget(io.start_addr, io.target)
    old_entry_jmp_target_modified.always_taken := 0.U.asTypeOf(Vec(numBr, Bool()))
  }

  val old_entry_always_taken = WireInit(oe)
  val always_taken_modified_vec = Wire(Vec(numBr, Bool())) // whether modified or not
  for (i <- 0 until numBr) {
    old_entry_always_taken.always_taken(i) :=
      oe.always_taken(i) && io.cfiIndex.valid && oe.brValids(i) && io.cfiIndex.bits === oe.brOffset(i)
    always_taken_modified_vec(i) := oe.always_taken(i) && !old_entry_always_taken.always_taken(i)
  }
  val always_taken_modified = always_taken_modified_vec.reduce(_||_)



  val derived_from_old_entry =
    Mux(is_new_br, old_entry_modified,
      Mux(jalr_target_modified, old_entry_jmp_target_modified, old_entry_always_taken))


  io.new_entry := Mux(!hit, init_entry, derived_from_old_entry)

  io.new_br_insert_pos := new_br_insert_onehot
  io.taken_mask := VecInit((io.new_entry.brOffset zip io.new_entry.brValids).map{
    case (off, v) => io.cfiIndex.bits === off && io.cfiIndex.valid && v
  })
  for (i <- 0 until numBr) {
    io.mispred_mask(i) := io.new_entry.brValids(i) && io.mispredict_vec(io.new_entry.brOffset(i))
  }
  io.mispred_mask.last := io.new_entry.jmpValid && io.mispredict_vec(pd.jmpOffset)

  // for perf counters
  io.is_init_entry := !hit
  io.is_old_entry := hit && !is_new_br && !jalr_target_modified && !always_taken_modified
  io.is_new_br := hit && is_new_br
  io.is_jalr_target_modified := hit && jalr_target_modified
  io.is_always_taken_modified := hit && always_taken_modified
  io.is_br_full := hit && is_new_br && may_have_to_replace
}

class Ftq(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper
  with HasBackendRedirectInfo with BPUUtils with HasBPUConst with HasPerfEvents
  with HasICacheParameters{
  val io = IO(new Bundle {
    val fromBpu = Flipped(new BpuToFtqIO)
    val fromIfu = Flipped(new IfuToFtqIO)
    val fromBackend = Flipped(new CtrlToFtqIO)

    val toBpu = new FtqToBpuIO
    val toIfu = new FtqToIfuIO
    val toBackend = new FtqToCtrlIO

    val toPrefetch = new FtqPrefechBundle

    val bpuInfo = new Bundle {
      val bpRight = Output(UInt(XLEN.W))
      val bpWrong = Output(UInt(XLEN.W))
    }
  })
  io.bpuInfo := DontCare

  val backendRedirect = Wire(Valid(new Redirect))
  val backendRedirectReg = RegNext(backendRedirect)

  val stage2Flush = backendRedirect.valid
  val backendFlush = stage2Flush || RegNext(stage2Flush)
  val ifuFlush = Wire(Bool())

  val flush = stage2Flush || RegNext(stage2Flush)

  val allowBpuIn, allowToIfu = WireInit(false.B)
  val flushToIfu = !allowToIfu
  allowBpuIn := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid
  allowToIfu := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid

  val bpuPtr, ifuPtr, ifuWbPtr, commPtr = RegInit(FtqPtr(false.B, 0.U))
  val validEntries = distanceBetween(bpuPtr, commPtr)
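  // By construction the four pointers satisfy, modulo wrap-around,
  // commPtr <= ifuWbPtr <= ifuPtr <= bpuPtr: bpuPtr is the enqueue head,
  // ifuPtr the next entry to send to the IFU, ifuWbPtr the next predecode
  // writeback expected, and commPtr the next entry to commit and train with.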

  // **********************************************************************
  // **************************** enq from bpu ****************************
  // **********************************************************************
  val new_entry_ready = validEntries < FtqSize.U
  io.fromBpu.resp.ready := new_entry_ready

  val bpu_s2_resp = io.fromBpu.resp.bits.s2
  val bpu_s3_resp = io.fromBpu.resp.bits.s3
  val bpu_s2_redirect = bpu_s2_resp.valid && bpu_s2_resp.hasRedirect
  val bpu_s3_redirect = bpu_s3_resp.valid && bpu_s3_resp.hasRedirect

  io.toBpu.enq_ptr := bpuPtr
  val enq_fire = io.fromBpu.resp.fire() && allowBpuIn // from bpu s1
  val bpu_in_fire = (io.fromBpu.resp.fire() || bpu_s2_redirect || bpu_s3_redirect) && allowBpuIn

  val bpu_in_resp = io.fromBpu.resp.bits.selectedResp
  val bpu_in_stage = io.fromBpu.resp.bits.selectedRespIdx
  val bpu_in_resp_ptr = Mux(bpu_in_stage === BP_S1, bpuPtr, bpu_in_resp.ftq_idx)
  val bpu_in_resp_idx = bpu_in_resp_ptr.value

  // read ports:                            jumpPc + redirects + loadPred + robFlush + ifuReq1 + ifuReq2 + commitUpdate
  val ftq_pc_mem = Module(new SyncDataModuleTemplate(new Ftq_RF_Components, FtqSize, 1+numRedirectPcRead+2+1+1+1, 1))
  // resp from uBTB
  ftq_pc_mem.io.wen(0) := bpu_in_fire
  ftq_pc_mem.io.waddr(0) := bpu_in_resp_idx
  ftq_pc_mem.io.wdata(0).fromBranchPrediction(bpu_in_resp)

  //                                                            ifuRedirect + backendRedirect + commit
  val ftq_redirect_sram = Module(new FtqNRSRAM(new Ftq_Redirect_SRAMEntry, 1+1+1))
  // this info is intended to be enqueued at the last stage of bpu
  ftq_redirect_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid
  ftq_redirect_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftq_redirect_sram.io.wdata.fromBranchPrediction(io.fromBpu.resp.bits.lastStage)
  println(f"ftq redirect SRAM: entry ${ftq_redirect_sram.io.wdata.getWidth} * ${FtqSize} * 3")
  println(f"ftq redirect SRAM: ahead fh ${ftq_redirect_sram.io.wdata.afhob.getWidth} * ${FtqSize} * 3")

  val ftq_meta_1r_sram = Module(new FtqNRSRAM(new Ftq_1R_SRAMEntry, 1))
  // this info is intended to be enqueued at the last stage of bpu
  ftq_meta_1r_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid
  ftq_meta_1r_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftq_meta_1r_sram.io.wdata.meta := io.fromBpu.resp.bits.meta
  //                                                            ifuRedirect + backendRedirect + commit
  val ftb_entry_mem = Module(new SyncDataModuleTemplate(new FTBEntry, FtqSize, 1+1+1, 1))
  ftb_entry_mem.io.wen(0) := io.fromBpu.resp.bits.lastStage.valid
  ftb_entry_mem.io.waddr(0) := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftb_entry_mem.io.wdata(0) := io.fromBpu.resp.bits.lastStage.ftb_entry


  // multi-write
  val update_target = Reg(Vec(FtqSize, UInt(VAddrBits.W))) // could be taken target or fallThrough
  val cfiIndex_vec = Reg(Vec(FtqSize, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))))
  val mispredict_vec = Reg(Vec(FtqSize, Vec(PredictWidth, Bool())))
  val pred_stage = Reg(Vec(FtqSize, UInt(2.W)))

  val c_invalid :: c_valid :: c_commited :: Nil = Enum(3)
  val commitStateQueue = RegInit(VecInit(Seq.fill(FtqSize) {
    VecInit(Seq.fill(PredictWidth)(c_invalid))
  }))

  val f_to_send :: f_sent :: Nil = Enum(2)
  val entry_fetch_status = RegInit(VecInit(Seq.fill(FtqSize)(f_sent)))

  val h_not_hit :: h_false_hit :: h_hit :: Nil = Enum(3)
  val entry_hit_status = RegInit(VecInit(Seq.fill(FtqSize)(h_not_hit)))


  when (bpu_in_fire) {
    entry_fetch_status(bpu_in_resp_idx) := f_to_send
    commitStateQueue(bpu_in_resp_idx) := VecInit(Seq.fill(PredictWidth)(c_invalid))
    cfiIndex_vec(bpu_in_resp_idx) := bpu_in_resp.cfiIndex
    mispredict_vec(bpu_in_resp_idx) := WireInit(VecInit(Seq.fill(PredictWidth)(false.B)))
    update_target(bpu_in_resp_idx) := bpu_in_resp.getTarget
    pred_stage(bpu_in_resp_idx) := bpu_in_stage
  }

  bpuPtr := bpuPtr + enq_fire
  ifuPtr := ifuPtr + (io.toIfu.req.fire && allowToIfu)

  // only use ftb result to assign hit status
  when (bpu_s2_resp.valid) {
    entry_hit_status(bpu_s2_resp.ftq_idx.value) := Mux(bpu_s2_resp.full_pred.hit, h_hit, h_not_hit)
  }


  io.toIfu.flushFromBpu.s2.valid := bpu_s2_redirect
  io.toIfu.flushFromBpu.s2.bits := bpu_s2_resp.ftq_idx
  when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect) {
    bpuPtr := bpu_s2_resp.ftq_idx + 1.U
    // only when ifuPtr runs ahead of bpu s2 resp should we recover it
    when (!isBefore(ifuPtr, bpu_s2_resp.ftq_idx)) {
      ifuPtr := bpu_s2_resp.ftq_idx
    }
  }

  io.toIfu.flushFromBpu.s3.valid := bpu_s3_redirect
  io.toIfu.flushFromBpu.s3.bits := bpu_s3_resp.ftq_idx
  when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect) {
    bpuPtr := bpu_s3_resp.ftq_idx + 1.U
    // only when ifuPtr runs ahead of bpu s3 resp should we recover it
    when (!isBefore(ifuPtr, bpu_s3_resp.ftq_idx)) {
      ifuPtr := bpu_s3_resp.ftq_idx
    }
  }

  XSError(isBefore(bpuPtr, ifuPtr) && !isFull(bpuPtr, ifuPtr), "\nifuPtr is before bpuPtr!\n")

  // ****************************************************************
  // **************************** to ifu ****************************
  // ****************************************************************
  val bpu_in_bypass_buf = RegEnable(ftq_pc_mem.io.wdata(0), enable=bpu_in_fire)
  val bpu_in_bypass_ptr = RegNext(bpu_in_resp_ptr)
  val last_cycle_bpu_in = RegNext(bpu_in_fire)
  val last_cycle_to_ifu_fire = RegNext(io.toIfu.req.fire)

  // read pc and target
  ftq_pc_mem.io.raddr.init.init.last := ifuPtr.value
  ftq_pc_mem.io.raddr.init.last := (ifuPtr+1.U).value

  io.toIfu.req.bits.ftqIdx := ifuPtr
  io.toIfu.req.bits.nextStartAddr := update_target(ifuPtr.value)
  io.toIfu.req.bits.ftqOffset := cfiIndex_vec(ifuPtr.value)

  val toIfuPcBundle = Wire(new Ftq_RF_Components)
  val entry_is_to_send = WireInit(false.B)

  when (last_cycle_bpu_in && bpu_in_bypass_ptr === ifuPtr) {
    toIfuPcBundle := bpu_in_bypass_buf
    entry_is_to_send := true.B
  }.elsewhen (last_cycle_to_ifu_fire) {
    toIfuPcBundle := ftq_pc_mem.io.rdata.init.last
    entry_is_to_send := RegNext(entry_fetch_status((ifuPtr+1.U).value) === f_to_send)
  }.otherwise {
    toIfuPcBundle := ftq_pc_mem.io.rdata.init.init.last
    entry_is_to_send := RegNext(entry_fetch_status(ifuPtr.value) === f_to_send)
  }
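  // Three sources hide the one-cycle pc-mem read latency: (1) the bypass
  // buffer, when last cycle's BPU write hit exactly the entry ifuPtr now
  // points to; (2) the (ifuPtr+1) read port, when a request fired last cycle
  // and ifuPtr has since advanced onto the address read back then; (3) the
  // ifuPtr read port otherwise, since ifuPtr is unchanged from last cycle.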

  io.toIfu.req.valid := entry_is_to_send && ifuPtr =/= bpuPtr
  io.toIfu.req.bits.fromFtqPcBundle(toIfuPcBundle)

  // when fall through is smaller in value than start address, there must be a false hit
  when (toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit) {
    when (io.toIfu.req.fire &&
      !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) &&
      !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr)
    ) {
      entry_hit_status(ifuPtr.value) := h_false_hit
      // XSError(true.B, "FTB false hit by fallThroughError, startAddr: %x, fallThru: %x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr)
    }
    XSDebug(true.B, "fallThruError! start:%x, fallThru:%x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr)
  }

  XSPerfAccumulate(f"fall_through_error_to_ifu", toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit &&
    io.toIfu.req.fire && !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) && !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr))

  val ifu_req_should_be_flushed =
    io.toIfu.flushFromBpu.shouldFlushByStage2(io.toIfu.req.bits.ftqIdx) ||
    io.toIfu.flushFromBpu.shouldFlushByStage3(io.toIfu.req.bits.ftqIdx)

  when (io.toIfu.req.fire && !ifu_req_should_be_flushed) {
    entry_fetch_status(ifuPtr.value) := f_sent
  }

  // *********************************************************************
  // **************************** wb from ifu ****************************
  // *********************************************************************
  val pdWb = io.fromIfu.pdWb
  val pds = pdWb.bits.pd
  val ifu_wb_valid = pdWb.valid
  val ifu_wb_idx = pdWb.bits.ftqIdx.value
  // read ports:                                                         commit update
  val ftq_pd_mem = Module(new SyncDataModuleTemplate(new Ftq_pd_Entry, FtqSize, 1, 1))
  ftq_pd_mem.io.wen(0) := ifu_wb_valid
  ftq_pd_mem.io.waddr(0) := pdWb.bits.ftqIdx.value
  ftq_pd_mem.io.wdata(0).fromPdWb(pdWb.bits)

  val hit_pd_valid = entry_hit_status(ifu_wb_idx) === h_hit && ifu_wb_valid
  val hit_pd_mispred = hit_pd_valid && pdWb.bits.misOffset.valid
  val hit_pd_mispred_reg = RegNext(hit_pd_mispred, init=false.B)
  val pd_reg       = RegEnable(pds,             enable = pdWb.valid)
  val start_pc_reg = RegEnable(pdWb.bits.pc(0), enable = pdWb.valid)
  val wb_idx_reg   = RegEnable(ifu_wb_idx,      enable = pdWb.valid)

  when (ifu_wb_valid) {
    val comm_stq_wen = VecInit(pds.map(_.valid).zip(pdWb.bits.instrRange).map{
      case (v, inRange) => v && inRange
    })
    (commitStateQueue(ifu_wb_idx) zip comm_stq_wen).map{
      case (qe, v) => when (v) { qe := c_valid }
    }
  }

  ifuWbPtr := ifuWbPtr + ifu_wb_valid

  ftb_entry_mem.io.raddr.head := ifu_wb_idx
  val has_false_hit = WireInit(false.B)
  when (RegNext(hit_pd_valid)) {
    // check for false hit
    val pred_ftb_entry = ftb_entry_mem.io.rdata.head
    val brSlots = pred_ftb_entry.brSlots
    val tailSlot = pred_ftb_entry.tailSlot
    // we check cfis that bpu predicted

    // bpu predicted branches but denied by predecode
    val br_false_hit =
      brSlots.map{
        s => s.valid && !(pd_reg(s.offset).valid && pd_reg(s.offset).isBr)
      }.reduce(_||_) ||
      (tailSlot.valid && pred_ftb_entry.tailSlot.sharing &&
        !(pd_reg(tailSlot.offset).valid && pd_reg(tailSlot.offset).isBr))

    val jmpOffset = tailSlot.offset
    val jmp_pd = pd_reg(jmpOffset)
    val jal_false_hit = pred_ftb_entry.jmpValid &&
      ((pred_ftb_entry.isJal  && !(jmp_pd.valid && jmp_pd.isJal)) ||
       (pred_ftb_entry.isJalr && !(jmp_pd.valid && jmp_pd.isJalr)) ||
       (pred_ftb_entry.isCall && !(jmp_pd.valid && jmp_pd.isCall)) ||
       (pred_ftb_entry.isRet  && !(jmp_pd.valid && jmp_pd.isRet))
      )

    has_false_hit := br_false_hit || jal_false_hit || hit_pd_mispred_reg
    XSDebug(has_false_hit, "FTB false hit by br or jal or hit_pd, startAddr: %x\n", pdWb.bits.pc(0))

    // assert(!has_false_hit)
  }

  when (has_false_hit) {
    entry_hit_status(wb_idx_reg) := h_false_hit
  }


  // **********************************************************************
  // **************************** backend read ****************************
  // **********************************************************************

  // pc reads
  for ((req, i) <- io.toBackend.pc_reads.zipWithIndex) {
    ftq_pc_mem.io.raddr(i) := req.ptr.value
    req.data := ftq_pc_mem.io.rdata(i).getPc(RegNext(req.offset))
  }
  // target read
  io.toBackend.target_read.data := RegNext(update_target(io.toBackend.target_read.ptr.value))

  // *******************************************************************************
  // **************************** redirect from backend ****************************
  // *******************************************************************************

  // redirect read cfiInfo, couples to redirectGen s2
  ftq_redirect_sram.io.ren.init.last := backendRedirect.valid
  ftq_redirect_sram.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  ftb_entry_mem.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  val stage3CfiInfo = ftq_redirect_sram.io.rdata.init.last
  val fromBackendRedirect = WireInit(backendRedirectReg)
  val backendRedirectCfi = fromBackendRedirect.bits.cfiUpdate
  backendRedirectCfi.fromFtqRedirectSram(stage3CfiInfo)

  val r_ftb_entry = ftb_entry_mem.io.rdata.init.last
  val r_ftqOffset = fromBackendRedirect.bits.ftqOffset

  when (entry_hit_status(fromBackendRedirect.bits.ftqIdx.value) === h_hit) {
    backendRedirectCfi.shift := PopCount(r_ftb_entry.getBrMaskByOffset(r_ftqOffset)) +&
      (backendRedirectCfi.pd.isBr && !r_ftb_entry.brIsSaved(r_ftqOffset) &&
      !r_ftb_entry.newBrCanNotInsert(r_ftqOffset))

    backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr && (r_ftb_entry.brIsSaved(r_ftqOffset) ||
        !r_ftb_entry.newBrCanNotInsert(r_ftqOffset))
  }.otherwise {
    backendRedirectCfi.shift := (backendRedirectCfi.pd.isBr && backendRedirectCfi.taken).asUInt
    backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr.asUInt
  }
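  // Roughly: shift is how many global-history bits this block consumes up to
  // the redirect point, i.e. the branches the FTB entry records ahead of this
  // offset, plus one if the redirecting instruction is itself a branch that
  // is not yet recorded but could still be inserted. addIntoHist tells the
  // BPU whether the redirecting branch itself occupies a history bit.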


  // ***************************************************************************
  // **************************** redirect from ifu ****************************
  // ***************************************************************************
  val fromIfuRedirect = WireInit(0.U.asTypeOf(Valid(new Redirect)))
  fromIfuRedirect.valid := pdWb.valid && pdWb.bits.misOffset.valid && !backendFlush
  fromIfuRedirect.bits.ftqIdx := pdWb.bits.ftqIdx
  fromIfuRedirect.bits.ftqOffset := pdWb.bits.misOffset.bits
  fromIfuRedirect.bits.level := RedirectLevel.flushAfter

  val ifuRedirectCfiUpdate = fromIfuRedirect.bits.cfiUpdate
  ifuRedirectCfiUpdate.pc := pdWb.bits.pc(pdWb.bits.misOffset.bits)
  ifuRedirectCfiUpdate.pd := pdWb.bits.pd(pdWb.bits.misOffset.bits)
  ifuRedirectCfiUpdate.predTaken := cfiIndex_vec(pdWb.bits.ftqIdx.value).valid
  ifuRedirectCfiUpdate.target := pdWb.bits.target
  ifuRedirectCfiUpdate.taken := pdWb.bits.cfiOffset.valid
  ifuRedirectCfiUpdate.isMisPred := pdWb.bits.misOffset.valid

  val ifuRedirectReg = RegNext(fromIfuRedirect, init=0.U.asTypeOf(Valid(new Redirect)))
  val ifuRedirectToBpu = WireInit(ifuRedirectReg)
  ifuFlush := fromIfuRedirect.valid || ifuRedirectToBpu.valid

  ftq_redirect_sram.io.ren.head := fromIfuRedirect.valid
  ftq_redirect_sram.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value

  ftb_entry_mem.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value

  val toBpuCfi = ifuRedirectToBpu.bits.cfiUpdate
  toBpuCfi.fromFtqRedirectSram(ftq_redirect_sram.io.rdata.head)
  when (ifuRedirectReg.bits.cfiUpdate.pd.isRet) {
    toBpuCfi.target := toBpuCfi.rasEntry.retAddr
  }

  // *********************************************************************
  // **************************** wb from exu ****************************
  // *********************************************************************

  class RedirectGen(implicit p: Parameters) extends XSModule
    with HasCircularQueuePtrHelper {
    val io = IO(new Bundle {
      val in = Flipped((new CtrlToFtqIO).for_redirect_gen)
      val stage1Pc = Input(Vec(numRedirectPcRead, UInt(VAddrBits.W)))
      val out = Valid(new Redirect)
      val s1_real_pc = Output(UInt(VAddrBits.W))
      val debug_diff = Flipped(Valid(new Redirect))
    })
    val s1_jumpTarget = io.in.s1_jumpTarget
    val s1_uop = io.in.s1_oldest_exu_output.bits.uop
    val s1_imm12_reg = s1_uop.ctrl.imm(11,0)
    val s1_pd = s1_uop.cf.pd
    val s1_isReplay = io.in.s1_redirect_onehot.last
    val s1_isJump = io.in.s1_redirect_onehot.head
    val real_pc = Mux1H(io.in.s1_redirect_onehot, io.stage1Pc)
    val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
    val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
    val target = Mux(s1_isReplay,
      real_pc,
      Mux(io.in.s1_oldest_redirect.bits.cfiUpdate.taken,
        Mux(s1_isJump, io.in.s1_jumpTarget, brTarget),
        snpc
      )
    )

    val redirectGenRes = WireInit(io.in.rawRedirect)
    redirectGenRes.bits.cfiUpdate.pc := real_pc
    redirectGenRes.bits.cfiUpdate.pd := s1_pd
    redirectGenRes.bits.cfiUpdate.target := target

    val realRedirect = Wire(Valid(new Redirect))
    realRedirect.valid := redirectGenRes.valid || io.in.flushRedirect.valid
    realRedirect.bits := Mux(io.in.flushRedirect.valid, io.in.flushRedirect.bits, redirectGenRes.bits)

    when (io.in.flushRedirect.valid) {
      realRedirect.bits.level := RedirectLevel.flush
      realRedirect.bits.cfiUpdate.target := io.in.frontendFlushTarget
    }

    io.out := realRedirect
    io.s1_real_pc := real_pc
    XSError((io.debug_diff.valid || realRedirect.valid) && io.debug_diff.asUInt =/= io.out.asUInt, "redirect wrong")

  }

  val redirectGen = Module(new RedirectGen)
  redirectGen.io.in <> io.fromBackend.for_redirect_gen
  redirectGen.io.stage1Pc := io.toBackend.getRedirectPcReadData
  redirectGen.io.debug_diff := io.fromBackend.redirect
  backendRedirect := redirectGen.io.out

  io.toBackend.redirect_s1_real_pc := redirectGen.io.s1_real_pc

  def extractRedirectInfo(wb: Valid[Redirect]) = {
    val ftqIdx = wb.bits.ftqIdx.value
    val ftqOffset = wb.bits.ftqOffset
    val taken = wb.bits.cfiUpdate.taken
    val mispred = wb.bits.cfiUpdate.isMisPred
    (wb.valid, ftqIdx, ftqOffset, taken, mispred)
  }

  // fix mispredict entry
  val lastIsMispredict = RegNext(
    backendRedirect.valid && backendRedirect.bits.level === RedirectLevel.flushAfter, init = false.B
  )

  def updateCfiInfo(redirect: Valid[Redirect], isBackend: Boolean = true) = {
    val (r_valid, r_idx, r_offset, r_taken, r_mispred) = extractRedirectInfo(redirect)
    val cfiIndex_bits_wen = r_valid && r_taken && r_offset < cfiIndex_vec(r_idx).bits
    val cfiIndex_valid_wen = r_valid && r_offset === cfiIndex_vec(r_idx).bits
    when (cfiIndex_bits_wen || cfiIndex_valid_wen) {
      cfiIndex_vec(r_idx).valid := cfiIndex_bits_wen || cfiIndex_valid_wen && r_taken
    }
    when (cfiIndex_bits_wen) {
      cfiIndex_vec(r_idx).bits := r_offset
    }
    update_target(r_idx) := redirect.bits.cfiUpdate.target
    if (isBackend) {
      mispredict_vec(r_idx)(r_offset) := r_mispred
    }
  }
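  // cfiIndex update rules: a taken redirect at an earlier offset than the
  // recorded cfi overwrites bits and sets valid; a redirect at exactly the
  // recorded offset recomputes valid from r_taken (clearing it when the
  // branch turned out not taken); later offsets leave the entry untouched.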

  when(backendRedirectReg.valid && lastIsMispredict) {
    updateCfiInfo(backendRedirectReg)
  }.elsewhen (ifuRedirectToBpu.valid) {
    updateCfiInfo(ifuRedirectToBpu, isBackend=false)
  }

  // ***********************************************************************************
  // **************************** flush ptr and state queue ****************************
  // ***********************************************************************************

  val redirectVec = VecInit(backendRedirect, fromIfuRedirect)

  // when redirect, we should reset ptrs and status queues
  when(redirectVec.map(r => r.valid).reduce(_||_)){
    val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits)))
    val notIfu = redirectVec.dropRight(1).map(r => r.valid).reduce(_||_)
    val (idx, offset, flushItSelf) = (r.ftqIdx, r.ftqOffset, RedirectLevel.flushItself(r.level))
    val next = idx + 1.U
    bpuPtr := next
    ifuPtr := next
    ifuWbPtr := next
    when (notIfu) {
      commitStateQueue(idx.value).zipWithIndex.foreach({ case (s, i) =>
        when(i.U > offset || i.U === offset && flushItSelf){
          s := c_invalid
        }
      })
    }
  }

  // only the valid bit is actually needed
  io.toIfu.redirect.bits    := backendRedirect.bits
  io.toIfu.redirect.valid   := stage2Flush

  // commit
  for (c <- io.fromBackend.rob_commits) {
    when(c.valid) {
      commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset) := c_commited
      // TODO: remove this
      // For instruction fusions, we also update the next instruction
      when (c.bits.commitType === 4.U) {
        commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 1.U) := c_commited
      }.elsewhen(c.bits.commitType === 5.U) {
        commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 2.U) := c_commited
      }.elsewhen(c.bits.commitType === 6.U) {
        val index = (c.bits.ftqIdx + 1.U).value
        commitStateQueue(index)(0) := c_commited
      }.elsewhen(c.bits.commitType === 7.U) {
        val index = (c.bits.ftqIdx + 1.U).value
        commitStateQueue(index)(1) := c_commited
      }
    }
  }
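  // As read off the code above: commitType 4/5 additionally mark the slot
  // 1/2 positions after the committing one, and 6/7 mark slot 0/1 of the
  // next FTQ entry; presumably these encode the fusion cases in which the
  // fused pair straddles a slot or an entry boundary.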

  // ****************************************************************
  // **************************** to bpu ****************************
  // ****************************************************************

  io.toBpu.redirect <> Mux(fromBackendRedirect.valid, fromBackendRedirect, ifuRedirectToBpu)

  val may_have_stall_from_bpu = RegInit(false.B)
  val canCommit = commPtr =/= ifuWbPtr && !may_have_stall_from_bpu &&
    Cat(commitStateQueue(commPtr.value).map(s => {
      s === c_invalid || s === c_commited
    })).andR()
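  // An entry may commit once predecode has written it back (commPtr has not
  // caught up with ifuWbPtr), every slot is either committed or never became
  // valid, and we are not in the one-cycle window in which a late BPU s2/s3
  // override may still rewrite the entry (may_have_stall_from_bpu, set below
  // when the committing entry has a taken cfi but no FTB hit).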

  // commit reads
  ftq_pc_mem.io.raddr.last := commPtr.value
  val commit_pc_bundle = ftq_pc_mem.io.rdata.last
  ftq_pd_mem.io.raddr.last := commPtr.value
  val commit_pd = ftq_pd_mem.io.rdata.last
  ftq_redirect_sram.io.ren.last := canCommit
  ftq_redirect_sram.io.raddr.last := commPtr.value
  val commit_spec_meta = ftq_redirect_sram.io.rdata.last
  ftq_meta_1r_sram.io.ren(0) := canCommit
  ftq_meta_1r_sram.io.raddr(0) := commPtr.value
  val commit_meta = ftq_meta_1r_sram.io.rdata(0)
  ftb_entry_mem.io.raddr.last := commPtr.value
  val commit_ftb_entry = ftb_entry_mem.io.rdata.last

  // need one cycle to read mem and srams
  val do_commit_ptr = RegNext(commPtr)
  val do_commit = RegNext(canCommit, init=false.B)
  when (canCommit) { commPtr := commPtr + 1.U }
  val commit_state = RegNext(commitStateQueue(commPtr.value))
  val can_commit_cfi = WireInit(cfiIndex_vec(commPtr.value))
  when (commitStateQueue(commPtr.value)(can_commit_cfi.bits) =/= c_commited) {
    can_commit_cfi.valid := false.B
  }
  val commit_cfi = RegNext(can_commit_cfi)

  val commit_mispredict = VecInit((RegNext(mispredict_vec(commPtr.value)) zip commit_state).map {
    case (mis, state) => mis && state === c_commited
  })
  val can_commit_hit = entry_hit_status(commPtr.value)
  val commit_hit = RegNext(can_commit_hit)
  val commit_target = RegNext(update_target(commPtr.value))
  val commit_stage = RegNext(pred_stage(commPtr.value))
  val commit_valid = commit_hit === h_hit || commit_cfi.valid // hit or taken

  val to_bpu_hit = can_commit_hit === h_hit || can_commit_hit === h_false_hit
  may_have_stall_from_bpu := can_commit_cfi.valid && !to_bpu_hit && !may_have_stall_from_bpu

  io.toBpu.update := DontCare
  io.toBpu.update.valid := commit_valid && do_commit
  val update = io.toBpu.update.bits
  update.false_hit   := commit_hit === h_false_hit
  update.pc          := commit_pc_bundle.startAddr
  update.meta        := commit_meta.meta
  update.full_target := commit_target
  update.from_stage  := commit_stage
  update.fromFtqRedirectSram(commit_spec_meta)

  val commit_real_hit = commit_hit === h_hit
  val update_ftb_entry = update.ftb_entry

  val ftbEntryGen = Module(new FTBEntryGen).io
  ftbEntryGen.start_addr     := commit_pc_bundle.startAddr
  ftbEntryGen.old_entry      := commit_ftb_entry
  ftbEntryGen.pd             := commit_pd
  ftbEntryGen.cfiIndex       := commit_cfi
  ftbEntryGen.target         := commit_target
  ftbEntryGen.hit            := commit_real_hit
  ftbEntryGen.mispredict_vec := commit_mispredict

  update_ftb_entry         := ftbEntryGen.new_entry
  update.new_br_insert_pos := ftbEntryGen.new_br_insert_pos
  update.mispred_mask      := ftbEntryGen.mispred_mask
  update.old_entry         := ftbEntryGen.is_old_entry
  update.pred_hit          := commit_hit === h_hit || commit_hit === h_false_hit

  update.is_minimal := false.B
  update.full_pred.fromFtbEntry(ftbEntryGen.new_entry, update.pc)
  update.full_pred.br_taken_mask  := ftbEntryGen.taken_mask
  update.full_pred.jalr_target := commit_target
  update.full_pred.hit := true.B
  when (update.full_pred.is_jalr) {
    update.full_pred.targets.last := commit_target
  }

  // ****************************************************************
  // *********************** to prefetch ****************************
  // ****************************************************************

  if(cacheParams.hasPrefetch){
    val prefetchPtr = RegInit(FtqPtr(false.B, 0.U))
    prefetchPtr := prefetchPtr + io.toPrefetch.req.fire()

    when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s2_resp.ftq_idx)) {
      prefetchPtr := bpu_s2_resp.ftq_idx
    }

    when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s3_resp.ftq_idx)) {
      prefetchPtr := bpu_s3_resp.ftq_idx
      // XSError(true.B, "\ns3_redirect mechanism not implemented!\n")
    }

    io.toPrefetch.req.valid := prefetchPtr =/= bpuPtr && entry_fetch_status(prefetchPtr.value) === f_to_send
    io.toPrefetch.req.bits.target := update_target(prefetchPtr.value)

    when(redirectVec.map(r => r.valid).reduce(_||_)){
      val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits)))
      val next = r.ftqIdx + 1.U
      prefetchPtr := next
    }

    XSError(isBefore(bpuPtr, prefetchPtr) && !isFull(bpuPtr, prefetchPtr), "\nprefetchPtr is before bpuPtr!\n")
  }
  else {
    io.toPrefetch.req <> DontCare
  }

  // ******************************************************************************
  // **************************** commit perf counters ****************************
  // ******************************************************************************

  val commit_inst_mask    = VecInit(commit_state.map(c => c === c_commited && do_commit)).asUInt
  val commit_mispred_mask = commit_mispredict.asUInt
  val commit_not_mispred_mask = ~commit_mispred_mask

  val commit_br_mask = commit_pd.brMask.asUInt
  val commit_jmp_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.jmpInfo.valid.asTypeOf(UInt(1.W)))
  val commit_cfi_mask = (commit_br_mask | commit_jmp_mask)

  val mbpInstrs = commit_inst_mask & commit_cfi_mask

  val mbpRights = mbpInstrs & commit_not_mispred_mask
  val mbpWrongs = mbpInstrs & commit_mispred_mask

  io.bpuInfo.bpRight := PopCount(mbpRights)
  io.bpuInfo.bpWrong := PopCount(mbpWrongs)

  // Cfi Info
  for (i <- 0 until PredictWidth) {
    val pc = commit_pc_bundle.startAddr + (i * instBytes).U
    val v = commit_state(i) === c_commited
    val isBr = commit_pd.brMask(i)
    val isJmp = commit_pd.jmpInfo.valid && commit_pd.jmpOffset === i.U
    val isCfi = isBr || isJmp
    val isTaken = commit_cfi.valid && commit_cfi.bits === i.U
    val misPred = commit_mispredict(i)
    // val ghist = commit_spec_meta.ghist.predHist
    val histPtr = commit_spec_meta.histPtr
    val predCycle = commit_meta.meta(63, 0)
    val target = commit_target

    val brIdx = OHToUInt(Reverse(Cat(update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U})))
    val inFtbEntry = update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U}.reduce(_||_)
    val addIntoHist = ((commit_hit === h_hit) && inFtbEntry) || ((!(commit_hit === h_hit) && i.U === commit_cfi.bits && isBr && commit_cfi.valid))
    XSDebug(v && do_commit && isCfi, p"cfi_update: isBr(${isBr}) pc(${Hexadecimal(pc)}) " +
    p"taken(${isTaken}) mispred(${misPred}) cycle($predCycle) hist(${histPtr.value}) " +
    p"startAddr(${Hexadecimal(commit_pc_bundle.startAddr)}) AddIntoHist(${addIntoHist}) " +
    p"brInEntry(${inFtbEntry}) brIdx(${brIdx}) target(${Hexadecimal(target)})\n")
  }

  val enq = io.fromBpu.resp
  val perf_redirect = backendRedirect

  XSPerfAccumulate("entry", validEntries)
  XSPerfAccumulate("bpu_to_ftq_stall", enq.valid && !enq.ready)
  XSPerfAccumulate("mispredictRedirect", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level)
  XSPerfAccumulate("replayRedirect", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level))
  XSPerfAccumulate("predecodeRedirect", fromIfuRedirect.valid)

  XSPerfAccumulate("to_ifu_bubble", io.toIfu.req.ready && !io.toIfu.req.valid)

  XSPerfAccumulate("to_ifu_stall", io.toIfu.req.valid && !io.toIfu.req.ready)
  XSPerfAccumulate("from_bpu_real_bubble", !enq.valid && enq.ready && allowBpuIn)
  XSPerfAccumulate("bpu_to_ifu_bubble", bpuPtr === ifuPtr)

  val from_bpu = io.fromBpu.resp.bits
  def in_entry_len_map_gen(resp: BranchPredictionBundle)(stage: String) = {
    assert(!resp.is_minimal)
    val entry_len = (resp.ftb_entry.getFallThrough(resp.pc) - resp.pc) >> instOffsetBits
    val entry_len_recording_vec = (1 to PredictWidth+1).map(i => entry_len === i.U)
    val entry_len_map = (1 to PredictWidth+1).map(i =>
      f"${stage}_ftb_entry_len_$i" -> (entry_len_recording_vec(i-1) && resp.valid)
    ).foldLeft(Map[String, UInt]())(_+_)
    entry_len_map
  }
  val s2_entry_len_map = in_entry_len_map_gen(from_bpu.s2)("s2")
  val s3_entry_len_map = in_entry_len_map_gen(from_bpu.s3)("s3")

  val to_ifu = io.toIfu.req.bits



  val commit_num_inst_recording_vec = (1 to PredictWidth).map(i => PopCount(commit_inst_mask) === i.U)
  val commit_num_inst_map = (1 to PredictWidth).map(i =>
    f"commit_num_inst_$i" -> (commit_num_inst_recording_vec(i-1) && do_commit)
  ).foldLeft(Map[String, UInt]())(_+_)



  val commit_jal_mask  = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJal.asTypeOf(UInt(1.W)))
  val commit_jalr_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJalr.asTypeOf(UInt(1.W)))
  val commit_call_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasCall.asTypeOf(UInt(1.W)))
  val commit_ret_mask  = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasRet.asTypeOf(UInt(1.W)))


  val mbpBRights = mbpRights & commit_br_mask
  val mbpJRights = mbpRights & commit_jal_mask
  val mbpIRights = mbpRights & commit_jalr_mask
  val mbpCRights = mbpRights & commit_call_mask
  val mbpRRights = mbpRights & commit_ret_mask

  val mbpBWrongs = mbpWrongs & commit_br_mask
  val mbpJWrongs = mbpWrongs & commit_jal_mask
  val mbpIWrongs = mbpWrongs & commit_jalr_mask
  val mbpCWrongs = mbpWrongs & commit_call_mask
  val mbpRWrongs = mbpWrongs & commit_ret_mask

  val commit_pred_stage = RegNext(pred_stage(commPtr.value))

  def pred_stage_map(src: UInt, name: String) = {
    (0 until numBpStages).map(i =>
      f"${name}_stage_${i+1}" -> PopCount(src.asBools.map(_ && commit_pred_stage === BP_STAGES(i)))
    ).foldLeft(Map[String, UInt]())(_+_)
  }

  val mispred_stage_map      = pred_stage_map(mbpWrongs,  "mispredict")
  val br_mispred_stage_map   = pred_stage_map(mbpBWrongs, "br_mispredict")
  val jalr_mispred_stage_map = pred_stage_map(mbpIWrongs, "jalr_mispredict")
  val correct_stage_map      = pred_stage_map(mbpRights,  "correct")
  val br_correct_stage_map   = pred_stage_map(mbpBRights, "br_correct")
  val jalr_correct_stage_map = pred_stage_map(mbpIRights, "jalr_correct")

  val update_valid = io.toBpu.update.valid
  def u(cond: Bool) = update_valid && cond
  val ftb_false_hit = u(update.false_hit)
  // assert(!ftb_false_hit)
  val ftb_hit = u(commit_hit === h_hit)

  val ftb_new_entry = u(ftbEntryGen.is_init_entry)
  val ftb_new_entry_only_br = ftb_new_entry && !update_ftb_entry.jmpValid
  val ftb_new_entry_only_jmp = ftb_new_entry && !update_ftb_entry.brValids(0)
  val ftb_new_entry_has_br_and_jmp = ftb_new_entry && update_ftb_entry.brValids(0) && update_ftb_entry.jmpValid

  val ftb_old_entry = u(ftbEntryGen.is_old_entry)

  val ftb_modified_entry = u(ftbEntryGen.is_new_br || ftbEntryGen.is_jalr_target_modified || ftbEntryGen.is_always_taken_modified)
  val ftb_modified_entry_new_br = u(ftbEntryGen.is_new_br)
  val ftb_modified_entry_jalr_target_modified = u(ftbEntryGen.is_jalr_target_modified)
  val ftb_modified_entry_br_full = ftb_modified_entry && ftbEntryGen.is_br_full
  val ftb_modified_entry_always_taken = ftb_modified_entry && ftbEntryGen.is_always_taken_modified

  val ftb_entry_len = (ftbEntryGen.new_entry.getFallThrough(update.pc) - update.pc) >> instOffsetBits
  val ftb_entry_len_recording_vec = (1 to PredictWidth+1).map(i => ftb_entry_len === i.U)
  val ftb_init_entry_len_map = (1 to PredictWidth+1).map(i =>
    f"ftb_init_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_new_entry)
  ).foldLeft(Map[String, UInt]())(_+_)
  val ftb_modified_entry_len_map = (1 to PredictWidth+1).map(i =>
    f"ftb_modified_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_modified_entry)
  ).foldLeft(Map[String, UInt]())(_+_)

  val ftq_occupancy_map = (0 to FtqSize).map(i =>
    f"ftq_has_entry_$i" -> (validEntries === i.U)
  ).foldLeft(Map[String, UInt]())(_+_)

  val perfCountsMap = Map(
    "BpInstr" -> PopCount(mbpInstrs),
    "BpBInstr" -> PopCount(mbpBRights | mbpBWrongs),
    "BpRight"  -> PopCount(mbpRights),
    "BpWrong"  -> PopCount(mbpWrongs),
    "BpBRight" -> PopCount(mbpBRights),
    "BpBWrong" -> PopCount(mbpBWrongs),
    "BpJRight" -> PopCount(mbpJRights),
    "BpJWrong" -> PopCount(mbpJWrongs),
    "BpIRight" -> PopCount(mbpIRights),
    "BpIWrong" -> PopCount(mbpIWrongs),
    "BpCRight" -> PopCount(mbpCRights),
    "BpCWrong" -> PopCount(mbpCWrongs),
    "BpRRight" -> PopCount(mbpRRights),
    "BpRWrong" -> PopCount(mbpRWrongs),

    "ftb_false_hit"                -> PopCount(ftb_false_hit),
    "ftb_hit"                      -> PopCount(ftb_hit),
    "ftb_new_entry"                -> PopCount(ftb_new_entry),
    "ftb_new_entry_only_br"        -> PopCount(ftb_new_entry_only_br),
    "ftb_new_entry_only_jmp"       -> PopCount(ftb_new_entry_only_jmp),
    "ftb_new_entry_has_br_and_jmp" -> PopCount(ftb_new_entry_has_br_and_jmp),
    "ftb_old_entry"                -> PopCount(ftb_old_entry),
    "ftb_modified_entry"           -> PopCount(ftb_modified_entry),
    "ftb_modified_entry_new_br"    -> PopCount(ftb_modified_entry_new_br),
    "ftb_jalr_target_modified"     -> PopCount(ftb_modified_entry_jalr_target_modified),
    "ftb_modified_entry_br_full"   -> PopCount(ftb_modified_entry_br_full),
    "ftb_modified_entry_always_taken" -> PopCount(ftb_modified_entry_always_taken)
  ) ++ ftb_init_entry_len_map ++ ftb_modified_entry_len_map ++ s2_entry_len_map ++
  s3_entry_len_map ++ commit_num_inst_map ++ ftq_occupancy_map ++
  mispred_stage_map ++ br_mispred_stage_map ++ jalr_mispred_stage_map ++
  correct_stage_map ++ br_correct_stage_map ++ jalr_correct_stage_map

  for((key, value) <- perfCountsMap) {
    XSPerfAccumulate(key, value)
  }

  // --------------------------- Debug --------------------------------
  // XSDebug(enq_fire, p"enq! " + io.fromBpu.resp.bits.toPrintable)
  XSDebug(io.toIfu.req.fire, p"fire to ifu " + io.toIfu.req.bits.toPrintable)
  XSDebug(do_commit, p"deq! [ptr] $do_commit_ptr\n")
  XSDebug(true.B, p"[bpuPtr] $bpuPtr, [ifuPtr] $ifuPtr, [ifuWbPtr] $ifuWbPtr [commPtr] $commPtr\n")
  XSDebug(true.B, p"[in] v:${io.fromBpu.resp.valid} r:${io.fromBpu.resp.ready} " +
    p"[out] v:${io.toIfu.req.valid} r:${io.toIfu.req.ready}\n")
  XSDebug(do_commit, p"[deq info] cfiIndex: $commit_cfi, $commit_pc_bundle, target: ${Hexadecimal(commit_target)}\n")

  //   def ubtbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //     commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //       case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ Mux(ans.hit.asBool,
  //           Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
  //           !taken),
  //         !taken),
  //       false.B)
  //     }
  //   }

  //   def btbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //     commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //       case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ Mux(ans.hit.asBool,
  //           Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
  //           !taken),
  //         !taken),
  //       false.B)
  //     }
  //   }

  //   def tageCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //     commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //       case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ (ans.taken.asBool === taken),
  //       false.B)
  //     }
  //   }

  //   def loopCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //     commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //       case (((valid, pd), ans), taken) =>
  //       Mux(valid && (pd.isBr) && ans.hit.asBool,
  //         isWrong ^ (!taken),
  //           false.B)
  //     }
  //   }

  //   def rasCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //     commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //       case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isRet.asBool /*&& taken*/ && ans.hit.asBool,
  //         isWrong ^ (ans.target === commitEntry.target),
  //           false.B)
  //     }
  //   }

  //   val ubtbRights = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), false.B)
  //   val ubtbWrongs = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), true.B)
  //   // btb and ubtb pred jal and jalr as well
  //   val btbRights = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), false.B)
  //   val btbWrongs = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), true.B)
  //   val tageRights = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), false.B)
  //   val tageWrongs = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), true.B)

  //   val loopRights = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), false.B)
  //   val loopWrongs = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), true.B)

  //   val rasRights = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), false.B)
  //   val rasWrongs = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), true.B)

  val perfEvents = Seq(
    ("bpu_s2_redirect        ", bpu_s2_redirect                                                             ),
    ("bpu_s3_redirect        ", bpu_s3_redirect                                                             ),
    ("bpu_to_ftq_stall       ", enq.valid && ~enq.ready                                                     ),
    ("mispredictRedirect     ", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level),
    ("replayRedirect         ", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level)  ),
    ("predecodeRedirect      ", fromIfuRedirect.valid                                                       ),
    ("to_ifu_bubble          ", io.toIfu.req.ready && !io.toIfu.req.valid                                   ),
    ("from_bpu_real_bubble   ", !enq.valid && enq.ready && allowBpuIn                                       ),
    ("BpInstr                ", PopCount(mbpInstrs)                                                         ),
    ("BpBInstr               ", PopCount(mbpBRights | mbpBWrongs)                                           ),
    ("BpRight                ", PopCount(mbpRights)                                                         ),
    ("BpWrong                ", PopCount(mbpWrongs)                                                         ),
    ("BpBRight               ", PopCount(mbpBRights)                                                        ),
    ("BpBWrong               ", PopCount(mbpBWrongs)                                                        ),
    ("BpJRight               ", PopCount(mbpJRights)                                                        ),
    ("BpJWrong               ", PopCount(mbpJWrongs)                                                        ),
    ("BpIRight               ", PopCount(mbpIRights)                                                        ),
    ("BpIWrong               ", PopCount(mbpIWrongs)                                                        ),
    ("BpCRight               ", PopCount(mbpCRights)                                                        ),
    ("BpCWrong               ", PopCount(mbpCWrongs)                                                        ),
    ("BpRRight               ", PopCount(mbpRRights)                                                        ),
    ("BpRWrong               ", PopCount(mbpRWrongs)                                                        ),
    ("ftb_false_hit          ", PopCount(ftb_false_hit)                                                     ),
    ("ftb_hit                ", PopCount(ftb_hit)                                                           ),
  )
  generatePerfEvent()
}
1310