/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.ExceptionNO._

class IBufPtr(implicit p: Parameters) extends CircularQueuePtr[IBufPtr](
  p => p(XSCoreParamsKey).IBufSize
) {
}

class IBufInBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufInBankPtr](
  p => p(XSCoreParamsKey).IBufSize / p(XSCoreParamsKey).IBufNBank
) {
}

class IBufBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufBankPtr](
  p => p(XSCoreParamsKey).IBufNBank
) {
}
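// The three pointer types above index the same storage at different granularities:
//   IBufPtr       - flat index over all IBufSize entries
//   IBufBankPtr   - selects one of the IBufNBank read banks
//   IBufInBankPtr - index within a bank (IBufSize / IBufNBank entries per bank)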

class IBufferIO(implicit p: Parameters) extends XSBundle {
  val flush = Input(Bool())
  val ControlRedirect = Input(Bool())
  val ControlBTBMissBubble = Input(Bool())
  val TAGEMissBubble = Input(Bool())
  val SCMissBubble = Input(Bool())
  val ITTAGEMissBubble = Input(Bool())
  val RASMissBubble = Input(Bool())
  val MemVioRedirect = Input(Bool())
  val in = Flipped(DecoupledIO(new FetchToIBuffer))
  val out = Vec(DecodeWidth, DecoupledIO(new CtrlFlow))
  val full = Output(Bool())
  val decodeCanAccept = Input(Bool())
  val stallReason = new StallReasonIO(DecodeWidth)
}

class IBufEntry(implicit p: Parameters) extends XSBundle {
  val inst = UInt(32.W)
  val pc = UInt(VAddrBits.W)
  val foldpc = UInt(MemPredPCWidth.W)
  val pd = new PreDecodeInfo
  val pred_taken = Bool()
  val ftqPtr = new FtqPtr
  val ftqOffset = UInt(log2Ceil(PredictWidth).W)
  val exceptionType = IBufferExceptionType()
  val triggered = TriggerAction()

  def fromFetch(fetch: FetchToIBuffer, i: Int): IBufEntry = {
    inst   := fetch.instrs(i)
    pc     := fetch.pc(i)
    foldpc := fetch.foldpc(i)
    pd     := fetch.pd(i)
    pred_taken := fetch.ftqOffset(i).valid
    ftqPtr := fetch.ftqPtr
    ftqOffset := fetch.ftqOffset(i).bits
    exceptionType := IBufferExceptionType.cvtFromFetchExcpAndCrossPageAndRVCII(
      fetch.exceptionType(i),
      fetch.crossPageIPFFix(i),
      fetch.illegalInstr(i),
    )
    triggered := fetch.triggered(i)
    this
  }

  def toCtrlFlow: CtrlFlow = {
    val cf = Wire(new CtrlFlow)
    cf.instr := inst
    cf.pc := pc
    cf.foldpc := foldpc
    cf.exceptionVec := 0.U.asTypeOf(ExceptionVec())
    cf.exceptionVec(instrPageFault)      := IBufferExceptionType.isPF (this.exceptionType)
    cf.exceptionVec(instrGuestPageFault) := IBufferExceptionType.isGPF(this.exceptionType)
    cf.exceptionVec(instrAccessFault)    := IBufferExceptionType.isAF (this.exceptionType)
    cf.exceptionVec(EX_II)               := IBufferExceptionType.isRVCII(this.exceptionType)
    cf.trigger := triggered
    cf.pd := pd
    cf.pred_taken := pred_taken
    cf.crossPageIPFFix := IBufferExceptionType.isCrossPage(this.exceptionType)
    cf.storeSetHit := DontCare
    cf.waitForRobIdx := DontCare
    cf.loadWaitBit := DontCare
    cf.loadWaitStrict := DontCare
    cf.ssid := DontCare
    cf.ftqPtr := ftqPtr
    cf.ftqOffset := ftqOffset
    cf
  }

  object IBufferExceptionType extends NamedUInt(3) {
    def None         = "b000".U
    def NonCrossPF   = "b001".U
    def NonCrossGPF  = "b010".U
    def NonCrossAF   = "b011".U
    // illegal instruction
    def rvcII        = "b100".U
    def CrossPF      = "b101".U
    def CrossGPF     = "b110".U
    def CrossAF      = "b111".U
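    // Encoding (derived from the constants above and the conversion below):
    //   bits(1, 0) carry the fetch exception kind (none / PF / GPF / AF),
    //   bit(2) set means either an RVC illegal instruction (low bits == 0)
    //   or the cross-page variant of the exception (low bits != 0).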

    def cvtFromFetchExcpAndCrossPageAndRVCII(fetchExcp: UInt, crossPage: Bool, rvcIll: Bool): UInt = {
      require(
        fetchExcp.getWidth == ExceptionType.width,
        s"The width(${fetchExcp.getWidth}) of fetchExcp should be equal to " +
        s"the width(${ExceptionType.width}) of frontend.ExceptionType."
      )
      MuxCase(fetchExcp, Seq(
        rvcIll    -> this.rvcII,
        crossPage -> Cat(1.U(1.W), fetchExcp),
      ))
    }

    def isRVCII(uint: UInt): Bool = {
      this.checkInputWidth(uint)
      uint(2) && uint(1, 0) === 0.U
    }

    def isCrossPage(uint: UInt): Bool = {
      this.checkInputWidth(uint)
      uint(2) && uint(1, 0) =/= 0.U
    }

    def isPF (uint: UInt): Bool = uint(1, 0) === this.NonCrossPF (1, 0)
    def isGPF(uint: UInt): Bool = uint(1, 0) === this.NonCrossGPF(1, 0)
    def isAF (uint: UInt): Bool = uint(1, 0) === this.NonCrossAF (1, 0)
  }
}

class IBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper with HasPerfEvents {
  val io = IO(new IBufferIO)

  // io alias
  private val decodeCanAccept = io.decodeCanAccept

  // Parameter Check
  private val bankSize = IBufSize / IBufNBank
  require(IBufSize % IBufNBank == 0, s"IBufNBank should divide IBufSize, IBufNBank: $IBufNBank, IBufSize: $IBufSize")
  require(IBufNBank >= DecodeWidth,
    s"IBufNBank should be equal to or larger than DecodeWidth, IBufNBank: $IBufNBank, DecodeWidth: $DecodeWidth")

  // IBuffer is organized as raw registers
  // This is because IBuffer is a huge queue, so read & write port logic should be precisely controlled
  //                             . + + E E E - .
  //                             . + + E E E - .
  //                             . . + E E E - .
  //                             . . + E E E E -
  // As shown above, + means enqueue, - means dequeue, E is current content
  // When dequeuing, the read ports are organized like a banked FIFO
  // Dequeue reads no more than 1 entry from each bank sequentially; this can be exploited to reduce area
  // Enqueue writes cannot benefit from this characteristic unless an SRAM is used
  // For details see Enqueue and Dequeue below
  private val ibuf: Vec[IBufEntry] = RegInit(VecInit.fill(IBufSize)(0.U.asTypeOf(new IBufEntry)))
  private val bankedIBufView: Vec[Vec[IBufEntry]] = VecInit.tabulate(IBufNBank)(
    bankID => VecInit.tabulate(bankSize)(
      inBankOffset => ibuf(bankID + inBankOffset * IBufNBank)
    )
  )
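  // Bank mapping implied by the view above: physical entry i lives in bank (i % IBufNBank) at
  // in-bank offset (i / IBufNBank), so consecutive entries fall into consecutive banks.
  // Illustrative example (not the actual configuration): with IBufSize = 8 and IBufNBank = 4,
  // bank 1 holds entries 1 and 5.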


  // Bypass wire
  private val bypassEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Normal read wire
  private val deqEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Output register
  private val outputEntries = RegInit(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
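  // Number of leading valid entries currently held in outputEntries (they are packed from index 0).
  // Note: the zip below only yields counts up to DecodeWidth - 1; this appears sufficient because
  // the value is only consumed when outputEntriesIsNotFull, i.e. when the last slot is empty.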
  private val outputEntriesValidNum = PriorityMuxDefault(outputEntries.map(_.valid).zip(Seq.range(1, DecodeWidth).map(_.U)).reverse.toSeq, 0.U)

  // Between Bank
  private val deqBankPtrVec: Vec[IBufBankPtr] = RegInit(VecInit.tabulate(DecodeWidth)(_.U.asTypeOf(new IBufBankPtr)))
  private val deqBankPtr: IBufBankPtr = deqBankPtrVec(0)
  private val deqBankPtrVecNext = Wire(deqBankPtrVec.cloneType)
  // Inside Bank
  private val deqInBankPtr: Vec[IBufInBankPtr] = RegInit(VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr)))
  private val deqInBankPtrNext = Wire(deqInBankPtr.cloneType)

  val deqPtr = RegInit(0.U.asTypeOf(new IBufPtr))
  val deqPtrNext = Wire(deqPtr.cloneType)

  val enqPtrVec = RegInit(VecInit.tabulate(PredictWidth)(_.U.asTypeOf(new IBufPtr)))
  val enqPtr = enqPtrVec(0)

  val numTryEnq = WireDefault(0.U)
  val numEnq = Mux(io.in.fire, numTryEnq, 0.U)

  // empty and decode can accept insts
  val useBypass = enqPtr === deqPtr && decodeCanAccept
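  // When bypassing, the incoming fetch packet is written straight into outputEntries (see the
  // Bypass section below); only the instructions beyond DecodeWidth are enqueued into ibuf.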

  // The number of insts accepted by decode.
  // Since decode promises to accept insts in order, a priority encoder simplifies the accumulation.
  private val numOut = Wire(UInt(log2Ceil(DecodeWidth).W))
  private val numDeq = numOut

  // count of currently valid entries
  val numValid = distanceBetween(enqPtr, deqPtr)
  val numValidAfterDeq = numValid - numDeq
  // count of valid entries next cycle
  val numValidNext = numValid + numEnq - numDeq
  val allowEnq = RegInit(true.B)
  val numFromFetch = Mux(io.in.valid, PopCount(io.in.bits.enqEnable), 0.U)

  allowEnq := (IBufSize - PredictWidth).U >= numValidNext // Disable when almost full
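  // allowEnq is a register, so the decision is one cycle ahead: reserving PredictWidth free slots
  // guarantees that a full fetch packet arriving in the next cycle can still be accepted.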

  val enqOffset = VecInit.tabulate(PredictWidth)(i => PopCount(io.in.bits.valid.asBools.take(i)))
  val enqData = VecInit.tabulate(PredictWidth)(i => Wire(new IBufEntry).fromFetch(io.in.bits, i))
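  // enqOffset(i) is the number of valid fetch slots before slot i, i.e. the compacted position of
  // slot i among the valid instructions. Illustrative example: valid = b0110 (slots 1 and 2 valid)
  // gives enqOffset = (0, 0, 1, 2), so slot 1 maps to position 0 and slot 2 to position 1.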

  val outputEntriesIsNotFull = !outputEntries(DecodeWidth-1).valid
  when(decodeCanAccept) {
    numOut := Mux(numValid >= DecodeWidth.U, DecodeWidth.U, numValid)
  }.elsewhen(outputEntriesIsNotFull) {
    numOut := Mux(numValid >= DecodeWidth.U - outputEntriesValidNum, DecodeWidth.U - outputEntriesValidNum, numValid)
  }.otherwise {
    numOut := 0.U
  }
  val numBypass = Wire(UInt(log2Ceil(DecodeWidth).W))
  // when using bypass, bypassed entries do not enqueue
  when(useBypass) {
    when(numFromFetch >= DecodeWidth.U) {
      numTryEnq := numFromFetch - DecodeWidth.U
      numBypass := DecodeWidth.U
    } .otherwise {
      numTryEnq := 0.U
      numBypass := numFromFetch
    }
  } .otherwise {
    numTryEnq := numFromFetch
    numBypass := 0.U
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Bypass
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  bypassEntries.zipWithIndex.foreach {
    case (entry, idx) =>
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          io.in.bits.valid(i) &&
            io.in.bits.enqEnable(i) &&
            enqOffset(i) === idx.asUInt
      } // Should be OneHot
      entry.valid := validOH.reduce(_ || _) && io.in.fire && !io.flush
      entry.bits := Mux1H(validOH, enqData)

      // Debug Assertion
      XSError(io.in.valid && PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
  }

  // => Decode Output
  // clean register output
  io.out zip outputEntries foreach {
    case (io, reg) =>
      io.valid := reg.valid
      io.bits := reg.bits.toCtrlFlow
  }
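  // Update the output registers:
  //  - when decode can accept, they are refilled from the bypass path (if the buffer is empty)
  //    or from the dequeue read ports;
  //  - when decode stalls but the last slot is free, existing entries are kept and newly dequeued
  //    entries are appended behind them: slot i keeps its data if i < outputEntriesValidNum,
  //    otherwise it takes deqEntries(i - outputEntriesValidNum).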
  (outputEntries zip bypassEntries).zipWithIndex.foreach {
    case ((out, bypass), i) =>
      when(decodeCanAccept) {
        when(useBypass && io.in.valid) {
          out := bypass
        }.otherwise {
          out := deqEntries(i)
        }
      }.elsewhen(outputEntriesIsNotFull) {
        out.valid := deqEntries(i).valid
        out.bits := Mux(i.U < outputEntriesValidNum, out.bits, VecInit(deqEntries.take(i + 1).map(_.bits))(i.U - outputEntriesValidNum))
      }
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Enqueue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  io.in.ready := allowEnq
  // Data
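  // Each fetch slot i, after compaction by enqOffset, targets the entry pointed to by
  // enqPtrVec(enqOffset(i)). When bypassing, the first DecodeWidth instructions skip the queue,
  // so the remaining ones are written relative to enqPtrVec(enqOffset(i) - DecodeWidth).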
  ibuf.zipWithIndex.foreach {
    case (entry, idx) => {
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          val useBypassMatch = enqOffset(i) >= DecodeWidth.U &&
            enqPtrVec(enqOffset(i) - DecodeWidth.U).value === idx.asUInt
          val normalMatch = enqPtrVec(enqOffset(i)).value === idx.asUInt
          val m = Mux(useBypass, useBypassMatch, normalMatch) // when using bypass, bypassed entries do not enqueue

          io.in.bits.valid(i) && io.in.bits.enqEnable(i) && m
      } // Should be OneHot
      val wen = validOH.reduce(_ || _) && io.in.fire && !io.flush

      // Write port
      // Each IBuffer entry has a PredictWidth -> 1 Mux
      val writeEntry = Mux1H(validOH, enqData)
      entry := Mux(wen, writeEntry, entry)

      // Debug Assertion
      XSError(io.in.valid && PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
    }
  }
  // Pointer maintenance
  when (io.in.fire && !io.flush) {
    enqPtrVec := VecInit(enqPtrVec.map(_ + numTryEnq))
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Dequeue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  val outputEntriesValidNumNext = Wire(UInt(log2Ceil(DecodeWidth).W))
  XSError(outputEntriesValidNumNext > DecodeWidth.U, "Ibuffer: outputEntriesValidNumNext > DecodeWidth.U")
  val validVec = UIntToMask(outputEntriesValidNumNext(log2Ceil(DecodeWidth) - 1, 0), DecodeWidth)
  when(decodeCanAccept) {
    outputEntriesValidNumNext := Mux(useBypass, numBypass, numDeq)
  }.elsewhen(outputEntriesIsNotFull) {
    outputEntriesValidNumNext := outputEntriesValidNum + numDeq
  }.otherwise {
    outputEntriesValidNumNext := outputEntriesValidNum
  }
  // Data
  // Read port
  // 2-stage, IBufNBank * (bankSize -> 1) + IBufNBank -> 1
  // Should be better than IBufSize -> 1 in area, with no significant latency increase
  private val readStage1: Vec[IBufEntry] = VecInit.tabulate(IBufNBank)(
    bankID => Mux1H(UIntToOH(deqInBankPtr(bankID).value), bankedIBufView(bankID))
  )
  for (i <- 0 until DecodeWidth) {
    deqEntries(i).valid := validVec(i)
    deqEntries(i).bits := Mux1H(UIntToOH(deqBankPtrVec(i).value), readStage1)
  }
  // Pointer maintenance
  deqBankPtrVecNext := VecInit(deqBankPtrVec.map(_ + numDeq))
  deqPtrNext := deqPtr + numDeq
  deqInBankPtrNext.zip(deqInBankPtr).zipWithIndex.foreach {
    case ((ptrNext, ptr), idx) => {
      // validVec[k] == bankValid[deqBankPtr + k]
      // So bankValid[n] == validVec[n - deqBankPtr]
      val validIdx = Mux(idx.asUInt >= deqBankPtr.value,
        idx.asUInt - deqBankPtr.value,
        ((idx + IBufNBank).asUInt - deqBankPtr.value)(log2Ceil(IBufNBank) - 1, 0)
      )(log2Ceil(DecodeWidth) - 1, 0)
      val bankAdvance = numOut > validIdx
      ptrNext := Mux(bankAdvance, ptr + 1.U, ptr)
    }
  }
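  // Illustrative example (hypothetical values): with IBufNBank = 4, deqBankPtr = 2 and numOut = 3,
  // banks 2, 3 and 0 get validIdx 0, 1 and 2 and advance their in-bank pointer, while bank 1
  // (validIdx 3) does not.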

  // Flush
  when (io.flush) {
    allowEnq := true.B
    enqPtrVec := enqPtrVec.indices.map(_.U.asTypeOf(new IBufPtr))
    deqBankPtrVec := deqBankPtrVec.indices.map(_.U.asTypeOf(new IBufBankPtr))
    deqInBankPtr := VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr))
    deqPtr := 0.U.asTypeOf(new IBufPtr())
    outputEntries.foreach(_.valid := false.B)
  }.otherwise {
    deqPtr := deqPtrNext
    deqInBankPtr := deqInBankPtrNext
    deqBankPtrVec := deqBankPtrVecNext
  }
  io.full := !allowEnq

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // TopDown
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  val topdown_stage = RegInit(0.U.asTypeOf(new FrontendTopDownBundle))
  topdown_stage := io.in.bits.topdown_info
  when(io.flush) {
    when(io.ControlRedirect) {
      when(io.ControlBTBMissBubble) {
        topdown_stage.reasons(TopDownCounters.BTBMissBubble.id) := true.B
      }.elsewhen(io.TAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.TAGEMissBubble.id) := true.B
      }.elsewhen(io.SCMissBubble) {
        topdown_stage.reasons(TopDownCounters.SCMissBubble.id) := true.B
      }.elsewhen(io.ITTAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.ITTAGEMissBubble.id) := true.B
      }.elsewhen(io.RASMissBubble) {
        topdown_stage.reasons(TopDownCounters.RASMissBubble.id) := true.B
      }
    }.elsewhen(io.MemVioRedirect) {
      topdown_stage.reasons(TopDownCounters.MemVioRedirectBubble.id) := true.B
    }.otherwise {
      topdown_stage.reasons(TopDownCounters.OtherRedirectBubble.id) := true.B
    }
  }


  val dequeueInsufficient = Wire(Bool())
  val matchBubble = Wire(UInt(log2Up(TopDownCounters.NumStallReasons.id).W))
  val deqValidCount = PopCount(validVec.asBools)
  val deqWasteCount = DecodeWidth.U - deqValidCount
  dequeueInsufficient := deqValidCount < DecodeWidth.U
  matchBubble := (TopDownCounters.NumStallReasons.id - 1).U - PriorityEncoder(topdown_stage.reasons.reverse)
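  // matchBubble is the index of the highest-indexed reason bit that is set: PriorityEncoder on the
  // reversed vector finds that bit's position from the end, which is then subtracted back.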

  io.stallReason.reason.map(_ := 0.U)
  for (i <- 0 until DecodeWidth) {
    when(i.U < deqWasteCount) {
      io.stallReason.reason(DecodeWidth - i - 1) := matchBubble
    }
  }

  when(!(deqWasteCount === DecodeWidth.U || topdown_stage.reasons.asUInt.orR)) {
    // should set reason for FetchFragmentationStall
    // topdown_stage.reasons(TopDownCounters.FetchFragmentationStall.id) := true.B
    for (i <- 0 until DecodeWidth) {
      when(i.U < deqWasteCount) {
        io.stallReason.reason(DecodeWidth - i - 1) := TopDownCounters.FetchFragBubble.id.U
      }
    }
  }

  when(io.stallReason.backReason.valid) {
    io.stallReason.reason.map(_ := io.stallReason.backReason.bits)
  }

  // Debug info
  XSError(
    deqPtr.value =/= deqBankPtr.value + deqInBankPtr(deqBankPtr.value).value * IBufNBank.asUInt,
    "Dequeue PTR mismatch"
  )
  XSError(isBefore(enqPtr, deqPtr) && !isFull(enqPtr, deqPtr), "\ndeqPtr is older than enqPtr!\n")

  XSDebug(io.flush, "IBuffer Flushed\n")

  when(io.in.fire) {
    XSDebug("Enqueue:\n")
    XSDebug(p"MASK=${Binary(io.in.bits.valid)}\n")
    for(i <- 0 until PredictWidth){
      XSDebug(p"PC=${Hexadecimal(io.in.bits.pc(i))} ${Hexadecimal(io.in.bits.instrs(i))}\n")
    }
  }

  for (i <- 0 until DecodeWidth) {
    XSDebug(io.out(i).fire,
      p"deq: ${Hexadecimal(io.out(i).bits.instr)} PC=${Hexadecimal(io.out(i).bits.pc)}" +
      p"v=${io.out(i).valid} r=${io.out(i).ready} " +
      p"excpVec=${Binary(io.out(i).bits.exceptionVec.asUInt)} crossPageIPF=${io.out(i).bits.crossPageIPFFix}\n")
  }

  XSDebug(p"numValid: ${numValid}\n")
  XSDebug(p"EnqNum: ${numEnq}\n")
  XSDebug(p"DeqNum: ${numDeq}\n")

  val afterInit = RegInit(false.B)
  val headBubble = RegInit(false.B)
  when (io.in.fire) { afterInit := true.B }
  when (io.flush) {
    headBubble := true.B
  } .elsewhen(numValid =/= 0.U) {
    headBubble := false.B
  }
  val instrHungry = afterInit && (numValid === 0.U) && !headBubble

  QueuePerf(IBufSize, numValid, !allowEnq)
  XSPerfAccumulate("flush", io.flush)
  XSPerfAccumulate("hungry", instrHungry)

  val ibuffer_IDWidth_hvButNotFull = afterInit && (numValid =/= 0.U) && (numValid < DecodeWidth.U) && !headBubble
  XSPerfAccumulate("ibuffer_IDWidth_hvButNotFull", ibuffer_IDWidth_hvButNotFull)
  /*
  XSPerfAccumulate("ICacheMissBubble", Mux(matchBubbleVec(TopDownCounters.ICacheMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ITLBMissBubble", Mux(matchBubbleVec(TopDownCounters.ITLBMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ControlRedirectBubble", Mux(matchBubbleVec(TopDownCounters.ControlRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("MemVioRedirectBubble", Mux(matchBubbleVec(TopDownCounters.MemVioRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("OtherRedirectBubble", Mux(matchBubbleVec(TopDownCounters.OtherRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("BTBMissBubble", Mux(matchBubbleVec(TopDownCounters.BTBMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("OverrideBubble", Mux(matchBubbleVec(TopDownCounters.OverrideBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FtqUpdateBubble", Mux(matchBubbleVec(TopDownCounters.FtqUpdateBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FtqFullStall", Mux(matchBubbleVec(TopDownCounters.FtqFullStall.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FetchFragmentBubble",
  Mux(deqWasteCount === DecodeWidth.U || topdown_stage.reasons.asUInt.orR, 0.U, deqWasteCount))
  XSPerfAccumulate("TAGEMissBubble", Mux(matchBubbleVec(TopDownCounters.TAGEMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("SCMissBubble", Mux(matchBubbleVec(TopDownCounters.SCMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ITTAGEMissBubble", Mux(matchBubbleVec(TopDownCounters.ITTAGEMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("RASMissBubble", Mux(matchBubbleVec(TopDownCounters.RASMissBubble.id), deqWasteCount, 0.U))
  */

  val perfEvents = Seq(
    ("IBuffer_Flushed  ", io.flush                                                                     ),
    ("IBuffer_hungry   ", instrHungry                                                                  ),
    ("IBuffer_1_4_valid", (numValid >  (0*(IBufSize/4)).U) & (numValid < (1*(IBufSize/4)).U)   ),
    ("IBuffer_2_4_valid", (numValid >= (1*(IBufSize/4)).U) & (numValid < (2*(IBufSize/4)).U)   ),
    ("IBuffer_3_4_valid", (numValid >= (2*(IBufSize/4)).U) & (numValid < (3*(IBufSize/4)).U)   ),
    ("IBuffer_4_4_valid", (numValid >= (3*(IBufSize/4)).U) & (numValid < (4*(IBufSize/4)).U)   ),
    ("IBuffer_full     ",  numValid.andR                                                           ),
    ("Front_Bubble     ", PopCount((0 until DecodeWidth).map(i => io.out(i).ready && !io.out(i).valid)))
  )
  generatePerfEvent()
}