xref: /XiangShan/src/main/scala/xiangshan/frontend/FrontendBundle.scala (revision c49b0e7fe673d974e1547da3c04423fb071e8910)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16package xiangshan.frontend
17
18import chipsalliance.rocketchip.config.Parameters
19import chisel3._
20import chisel3.util._
21import chisel3.experimental.chiselName
22import xiangshan._
23import utils._
24import scala.math._
25
26@chiselName
@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle {
  // Fetch request from FTQ to IFU: a fetch block starting at startAddr,
  // possibly crossing a cacheline boundary into nextlineStart.
  val startAddr       = UInt(VAddrBits.W)
  // BUGFIX: this field was commented out while fromFtqPcBundle, fromBpuResp
  // and toPrintable below still reference it, which does not compile.
  // Restored in its original position (backward compatible: only adds a member).
  val fallThruAddr    = UInt(VAddrBits.W)
  val crossCacheline  = Bool()
  val nextlineStart   = UInt(VAddrBits.W)
  val fallThruError   = Bool()                // fall-through address was predicted wrong
  val ftqIdx          = new FtqPtr
  val ftqOffset       = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target          = UInt(VAddrBits.W)
  val oversize        = Bool()

  /** Fill the pc-related fields from an FTQ PC read port.
    * Other fields (target, ftqIdx, ftqOffset, ...) are deliberately
    * left unassigned here.
    */
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    val ftError = b.fallThroughError()
    this.startAddr := b.startAddr
    this.fallThruError := ftError
    // on a fall-through error, fall back to the start of the next range
    this.fallThruAddr := Mux(ftError, b.nextRangeAddr, b.getFallThrough())
    this.oversize := b.oversize
    this
  }

  /** Bypass path: fill from a BPU response.
    * Only used to bypass, so some fields remain unchanged.
    */
  def fromBpuResp(resp: BranchPredictionBundle) = {
    this.startAddr := resp.pc
    this.target := resp.target
    this.ftqOffset := resp.genCfiIndex
    this.fallThruAddr := resp.preds.fallThroughAddr
    this.oversize := resp.preds.oversize
    this
  }

  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [pft] ${Hexadecimal(fallThruAddr)}" +
      p"[tgt] ${Hexadecimal(target)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}
61
// Writeback from the pre-decoder to the FTQ, covering one fetch block of up
// to PredictWidth instruction slots.
class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))    // pc of each slot
  val pd           = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx       = new FtqPtr                              // which FTQ entry this writeback belongs to
  val ftqOffset    = UInt(log2Ceil(PredictWidth).W)
  // NOTE(review): presumably the first mispredicted / taken-cfi slot offsets
  // within the block — confirm against the IFU writeback logic.
  val misOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target       = UInt(VAddrBits.W)
  val jalTarget    = UInt(VAddrBits.W)
  val instrRange   = Vec(PredictWidth, Bool())               // valid-instruction mask for the block
}
73
// Placeholder bundle: currently empty, no fields defined yet.
class Exception(implicit p: Parameters) extends XSBundle {

}
77
// Payload from the fetch stage (IFU) to the instruction buffer: one fetch
// block of up to PredictWidth instructions with per-slot metadata.
class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))   // raw instruction bits per slot
  val valid     = UInt(PredictWidth.W)            // per-slot valid bitmask
  val enqEnable = UInt(PredictWidth.W)            // per-slot enqueue-enable bitmask
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr       = new FtqPtr
  val ftqOffset    = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  // NOTE(review): ipf/acf presumably flag instruction page fault / access
  // fault per slot — confirm against the ICache exception path.
  val ipf          = Vec(PredictWidth, Bool())
  val acf          = Vec(PredictWidth, Bool())
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered    = Vec(PredictWidth, new TriggerCf)
}
92
93// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
94//   val io = IO(new Bundle {
95//     val set
96//   })
97// }
98// Move from BPU
// Move from BPU
// Common interface for global-history representations: given per-branch
// valids and the actually-taken mask, produce the updated history.
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}
102
// Global history kept as a plain shift register of HistoryLength bits.
class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  // Shift `shift` new bits into `hist`; only the youngest inserted bit can
  // carry the taken flag.
  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  // Derive the shift amount from the branch slots: shift by the number of
  // branches up to and including the first taken one (or all valid branches
  // if none is taken).
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    // index just past the last valid branch slot (numBr..0, reversed priority)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    // index of the first taken branch (offset by the prepended false.B)
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}
138
139// circular global history pointer
// circular global history pointer
// Queue-style pointer over a HistoryLength-entry circular history buffer.
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
  // required by (pre-3.5) Chisel for bundles with constructor parameters
  override def cloneType = (new CGHPtr).asInstanceOf[this.type]
}
// Global history kept as a circular buffer indexed by CGHPtr.
class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  // NOTE(review): stub — returns itself unchanged; the actual update is
  // performed elsewhere (the buffer is maintained outside this bundle).
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}
152
// Compressed (folded) global history: a `len`-bit history xor-folded into
// `compLen` bits, updatable by up to `max_update_num` new bits per cycle.
// The real history lives in an external circular buffer (ghr/histPtr);
// this bundle only maintains the folded form incrementally.
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def info = (len, compLen)
  // positions (in the unfolded history) of the bits about to be retired
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  // where each of those bits lands inside the folded value
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  // whether each oldest bit actually wrapped (i.e. was folded in by xor)
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  // rotate `src` left by the compile-time constant `shamt`
  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }


  // Incrementally compute the folded history after inserting `num` new bits
  // (only the youngest of which may be the taken bit) and retiring the
  // matching oldest bits.
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      // collect, per output bit position, every contribution to be xored
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of foldest history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bits $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }
    val oldest_bits = get_oldest_bits_from_ghr(ghr, histPtr)

    // mask off bits that do not update
    val oldest_bits_masked = oldest_bits.zipWithIndex.map{
      case (ob, i) => ob && (i < num).B
    }
    // if a bit does not wrap around, it should not be xored when it exits
    val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

    // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

    // only the last bit could be 1, as we have at most one taken branch at a time
    val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
    // if a bit does not wrap around, newest bits should not be xored onto it either
    val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

    // println(f"new bits set ${newest_bits_set.map(_._1)}")
    //
    // bits shifted out of the `len`-bit window are cleared before the xor
    val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
      case (fb, i) => fb && !(num >= (len-i)).B
    })
    val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))


    // histLen too short to wrap around
    val new_folded_hist =
      if (len <= compLen) {
        ((folded_hist << num) | taken)(compLen-1,0)
        // circular_shift_left(max_update_num)(Cat(Reverse(newest_bits_masked), folded_hist(compLen-max_update_num-1,0)), num)
      } else {
        // do xor then shift
        val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
        circular_shift_left(xored, num)
      }
    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }

  // def update(ghr: Vec[Bool], histPtr: CGHPtr, valids: Vec[Bool], takens: Vec[Bool]): FoldedHistory = {
  //   val fh = WireInit(this)
  //   require(valids.length == max_update_num)
  //   require(takens.length == max_update_num)
  //   val last_valid_idx = PriorityMux(
  //     valids.reverse :+ true.B,
  //     (max_update_num to 0 by -1).map(_.U(log2Ceil(max_update_num+1).W))
  //     )
  //   val first_taken_idx = PriorityEncoder(false.B +: takens)
  //   val smaller = Mux(last_valid_idx < first_taken_idx,
  //     last_valid_idx,
  //     first_taken_idx
  //   )
  //   // update folded_hist
  //   fh.update(ghr, histPtr, smaller, takens.reduce(_||_))
  // }
  // println(f"folded hist original length: ${len}, folded len: ${folded_len} " +
  //   f"oldest bits' pos in folded: ${oldest_bit_pos_in_folded}")


}
267
// Address view used by predictor tables: {tag, idx, offset} fields carved
// out of a virtual address, with optional banking on the low idx bits.
class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle{
  // tag takes whatever is left of the address above idx and instruction offset
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  // Reinterpret a raw address as this {tag, idx, offset} layout.
  def fromUInt(x: UInt) = {
    val asVAddr = x.asTypeOf(UInt(VAddrBits.W))
    asVAddr.asTypeOf(this)
  }
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // bank = low-order idx bits; degenerates to 0 when there is a single bank
  def getBank(x: UInt) = {
    if (banks > 1) {
      val index = getIdx(x)
      index(log2Up(banks) - 1, 0)
    } else 0.U
  }
  // index within a bank = idx bits above the bank-select bits
  def getBankIdx(x: UInt) = {
    val index = getIdx(x)
    if (banks > 1) index(idxBits - 1, log2Up(banks)) else index
  }
}
281
282@chiselName
@chiselName
// Per-fetch-block prediction produced by the BPU: per-slot valids, targets
// and offsets, plus jump-type flags for the tail slot.
class BranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val oversize = Bool()

  // type of the instruction occupying the tail slot
  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val is_br_sharing = Bool()  // tail slot holds a conditional branch instead of a jump

  // val call_is_rvc = Bool()
  val hit = Bool()

  // all slots except the last are dedicated branch slots
  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  // valid mask over conditional branches; with shareTailSlot, the tail slot
  // counts as a branch only when it is shared (is_br_sharing)
  def br_valids = {
    VecInit(
      if (shareTailSlot)
        br_slot_valids :+ (tail_slot_valid && is_br_sharing)
      else
        br_slot_valids
    )
  }

  // per-slot "predicted taken" mask; an unshared tail slot (a jump) is
  // unconditionally taken when valid
  def taken_mask_on_slot = {
    VecInit(
      if (shareTailSlot)
        (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
          tail_slot_valid && (
            is_br_sharing && br_taken_mask.last || !is_br_sharing
          )
        )
      else
        (br_slot_valids zip br_taken_mask).map{ case (v, t) => v && t } :+
        tail_slot_valid
    )
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  // Fill this prediction from an FTB entry read at pc; the optional
  // last_stage parameter is currently unused here.
  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    fallThroughAddr := entry.getFallThrough(pc)
    oversize := entry.oversize
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing
  }

  // Fill from a micro-BTB entry; the uBTB does not distinguish jump types,
  // so those flags are left unconnected.
  def fromMicroBTBEntry(entry: MicroBTBEntry) = {
    slot_valids := entry.slot_valids
    targets := entry.targets
    jalr_target := DontCare
    offsets := entry.offsets
    fallThroughAddr := entry.fallThroughAddr
    oversize := entry.oversize
    is_jal := DontCare
    is_jalr := DontCare
    is_call := DontCare
    is_ret := DontCare
    is_br_sharing := entry.last_is_br
  }
  // override def toPrintable: Printable = {
  //   p"-----------BranchPrediction----------- " +
  //     p"[taken_mask] ${Binary(taken_mask.asUInt)} " +
  //     p"[is_br] ${Binary(is_br.asUInt)}, [is_jal] ${Binary(is_jal.asUInt)} " +
  //     p"[is_jalr] ${Binary(is_jalr.asUInt)}, [is_call] ${Binary(is_call.asUInt)}, [is_ret] ${Binary(is_ret.asUInt)} " +
  //     p"[target] ${Hexadecimal(target)}}, [hit] $hit "
  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}
370
371@chiselName
@chiselName
// Full per-stage BPU output: the prediction itself plus the speculative
// state (histories, RAS, FTB entry) needed for recovery and update.
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle with HasBPUConst with BPUUtils{
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val preds = new BranchPrediction

  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val histPtr = new CGHPtr
  val phist = UInt(PathHistoryLength.W)
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
  val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry() // TODO: Send this entry to ftq

  // per-slot taken mask, gated by the overall hit
  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(preds.taken_mask_on_slot.map(_ && preds.hit))
  }

  // len numBr
  // taken mask restricted to conditional branches; with shareTailSlot the
  // tail slot contributes only when it is actually a shared branch
  def real_br_taken_mask(): Vec[Bool] = {
    if (shareTailSlot)
      VecInit(
        preds.taken_mask_on_slot.map(_ && preds.hit).init :+
        (preds.br_taken_mask.last && preds.tail_slot_valid && preds.is_br_sharing && preds.hit)
      )
    else
      VecInit(real_slot_taken_mask().init)
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(preds.br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  // one-hot over numBr+1: position 0 means "no branch counted" (miss or no
  // valid branches); position i+1 marks branch i as the last counted branch
  def lastBrPosOH =
    (!preds.hit || !preds.br_valids.reduce(_||_)) +: // not hit or no brs in entry
    VecInit((0 until numBr).map(i =>
      preds.br_valids(i) &&
      !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front it
      (real_br_taken_mask()(i) || !preds.br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
      preds.hit
    ))

  // number of branches consumed: up to the first taken one, capped by the
  // last valid slot
  def br_count(): UInt = {
    val last_valid_idx = PriorityMux(
      preds.br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
      )
    val first_taken_idx = PriorityEncoder(false.B +: real_br_taken_mask)
    Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
  }

  // taken on the (non-branch) jump in the tail slot, with no earlier takens
  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !preds.is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && preds.is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && preds.is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && preds.is_jalr

  // Predicted next pc: target of the first taken slot, the fall-through
  // address if nothing is taken, or pc + fetch width on an FTB miss.
  def target(): UInt = {
    val targetVecOnHit = preds.targets :+ preds.fallThroughAddr
    val targetOnNotHit = pc + (FetchWidth * 4).U
    val taken_mask = preds.taken_mask_on_slot
    val selVecOHOnHit =
      taken_mask.zipWithIndex.map{ case (t, i) => !taken_mask.take(i).fold(false.B)(_||_) && t} :+ !taken_mask.asUInt.orR
    val targetOnHit = Mux1H(selVecOHOnHit, targetVecOnHit)
    Mux(preds.hit, targetOnHit, targetOnNotHit)
  }

  // True when the predicted target (same selection as target()) differs
  // from addr.
  def targetDiffFrom(addr: UInt) = {
    val targetVec = preds.targets :+ preds.fallThroughAddr :+ (pc + (FetchWidth*4).U)
    val taken_mask = preds.taken_mask_on_slot
    val selVecOH =
      taken_mask.zipWithIndex.map{ case (t, i) => !taken_mask.take(i).fold(false.B)(_||_) && t && preds.hit} :+
      (!taken_mask.asUInt.orR && preds.hit) :+ !preds.hit
    val diffVec = targetVec map (_ =/= addr)
    Mux1H(selVecOH, diffVec)
  }

  // Offset of the first taken cfi within the fetch block, as a valid+bits pair.
  def genCfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), preds.offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    folded_hist.display(cond)
    preds.display(cond)
    ftb_entry.display(cond)
  }
}
477
478@chiselName
@chiselName
// BPU response carrying both pipeline stages; s2 takes precedence over s1
// whenever it is valid and raises a redirect.
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle()
  val s2 = new BranchPredictionBundle()

  // The selected prediction: s2 wins when valid and redirecting, else s1.
  // (Equivalent to the two-entry PriorityMux with s1 as the fallback.)
  def selectedResp = {
    val s2Overrides = s2.valid && s2.hasRedirect
    Mux(s2Overrides, s2, s1)
  }
  // Stage identifier matching selectedResp's choice.
  def selectedRespIdx = {
    val s2Overrides = s2.valid && s2.hasRedirect
    Mux(s2Overrides, BP_S2, BP_S1)
  }
  def lastStage = s2
}
496
// BPU-to-FTQ message: the full prediction response plus predictor metadata.
class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}
500
object BpuToFtqBundle {
  /** Build a BpuToFtqBundle from a plain BranchPredictionResp.
    * Both prediction stages are copied through; `meta` is intentionally
    * left unconnected (DontCare) for the caller to fill in.
    */
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val bundle = Wire(new BpuToFtqBundle())
    bundle.meta := DontCare
    bundle.s1 := resp.s1
    bundle.s2 := resp.s2
    bundle
  }
}
511
// Update request sent back to the predictors after a fetch block resolves:
// extends the prediction bundle with resolution results.
class BranchPredictionUpdate(implicit p: Parameters) extends BranchPredictionBundle with HasBPUConst {
  val mispred_mask = Vec(numBr+1, Bool())   // per-slot (brs + tail) misprediction flags
  val false_hit = Bool()                    // FTB reported hit but the entry was wrong
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()                    // update reuses an existing FTB entry
  val meta = UInt(MaxMetaLength.W)          // predictor metadata captured at predict time
  val full_target = UInt(VAddrBits.W)

  // Restore the speculative state fields from an FTQ redirect SRAM entry;
  // other inherited fields are left untouched.
  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    folded_hist := entry.folded_hist
    histPtr := entry.histPtr
    phist := entry.phist
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    specCnt := entry.specCnt
    this
  }

  override def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    super.display(cond)
    XSDebug(cond, p"--------------------------------------------\n")
  }
}
538
// Redirect generated by the branch prediction pipeline; adds only debug
// printing on top of the inherited Redirect fields (cfiUpdate, robIdx,
// ftqIdx, ftqOffset, stFtqIdx, stFtqOffset).
class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  // Dump all redirect fields under `cond` for simulation debugging.
  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}
575