// xref: /XiangShan/src/main/scala/xiangshan/frontend/FrontendBundle.scala (revision cb4f77ce497f499cefbac1624a84fbc34e49308b)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
16package xiangshan.frontend
17
18import chipsalliance.rocketchip.config.Parameters
19import chisel3._
20import chisel3.util._
21import chisel3.experimental.chiselName
22import xiangshan._
23import xiangshan.frontend.icache.HasICacheParameters
24import utils._
25import scala.math._
26
@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  // Fetch request sent from FTQ to the IFU/ICache: describes one fetch block
  // starting at startAddr, possibly spilling into the line at nextlineStart.
  val startAddr       = UInt(VAddrBits.W)  // start PC of this fetch block
  val nextlineStart   = UInt(VAddrBits.W)  // start address of the following cache line
  // val fallThruError   = Bool()
  val ftqIdx          = new FtqPtr         // FTQ entry that issued this request
  // offset of the predicted control-flow instruction within the block, if any
  val ftqOffset       = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val nextStartAddr   = UInt(VAddrBits.W)  // predicted start address of the next fetch block
  val oversize        = Bool()

  // The block crosses a cache line iff startAddr sits in the upper half of its
  // line (MSB of the in-line block offset is set).
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  // Populate this bundle from an FTQ PC-bundle. When the stored prediction has a
  // fall-through error, recompute nextStartAddr as startAddr + one full fetch
  // block (increment the bit just above the in-block offset, carry selects
  // between the current and next line's upper bits). When there is no error,
  // nextStartAddr is NOT assigned here — presumably the caller drives it; verify
  // against call sites.
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this.oversize := b.oversize
    when (b.fallThruError) {
      // If the "increment bit" of startAddr is already 1, adding a block carries
      // into the upper bits, so take them from the next line's address.
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W), // flip = add one fetch block
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }
  override def toPrintable: Printable = {
    // Fixed: a separator space was missing between the [next] value and "[tgt]",
    // which glued the two fields together in debug output.
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p" [tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}
61
// Pre-decode writeback from IFU to FTQ: per-slot PC and pre-decode info for one
// fetch block, plus any redirect information discovered during pre-decode.
class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd           = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx       = new FtqPtr
  val ftqOffset    = UInt(log2Ceil(PredictWidth).W)
  // NOTE(review): from the names, misOffset marks a mispredicted slot and
  // cfiOffset the detected control-flow instruction — confirm against IFU users.
  val misOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target       = UInt(VAddrBits.W)
  val jalTarget    = UInt(VAddrBits.W)
  val instrRange   = Vec(PredictWidth, Bool())  // which slots belong to this fetch block
}
73
// Ftq send req to Prefetch
// Carries only the target address to prefetch.
class PrefetchRequest(implicit p:Parameters) extends XSBundle {
  val target          = UInt(VAddrBits.W)
}
78
// Decoupled prefetch-request channel from FTQ.
// NOTE(review): class name misspells "Prefetch" — kept as-is since callers
// elsewhere reference this identifier.
class FtqPrefechBundle(implicit p:Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}
82
// Fetch-to-instruction-buffer payload: up to PredictWidth instructions with
// their pre-decode info, PCs, and per-slot exception/trigger flags.
class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))   // raw instruction words
  val valid     = UInt(PredictWidth.W)            // per-slot valid bitmask
  val enqEnable = UInt(PredictWidth.W)            // per-slot enqueue-enable bitmask
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))  // folded PC for memory dependence prediction
  val ftqPtr       = new FtqPtr
  val ftqOffset    = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf          = Vec(PredictWidth, Bool())    // instruction page fault
  val acf          = Vec(PredictWidth, Bool())    // access fault
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered    = Vec(PredictWidth, new TriggerCf)
}
97
98// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
99//   val io = IO(new Bundle {
100//     val set
101//   })
102// }
103// Move from BPU
// Base class for global branch history representations. Implementations must
// produce an updated history from per-branch valid and taken masks.
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}
107
// Global history kept as a flat shift register: each update shifts in the
// taken/not-taken outcome of the branches in one prediction block.
class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  // Shift `shift` zero bits into `hist`, then OR in `taken` as the newest bit.
  // Note: only the lowest bit can carry the taken outcome, so at most one
  // taken branch is recorded per update.
  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  // Shift once per branch slot up to (and including) the first taken branch,
  // or up to the last valid branch if none is taken.
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    // Index one past the last valid branch (numBr..0, scanning valids from the
    // highest slot down; trailing true.B makes the mux total).
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    // Index one past the first taken branch (leading false.B offsets by one).
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}
143
144// circular global history pointer
// Pointer into the circular global-history buffer, HistoryLength entries deep.
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
  // cloneType override required by the Chisel version this codebase targets.
  override def cloneType = (new CGHPtr).asInstanceOf[this.type]
}
// Global history kept as a circular buffer of single outcome bits, indexed by
// a CGHPtr elsewhere. update() is a stub here: it returns `this` unchanged —
// the real pointer/buffer update is presumably done by the owner of the buffer.
class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}
157
// A `len`-bit slice of global history folded (by repeated XOR) down to
// `compLen` bits, updatable by up to `max_update_num` new outcomes per cycle.
// Folding lets long histories index small tables; the update below maintains
// the folded value incrementally instead of re-folding the whole history.
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def info = (len, compLen)
  // Positions (in the raw history) of the bits that age out on this update,
  // newest-first: len-1, len-2, ...
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  // Where each aged-out bit currently sits inside the folded value.
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  // Whether the bit was folded (wrapped) at least once; only wrapped bits are
  // actually XOR-ed into the folded value and must be XOR-ed out on exit.
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  // Rotate-left by a compile-time shamt, implemented as a slice of src##src.
  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }


  // Incrementally update the folded history with `num` new outcomes (at most
  // one taken, carried in `taken`), reading aged-out raw bits from `ghr`.
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    // Each bitset is a list of (bit position, value); all values landing on the
    // same position are XOR-ed together.
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        // Elaboration-time sanity prints only: deep XOR trees or unassigned
        // bits indicate a mis-parameterized fold, but do not stop elaboration.
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of foldest history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bits $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }
    val oldest_bits = get_oldest_bits_from_ghr(ghr, histPtr)

    // mask off bits that do not update
    val oldest_bits_masked = oldest_bits.zipWithIndex.map{
      case (ob, i) => ob && (i < num).B
    }
    // if a bit does not wrap around, it should not be xored when it exits
    // (Seq[Boolean] used directly as the Int => Boolean filter predicate.)
    val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

    // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

    // only the last bit could be 1, as we have at most one taken branch at a time
    val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
    // if a bit does not wrap around, newest bits should not be xored onto it either
    val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

    // println(f"new bits set ${newest_bits_set.map(_._1)}")
    //
    // Clear folded bits whose underlying raw bit shifts out entirely this update.
    val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
      case (fb, i) => fb && !(num >= (len-i)).B
    })
    val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))


    // histLen too short to wrap around
    val new_folded_hist =
      if (len <= compLen) {
        // No folding needed: behave like a plain shift register of len bits.
        ((folded_hist << num) | taken)(compLen-1,0)
        // circular_shift_left(max_update_num)(Cat(Reverse(newest_bits_masked), folded_hist(compLen-max_update_num-1,0)), num)
      } else {
        // do xor then shift
        val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
        circular_shift_left(xored, num)
      }
    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}
251
// Splits a virtual address into tag / index / instruction-offset fields for a
// predictor table of 2^idxBits sets spread across `banks` banks.
class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle{
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  // Reinterpret a raw address as this tag/idx/offset layout.
  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // Bank = low bits of the index; index-within-bank = remaining high bits.
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}
265
// Common interface every prediction flavor (minimal uBTB-based or full
// FTB-based) must expose to the BPU pipeline.
trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]  // offset of the taken cfi, if any
  def target(pc: UInt): UInt              // predicted next-block start address
  def lastBrPosOH: Vec[Bool]              // one-hot position of the last branch
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]           // per-branch "shift global history" flags
  def fallThruError: Bool
  val oversize: Bool
}
// Lightweight prediction backed by a micro-BTB entry, plus a valid flag.
class MinimalBranchPrediction(implicit p: Parameters) extends NewMicroBTBEntry with BasicPrediction {
  val valid = Bool()
  def cfiIndex = {
    val res = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    res.valid := taken && valid
    // When invalid, force bits to all-ones (i.e. PredictWidth-1).
    res.bits := cfiOffset | Fill(res.bits.getWidth, !valid)
    res
  }
  def target(pc: UInt) = nextAddr
  def lastBrPosOH: Vec[Bool] = VecInit(brNumOH.asBools())
  def brTaken = takenOnBr
  // Shift history for every branch slot strictly before the last-branch position.
  def shouldShiftVec: Vec[Bool] = VecInit((0 until numBr).map(i => lastBrPosOH.drop(i+1).reduce(_||_)))
  // The minimal predictor never reports a fall-through error.
  def fallThruError: Bool = false.B

  // Fill from a uBTB entry; on a miss (valid low) fall back to sequential
  // next-block values (pc + fetch width, no taken, brNumOH = "no branch").
  def fromMicroBTBEntry(valid: Bool, entry: NewMicroBTBEntry, pc: UInt) = {
    this.valid := valid
    this.nextAddr := Mux(valid, entry.nextAddr, pc + (FetchWidth*4).U)
    this.cfiOffset := entry.cfiOffset | Fill(cfiOffset.getWidth, !valid)
    this.taken := entry.taken && valid
    this.takenOnBr := entry.takenOnBr && valid
    this.brNumOH := Mux(valid, entry.brNumOH, 1.U(3.W))
    this.oversize := entry.oversize && valid
  }
}
@chiselName
// Full prediction derived from an FTB entry: numBr conditional-branch slots
// plus one tail slot that holds either a jump or (when sharing) another branch.
class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()
  val oversize = Bool()

  // Type of the instruction in the tail slot.
  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val is_br_sharing = Bool()  // tail slot holds a branch instead of a jump

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  // Valid mask over branches: the tail slot counts only when it shares as a branch.
  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  // Per-slot taken mask; a non-sharing (jump) tail slot is always "taken".
  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  // (shift for a valid branch unless an earlier branch was already taken)
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  // One-hot: position 0 means "no branch" (miss or empty entry); position i+1
  // marks branch i as the last one that matters this prediction.
  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  // Predicted target: first taken slot's target, else fall-through on a hit,
  // else sequential next block (pc + fetch width) on a miss.
  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

  def fallThruError: Bool = hit && fallThroughErr

  // Taken on the tail-slot jump with no earlier branch taken (and not sharing).
  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  // Fill this prediction from an FTB entry, checking the entry's fall-through
  // address for sanity: it must lie after the start and within one fetch block.
  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    oversize := entry.oversize
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

    // Compare only the low (in-block) address bits, with a carry bit guarding wrap.
    val startLower        = Cat(0.U(1.W),    pc(instOffsetBits+log2Ceil(PredictWidth), instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry || (endLowerwithCarry - startLower) > (PredictWidth+1).U
    // On error, fall back to the sequential next block.
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}
419
@chiselName
// One pipeline stage's complete prediction: either the minimal (uBTB) or the
// full (FTB) flavor, selected by is_minimal, plus speculative state snapshots
// (folded history, history pointer, RAS) needed for later recovery.
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  // def full_pred_info[T <: Data](x: T) = if (is_minimal) None else Some(x)
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()  // this stage overrides an earlier stage's prediction
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val is_minimal = Bool()
  val minimal_pred = new MinimalBranchPrediction
  val full_pred = new FullBranchPrediction


  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
  // val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry()

  // Each accessor muxes between the two prediction flavors on is_minimal.
  def target(pc: UInt) = Mux(is_minimal, minimal_pred.target(pc),     full_pred.target(pc))
  def cfiIndex         = Mux(is_minimal, minimal_pred.cfiIndex,       full_pred.cfiIndex)
  def lastBrPosOH      = Mux(is_minimal, minimal_pred.lastBrPosOH,    full_pred.lastBrPosOH)
  def brTaken          = Mux(is_minimal, minimal_pred.brTaken,        full_pred.brTaken)
  def shouldShiftVec   = Mux(is_minimal, minimal_pred.shouldShiftVec, full_pred.shouldShiftVec)
  def oversize         = Mux(is_minimal, minimal_pred.oversize,       full_pred.oversize)
  def fallThruError    = Mux(is_minimal, minimal_pred.fallThruError,  full_pred.fallThruError)

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    folded_hist.display(cond)
    full_pred.display(cond)
    ftb_entry.display(cond)
  }
}
463
@chiselName
// The BPU's multi-stage response: s1 is the earliest (always-valid) guess,
// s2/s3 are later, more accurate stages that may redirect it.
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

  // Latest redirecting stage wins: s3 over s2 over s1.
  def selectedResp ={
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    // println("is minimal: ", res.is_minimal)
    res
  }
  // Same priority as selectedResp, but yields the stage identifier.
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}
489
// BPU -> FTQ payload: the staged prediction response plus predictor metadata
// that the FTQ stores and later returns on update.
class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}
493
object BpuToFtqBundle {
  // Wrap a BranchPredictionResp into a BpuToFtqBundle; meta is intentionally
  // left DontCare here — presumably driven separately by the caller.
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val e = Wire(new BpuToFtqBundle())
    e.s1 := resp.s1
    e.s2 := resp.s2
    e.s3 := resp.s3

    e.meta := DontCare
    e
  }
}
505
// Training packet sent back to the predictors when a fetch block retires or
// is redirected: the original prediction bundle plus resolution information.
class BranchPredictionUpdate(implicit p: Parameters) extends BranchPredictionBundle with HasBPUConst {
  val mispred_mask = Vec(numBr+1, Bool())  // per-slot (branches + tail) mispredict flags
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()                   // update reuses an existing FTB entry
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  // Restore the speculative snapshot fields from an FTQ redirect SRAM entry.
  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    folded_hist := entry.folded_hist
    histPtr := entry.histPtr
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    this
  }

  override def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    super.display(cond)
    XSDebug(cond, p"--------------------------------------------\n")
  }
}
533
// A Redirect specialized for branch-prediction recovery; adds only debug
// printing on top of the base Redirect fields (cfiUpdate, robIdx, ftqIdx, ...).
class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  // Dump all redirect fields when cond holds (debug builds only).
  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}
570