xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision eeb5ff92e228cc529156e0533d0f8c330c1d7bcb)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.frontend
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.experimental.chiselName
22import chisel3.util._
23import xiangshan._
24import utils._
25
26import scala.math.min
27
trait HasBPUConst extends HasXSParameter with HasIFUConst {
  // Upper bound on predictor metadata width carried per prediction
  val MaxMetaLength = 1024 // TODO: Reduce meta length
  val MaxBasicBlockSize = 32
  // Local-history length
  val LHistoryLength = 32
  // Number of conditional-branch slots per prediction block
  val numBr = 2
  // Use the full branch-prediction stack (Composer) instead of FakePredictor
  val useBPD = true
  val useLHist = true
  // When true the tail slot is shared (branch or jump), so only numBr-1
  // slots are branch-only
  val shareTailSlot = true
  val numBrSlot = if (shareTailSlot) numBr-1 else numBr
  val totalSlot = numBrSlot + 1

  // 2-bit tags identifying which pipeline stage produced a prediction
  def BP_S1 = 1.U(2.W)
  def BP_S2 = 2.U(2.W)
  def BP_S3 = 3.U(2.W)


  val debug = true
  val resetVector = 0x80000000L//TODO: set reset vec
  // TODO: Replace log2Up by log2Ceil
}
48
trait HasBPUParameter extends HasXSParameter with HasBPUConst {
  // BPU debug logging is only meaningful off-FPGA with perf-debug enabled.
  // (The former leading `true &&` was a redundant identity operand.)
  val BPUDebug = !env.FPGAPlatform && env.EnablePerfDebug
  val EnableCFICommitLog = true
  // NOTE(review): name keeps the historical typo ("Enbale") because this is
  // a public trait member that other files may reference; renaming it would
  // break source compatibility.
  val EnbaleCFIPredLog = true
  // Timing records are only collected when one of the CFI logs is on,
  // and never on FPGA
  val EnableBPUTimeRecord = (EnableCFICommitLog || EnbaleCFIPredLog) && !env.FPGAPlatform
  val EnableCommit = false
}
56
// Per-component enable switches for the BPU.
// NOTE: field order fixes the hardware bit layout of this Bundle —
// do not reorder.
class BPUCtrl(implicit p: Parameters) extends XSBundle {
  val ubtb_enable = Bool() // micro-BTB
  val btb_enable  = Bool() // branch target buffer
  val bim_enable  = Bool() // bimodal predictor
  val tage_enable = Bool() // TAGE predictor
  val sc_enable   = Bool() // statistical corrector
  val ras_enable  = Bool() // return address stack
  val loop_enable = Bool() // loop predictor
}
66
trait BPUUtils extends HasXSParameter {
  /** Rotate a `len`-bit value left by `shamt` bits. */
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val rotated = Wire(UInt(len.W))
    val hiPart = source << shamt
    val loPart = source >> (len.U - shamt)
    rotated := hiPart | loPart
    rotated
  }

  /** Rotate a `len`-bit value right by `shamt` bits. */
  def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = {
    val rotated = Wire(UInt(len.W))
    val hiPart = source << (len.U - shamt)
    val loPart = source >> shamt
    rotated := hiPart | loPart
    rotated
  }

  // To be verified
  /** Saturating update of an unsigned `len`-bit counter:
    * increment on `taken`, decrement otherwise, pinned at the extremes. */
  def satUpdate(old: UInt, len: Int, taken: Bool): UInt = {
    val maxVal = ((1 << len) - 1).U
    val pinnedHigh = old === maxVal
    val pinnedLow = old === 0.U
    Mux(pinnedHigh && taken, maxVal,
      Mux(pinnedLow && !taken, 0.U,
        Mux(taken, old + 1.U, old - 1.U)))
  }

  /** Saturating update of a signed `len`-bit counter, pinned at the
    * two's-complement extremes. */
  def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = {
    val posMax = ((1 << (len - 1)) - 1).S
    val negMin = (-(1 << (len - 1))).S
    Mux(old === posMax && taken, posMax,
      Mux(old === negMin && !taken, negMin,
        Mux(taken, old + 1.S, old - 1.S)))
  }

  /** Assemble the fall-through address from the upper bits of `start`
    * (plus an optional `carry`), the partial fall-through field `pft`,
    * and zeroed instruction-offset bits. */
  def getFallThroughAddr(start: UInt, carry: Bool, pft: UInt) = {
    val upperBits = start.head(VAddrBits - log2Ceil(PredictWidth) - instOffsetBits - 1)
    val upperSel = Mux(carry, upperBits + 1.U, upperBits)
    Cat(upperSel, pft, 0.U(instOffsetBits.W))
  }

  /** XOR-fold `tag` down to `l` bits; the final chunk may be narrower
    * when the width is not a multiple of `l`. */
  def foldTag(tag: UInt, l: Int): UInt = {
    val nChunks = (tag.getWidth + l - 1) / l
    val pieces = Seq.tabulate(nChunks) { i =>
      tag(min((i + 1) * l, tag.getWidth) - 1, i * l)
    }
    ParallelXOR(pieces)
  }
}
115
116// class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
117//   val pc = UInt(VAddrBits.W)
118//   val br_offset = Vec(num_br, UInt(log2Up(MaxBasicBlockSize).W))
119//   val br_mask = Vec(MaxBasicBlockSize, Bool())
120//
121//   val jmp_valid = Bool()
122//   val jmp_type = UInt(3.W)
123//
124//   val is_NextMask = Vec(FetchWidth*2, Bool())
125//
126//   val cfi_idx = Valid(UInt(log2Ceil(MaxBasicBlockSize).W))
127//   val cfi_mispredict = Bool()
128//   val cfi_is_br = Bool()
129//   val cfi_is_jal = Bool()
130//   val cfi_is_jalr = Bool()
131//
132//   val ghist = new GlobalHistory()
133//
134//   val target = UInt(VAddrBits.W)
135//
136//   val meta = UInt(MaxMetaLength.W)
137//   val spec_meta = UInt(MaxMetaLength.W)
138//
139//   def taken = cfi_idx.valid
140// }
141
// Input bundle shared by all predictors in the stack.
class BasePredictorInput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  // Number of upstream prediction responses carried in resp_in
  def nInputs = 1

  // Stage-0 fetch PC
  val s0_pc = UInt(VAddrBits.W)

  // Global branch history
  val ghist = UInt(HistoryLength.W)
  // Path history
  val phist = UInt(PathHistoryLength.W)

  // Responses from upstream predictors to be refined/passed through
  val resp_in = Vec(nInputs, new BranchPredictionResp)
  // val toFtq_fire = Bool()

  // val s0_all_ready = Bool()
}
155
// Output bundle shared by all predictors in the stack.
class BasePredictorOutput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val s3_meta = UInt(MaxMetaLength.W) // This is use by composer
  // The (possibly refined) prediction response
  val resp = new BranchPredictionResp

  // These store in meta, extract in composer
  // val rasSp = UInt(log2Ceil(RasSize).W)
  // val rasTop = new RASEntry
  // val specCnt = Vec(PredictWidth, UInt(10.W))
}
165
// IO bundle for a single predictor component.
class BasePredictorIO (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val in  = Flipped(DecoupledIO(new BasePredictorInput)) // TODO: Remove DecoupledIO
  // val out = DecoupledIO(new BasePredictorOutput)
  val out = Output(new BasePredictorOutput)
  // val flush_out = Valid(UInt(VAddrBits.W))

  // val ctrl = Input(new BPUCtrl())

  // Per-stage fire strobes driven by the top-level pipeline control
  val s0_fire = Input(Bool())
  val s1_fire = Input(Bool())
  val s2_fire = Input(Bool())
  val s3_fire = Input(Bool())

  // Per-stage ready signals back to the top-level pipeline control
  val s1_ready = Output(Bool())
  val s2_ready = Output(Bool())
  val s3_ready = Output(Bool())

  // Training update and redirect (recovery) from the FTQ
  val update = Flipped(Valid(new BranchPredictionUpdate))
  val redirect = Flipped(Valid(new BranchPredictionRedirect))
}
186
// Common base for predictor components: provides default connections
// (subclasses may reconnect them — Chisel last-connect wins) and the
// per-stage PC pipeline.
abstract class BasePredictor(implicit p: Parameters) extends XSModule with HasBPUConst with BPUUtils {
  // Metadata sizes contributed by this predictor (zero by default)
  val meta_size = 0
  val spec_meta_size = 0

  val io = IO(new BasePredictorIO())

  // Default: pass the incoming response through unchanged
  io.out.resp := io.in.bits.resp_in(0)

  io.out.s3_meta := 0.U

  // Default: refuse new input while a redirect is in flight
  io.in.ready := !io.redirect.valid

  io.s1_ready := true.B
  io.s2_ready := true.B
  io.s3_ready := true.B

  // Each stage latches the previous stage's PC when that stage fires
  val s0_pc       = WireInit(io.in.bits.s0_pc) // fetchIdx(io.f0_pc)
  val s1_pc       = RegEnable(s0_pc, resetVector.U, io.s0_fire)
  val s2_pc       = RegEnable(s1_pc, io.s1_fire)
  val s3_pc       = RegEnable(s2_pc, io.s2_fire)
}
208
// A no-op predictor: always ready, passes its input response through and
// produces no metadata.
class FakePredictor(implicit p: Parameters) extends BasePredictor {
  // Override the base default (`!io.redirect.valid`): a redirect never
  // back-pressures this predictor's input.
  io.in.ready := true.B
  // `io.out.s3_meta := 0.U` and `io.out.resp := io.in.bits.resp_in(0)` are
  // already the default connections made in BasePredictor, so they are not
  // repeated here.
}
214
// BPU -> FTQ channel: predictions flow out through a decoupled response.
class BpuToFtqIO(implicit p: Parameters) extends XSBundle {
  val resp = DecoupledIO(new BpuToFtqBundle())
}
218
// Top-level BPU IO: predictions out to the FTQ, updates/redirects back in.
class PredictorIO(implicit p: Parameters) extends XSBundle {
  val bpu_to_ftq = new BpuToFtqIO()
  val ftq_to_bpu = Flipped(new FtqToBpuIO())
}
223
// Minimal stand-in BPU: predicts straight-line fetch only, with no real
// direction or target prediction.
class FakeBPU(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  // Sequential fetch PC, advanced by one fetch width per accepted response
  val s0_pc = RegInit(resetVector.U)

  when(toFtq_fire) {
    s0_pc := s0_pc + (FetchWidth*4).U
  }

  // A redirect from the FTQ overrides sequential advance
  // (the later `when` takes priority in Chisel)
  when (io.ftq_to_bpu.redirect.valid) {
    s0_pc := io.ftq_to_bpu.redirect.bits.cfiUpdate.target
  }

  // No predictions during reset or while a redirect is being applied
  io.bpu_to_ftq.resp.valid := !reset.asBool() && !io.ftq_to_bpu.redirect.valid

  // Zero everything except the s1 PC and its fall-through address
  io.bpu_to_ftq.resp.bits := 0.U.asTypeOf(new BranchPredictionBundle)
  io.bpu_to_ftq.resp.bits.s1.pc := s0_pc
  io.bpu_to_ftq.resp.bits.s1.ftb_entry.pftAddr := s0_pc + (FetchWidth*4).U
}
245
@chiselName
// Top-level BPU: a 3-stage (s1/s2/s3) prediction pipeline wrapped around the
// predictor stack. Later stages may override earlier predictions
// (s2/s3 "redirects"), and FTQ redirects recover the whole pipeline.
// NOTE: many signals here (s0_pc, s0_ghist, s0_phist) are driven by several
// connections in sequence; Chisel last-connect-wins gives the effective
// priority: default regs < s1 prediction < s2 override < s3 override
// < FTQ redirect.
class Predictor(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  // The actual predictor stack; Composer chains the sub-predictors
  val predictors = Module(if (useBPD) new Composer else new FakePredictor)

  // Per-stage handshake
  val s0_fire, s1_fire, s2_fire, s3_fire = Wire(Bool())
  val s1_valid, s2_valid, s3_valid = RegInit(false.B)
  val s1_ready, s2_ready, s3_ready = Wire(Bool())
  val s1_components_ready, s2_components_ready, s3_components_ready = Wire(Bool())

  // Stage-0 PC wire plus its registered copy (used when no stage fired)
  val s0_pc = WireInit(resetVector.U)
  val s0_pc_reg = RegNext(s0_pc, init=resetVector.U)
  val s1_pc = RegEnable(s0_pc, s0_fire)
  val s2_pc = RegEnable(s1_pc, s1_fire)
  val s3_pc = RegEnable(s2_pc, s2_fire)

  // Global history pipeline, mirroring the PC pipeline
  val s0_ghist = WireInit(0.U.asTypeOf(new GlobalHistory))
  val s0_ghist_reg = RegNext(s0_ghist, init=0.U.asTypeOf(new GlobalHistory))
  val s1_ghist = RegEnable(s0_ghist, 0.U.asTypeOf(new GlobalHistory), s0_fire)
  val s2_ghist = RegEnable(s1_ghist, 0.U.asTypeOf(new GlobalHistory), s1_fire)
  val s3_ghist = RegEnable(s2_ghist, 0.U.asTypeOf(new GlobalHistory), s2_fire)

  // Path history pipeline
  val s0_phist = WireInit(0.U(PathHistoryLength.W))
  val s0_phist_reg = RegNext(s0_phist, init=0.U(PathHistoryLength.W))
  val s1_phist = RegEnable(s0_phist, 0.U, s0_fire)
  val s2_phist = RegEnable(s1_phist, 0.U, s1_fire)
  val s3_phist = RegEnable(s2_phist, 0.U, s2_fire)

  val resp = predictors.io.out.resp


  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  // Re-initialize s0 state on the cycle reset deasserts.
  // NOTE(review): these conditional connects are overridden by the
  // unconditional `s0_* := s0_*_reg` connects further below (last-connect
  // wins); the registers' reset inits make that benign, but confirm the
  // block is intentional.
  when(RegNext(reset.asBool) && !reset.asBool) {
    s0_ghist := 0.U.asTypeOf(new GlobalHistory)
    s0_phist := 0.U
    s0_pc := resetVector.U
  }

  // when(toFtq_fire) {
    // final_gh := s3_gh.update(io.bpu_to_ftq.resp.bits.ftb_entry.brValids.reduce(_||_) && !io.bpu_to_ftq.resp.bits.preds.taken,
    //   io.bpu_to_ftq.resp.bits.preds.taken)
  // }

  val s1_flush, s2_flush, s3_flush = Wire(Bool())
  val s2_redirect, s3_redirect = Wire(Bool())

  // val s1_bp_resp = predictors.io.out.resp.s1
  // val s2_bp_resp = predictors.io.out.resp.s2
  // val s3_bp_resp = predictors.io.out.resp.s3

  // predictors.io := DontCare
  // Feed stage-0 PC and histories into the predictor stack
  predictors.io.in.valid := s0_fire
  predictors.io.in.bits.s0_pc := s0_pc
  predictors.io.in.bits.ghist := s0_ghist.predHist
  predictors.io.in.bits.phist := s0_phist
  predictors.io.in.bits.resp_in(0) := (0.U).asTypeOf(new BranchPredictionResp)
  // predictors.io.in.bits.resp_in(0).s1.pc := s0_pc
  // predictors.io.in.bits.toFtq_fire := toFtq_fire

  // predictors.io.out.ready := io.bpu_to_ftq.resp.ready

  // Pipeline logic
  s2_redirect := false.B
  s3_redirect := false.B

  // A later-stage override or an FTQ redirect flushes all younger stages
  s3_flush := io.ftq_to_bpu.redirect.valid
  s2_flush := s3_flush || s3_redirect
  s1_flush := s2_flush || s2_redirect

  // s0 fires when s1 can accept: both the components and the stage slot
  s1_components_ready := predictors.io.s1_ready
  s1_ready := s1_fire || !s1_valid
  s0_fire := !reset.asBool && s1_components_ready && s1_ready
  predictors.io.s0_fire := s0_fire

  // s1 fires only when the FTQ can also accept the response
  s2_components_ready := predictors.io.s2_ready
  s2_ready := s2_fire || !s2_valid
  s1_fire := s1_valid && s2_components_ready && s2_ready && io.bpu_to_ftq.resp.ready

  when(s0_fire)         { s1_valid := true.B  }
    .elsewhen(s1_flush) { s1_valid := false.B }
    .elsewhen(s1_fire)  { s1_valid := false.B }

  predictors.io.s1_fire := s1_fire

  s3_components_ready := predictors.io.s3_ready
  s3_ready := s3_fire || !s3_valid
  s2_fire := s2_valid && s3_components_ready && s3_ready

  when(s2_flush)                    { s2_valid := false.B }
    .elsewhen(s1_fire && !s1_flush) { s2_valid := true.B  }
    .elsewhen(s2_fire)              { s2_valid := false.B }

  predictors.io.s2_fire := s2_fire

  // s3_fire := s3_valid && io.bpu_to_ftq.resp.ready
  // s3 drains unconditionally once valid
  s3_fire := s3_valid

  when(s3_flush)                    { s3_valid := false.B }
    .elsewhen(s2_fire && !s2_flush) { s3_valid := true.B  }
    .elsewhen(s3_fire)              { s3_valid := false.B }

  predictors.io.s3_fire := s3_fire

  // Respond to the FTQ either with a new s1 prediction or with an
  // s2/s3 override
  io.bpu_to_ftq.resp.valid :=
    s1_valid && s2_components_ready && s2_ready ||
    s2_fire && s2_redirect ||
    s3_fire && s3_redirect
  io.bpu_to_ftq.resp.bits  := BpuToFtqBundle(predictors.io.out.resp)
  io.bpu_to_ftq.resp.bits.meta  := predictors.io.out.s3_meta
  io.bpu_to_ftq.resp.bits.s3.ghist  := s3_ghist
  io.bpu_to_ftq.resp.bits.s3.phist  := s3_phist

  // Default s0 state: hold last value (overridden by the whens below)
  s0_pc := s0_pc_reg
  s0_ghist := s0_ghist_reg
  s0_phist := s0_phist_reg

  // History manage
  // s1
  // Speculatively update the history with s1's prediction
  val s1_predicted_ghist = s1_ghist.update(resp.s1.preds.br_valids, resp.s1.real_br_taken_mask())

  XSDebug(p"[hit] ${resp.s1.preds.hit} [s1_real_br_taken_mask] ${Binary(resp.s1.real_br_taken_mask.asUInt)}\n")
  XSDebug(p"s1_predicted_ghist=${Binary(s1_predicted_ghist.predHist)}\n")

  when(s1_valid) {
    s0_pc := resp.s1.target
    s0_ghist := s1_predicted_ghist
    s0_phist := (s1_phist << 1) | s1_pc(instOffsetBits)
  }

  // s2
  // If s2 disagrees with what s1 predicted (target, history, or taken-ness),
  // override: restart s0 from s2's result and flush s1
  val s2_predicted_ghist = s2_ghist.update(resp.s2.preds.br_valids, resp.s2.real_br_taken_mask())

  val s2_correct_s1_ghist = s1_ghist =/= s2_predicted_ghist
  val s2_correct_s0_ghist_reg = s0_ghist_reg =/= s2_predicted_ghist

  val previous_s1_pred_taken = RegEnable(resp.s1.real_slot_taken_mask.asUInt.orR, init=false.B, enable=s1_fire)
  val s2_pred_taken = resp.s2.real_slot_taken_mask.asUInt.orR

  when(s2_fire) {
    when((s1_valid && (s1_pc =/= resp.s2.target || s2_correct_s1_ghist)) ||
      !s1_valid && (s0_pc_reg =/= resp.s2.target || s2_correct_s0_ghist_reg) ||
      previous_s1_pred_taken =/= s2_pred_taken) {
      s0_ghist := s2_predicted_ghist
      s2_redirect := true.B
      s0_pc := resp.s2.target
      s0_phist := (s2_phist << 1) | s2_pc(instOffsetBits)
      XSDebug(p"s1_valid=$s1_valid, s1_pc=${Hexadecimal(s1_pc)}, s2_resp_target=${Hexadecimal(resp.s2.target)}\n")
      XSDebug(p"s2_correct_s1_ghist=$s2_correct_s1_ghist\n")
      XSDebug(p"s1_ghist=${Binary(s1_ghist.predHist)}\n")
      XSDebug(p"s2_predicted_ghist=${Binary(s2_predicted_ghist.predHist)}\n")
    }
  }

  // Perf counters classifying why s2 overrode s1
  val s2_redirect_target = s2_fire && s1_valid && s1_pc =/= resp.s2.target
  val s2_saw_s1_hit = RegEnable(resp.s1.preds.hit, s1_fire)
  val s2_redirect_target_both_hit = s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit

  XSPerfAccumulate("s2_redirect_because_s1_not_valid", s2_fire && !s1_valid)
  XSPerfAccumulate("s2_redirect_because_target_diff", s2_fire && s1_valid && s1_pc =/= resp.s2.target)
  XSPerfAccumulate("s2_redirect_target_diff_s1_nhit_s2_hit", s2_redirect_target && !s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_s1_hit_s2_nhit", s2_redirect_target &&  s2_saw_s1_hit && !resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_both_hit",  s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_br_direction_diff",
    s2_redirect_target_both_hit &&
    RegEnable(PriorityEncoder(resp.s1.preds.br_taken_mask), s1_fire) =/= PriorityEncoder(resp.s2.preds.br_taken_mask))
  XSPerfAccumulate("s2_redirect_because_ghist_diff", s2_fire && s1_valid && s2_correct_s1_ghist)

  // s3
  // Same override scheme as s2, comparing against whichever younger stage
  // currently holds the speculative state (s2, else s1, else s0_pc_reg)
  val s3_predicted_ghist = s3_ghist.update(resp.s3.preds.br_valids, resp.s3.real_br_taken_mask())

  val s3_correct_s2_ghist = s2_ghist =/= s3_predicted_ghist
  val s3_correct_s1_ghist = s1_ghist =/= s3_predicted_ghist
  val s3_correct_s0_ghist_reg = s0_ghist_reg =/= s3_predicted_ghist

  val previous_s2_pred_taken = RegEnable(resp.s2.real_slot_taken_mask.asUInt.orR, init=false.B, enable=s2_fire)
  val s3_pred_taken = resp.s3.real_slot_taken_mask.asUInt.orR

  when(s3_fire) {
    when((s2_valid && (s2_pc =/= resp.s3.target || s3_correct_s2_ghist)) ||
      (!s2_valid && s1_valid && (s1_pc =/= resp.s3.target || s3_correct_s1_ghist)) ||
      (!s2_valid && !s1_valid && (s0_pc_reg =/= resp.s3.target || s3_correct_s0_ghist_reg)) ||
      previous_s2_pred_taken =/= s3_pred_taken) {

      s0_ghist := s3_predicted_ghist
      s3_redirect := true.B
      s0_pc := resp.s3.target
      s0_phist := (s3_phist << 1) | s3_pc(instOffsetBits)
    }
  }

  // Send signal tell Ftq override
  // FTQ indices pipelined alongside the prediction so the FTQ knows which
  // entry an s2/s3 override refers to
  val s2_ftq_idx = RegEnable(io.ftq_to_bpu.enq_ptr, s1_fire)
  val s3_ftq_idx = RegEnable(s2_ftq_idx, s2_fire)

  io.bpu_to_ftq.resp.bits.s1.valid := s1_fire && !s1_flush
  io.bpu_to_ftq.resp.bits.s1.hasRedirect := false.B
  io.bpu_to_ftq.resp.bits.s1.ftq_idx := DontCare
  io.bpu_to_ftq.resp.bits.s2.valid := s2_fire && !s2_flush
  io.bpu_to_ftq.resp.bits.s2.hasRedirect := s2_redirect
  io.bpu_to_ftq.resp.bits.s2.ftq_idx := s2_ftq_idx
  io.bpu_to_ftq.resp.bits.s3.valid := s3_fire && !s3_flush
  io.bpu_to_ftq.resp.bits.s3.hasRedirect := s3_redirect
  io.bpu_to_ftq.resp.bits.s3.ftq_idx := s3_ftq_idx

  val redirect = io.ftq_to_bpu.redirect.bits

  predictors.io.update := io.ftq_to_bpu.update
  predictors.io.redirect := io.ftq_to_bpu.redirect

  // FTQ redirect: restore PC and histories from the redirect's snapshot.
  // This is the last set of connects to s0_*, so it has highest priority.
  when(io.ftq_to_bpu.redirect.valid) {
    val oldGh = redirect.cfiUpdate.hist

    val shift = redirect.cfiUpdate.shift
    val addIntoHist = redirect.cfiUpdate.addIntoHist

    val isBr = redirect.cfiUpdate.pd.isBr
    val taken = redirect.cfiUpdate.taken

    // Repair global history from the snapshot taken at the faulting CFI
    val updatedGh = oldGh.update(shift, taken && addIntoHist)
    s0_ghist := updatedGh // TODO: History fix logic
    s0_pc := redirect.cfiUpdate.target
    // Repair path history likewise
    val oldPh = redirect.cfiUpdate.phist
    val phNewBit = redirect.cfiUpdate.phNewBit
    s0_phist := (oldPh << 1) | phNewBit

    XSDebug(io.ftq_to_bpu.redirect.valid, p"-------------redirect Repair------------\n")
    // XSDebug(io.ftq_to_bpu.redirect.valid, p"taken_mask=${Binary(taken_mask.asUInt)}, brValids=${Binary(brValids.asUInt)}\n")
    XSDebug(io.ftq_to_bpu.redirect.valid, p"isBr: ${isBr}, taken: ${taken}, addIntoHist: ${addIntoHist}, shift: ${shift}\n")
    XSDebug(io.ftq_to_bpu.redirect.valid, p"oldGh   =${Binary(oldGh.predHist)}\n")
    XSDebug(io.ftq_to_bpu.redirect.valid, p"updateGh=${Binary(updatedGh.predHist)}\n")

  }

  XSDebug(RegNext(reset.asBool) && !reset.asBool, "Reseting...\n")
  XSDebug(io.ftq_to_bpu.update.valid, p"Update from ftq\n")
  XSDebug(io.ftq_to_bpu.redirect.valid, p"Redirect from ftq\n")

  XSDebug("[BP0]                 fire=%d                      pc=%x\n", s0_fire, s0_pc)
  XSDebug("[BP1] v=%d r=%d cr=%d fire=%d             flush=%d pc=%x\n",
    s1_valid, s1_ready, s1_components_ready, s1_fire, s1_flush, s1_pc)
  XSDebug("[BP2] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n",
  s2_valid, s2_ready, s2_components_ready, s2_fire, s2_redirect, s2_flush, s2_pc)
  XSDebug("[BP3] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n",
  s3_valid, s3_ready, s3_components_ready, s3_fire, s3_redirect, s3_flush, s3_pc)
  XSDebug("[FTQ] ready=%d\n", io.bpu_to_ftq.resp.ready)
  XSDebug("resp.s1.target=%x\n", resp.s1.target)
  XSDebug("resp.s2.target=%x\n", resp.s2.target)
  XSDebug("s0_ghist: %b\n", s0_ghist.predHist)
  XSDebug("s1_ghist: %b\n", s1_ghist.predHist)
  XSDebug("s2_ghist: %b\n", s2_ghist.predHist)
  XSDebug("s3_ghist: %b\n", s3_ghist.predHist)
  XSDebug("s2_predicted_ghist: %b\n", s2_predicted_ghist.predHist)
  XSDebug("s3_predicted_ghist: %b\n", s3_predicted_ghist.predHist)
  XSDebug("s3_correct_s2_ghist: %b, s3_correct_s1_ghist: %b, s2_correct_s1_ghist: %b\n",
  s3_correct_s2_ghist,  s3_correct_s1_ghist,  s2_correct_s1_ghist)


  io.ftq_to_bpu.update.bits.display(io.ftq_to_bpu.update.valid)
  io.ftq_to_bpu.redirect.bits.display(io.ftq_to_bpu.redirect.valid)


  XSPerfAccumulate("s2_redirect", s2_redirect)
  XSPerfAccumulate("s3_redirect", s3_redirect)

}
513