xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision b30c10d68f6c89b2a5fe6a41bcfed69865117e9e)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.experimental.chiselName
import chisel3.util._
import xiangshan._
import utils._

import scala.math.min

trait HasBPUConst extends HasXSParameter {
  val MaxMetaLength = 512 // TODO: Reduce meta length
  val MaxBasicBlockSize = 32
  val LHistoryLength = 32
  // val numBr = 2
  val useBPD = true
  val useLHist = true
  val shareTailSlot = true
  val numBrSlot = if (shareTailSlot) numBr-1 else numBr
  val totalSlot = numBrSlot + 1

  def BP_STAGES = (0 until 2).map(_.U(2.W))
  def BP_S1 = BP_STAGES(0)
  def BP_S2 = BP_STAGES(1)
  // def BP_S3 = BP_STAGES(2)
  val numBpStages = BP_STAGES.length

  val debug = true
  val resetVector = 0x10000000L // TODO: set reset vec
  // TODO: Replace log2Up by log2Ceil
}
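// Note on the slot constants above (worked example, based on the FTB entry layout):
// with shareTailSlot = true and, say, numBr = 2, we get numBrSlot = 1 and totalSlot = 2,
// i.e. one dedicated conditional-branch slot plus a tail slot that can be shared between
// the last conditional branch and the unconditional jump; with shareTailSlot = false the
// tail slot is reserved for the jump only, so all numBr branches need their own slots.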

trait HasBPUParameter extends HasXSParameter with HasBPUConst {
  val BPUDebug = true && !env.FPGAPlatform && env.EnablePerfDebug
  val EnableCFICommitLog = true
  val EnableCFIPredLog = true
  val EnableBPUTimeRecord = (EnableCFICommitLog || EnableCFIPredLog) && !env.FPGAPlatform
  val EnableCommit = false
}

class BPUCtrl(implicit p: Parameters) extends XSBundle {
  val ubtb_enable = Bool()
  val btb_enable  = Bool()
  val bim_enable  = Bool()
  val tage_enable = Bool()
  val sc_enable   = Bool()
  val ras_enable  = Bool()
  val loop_enable = Bool()
}

trait BPUUtils extends HasXSParameter {
  // circular shifting
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }

  def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << (len.U - shamt)
    val lower = source >> shamt
    res := higher | lower
    res
  }

  // To be verified
  def satUpdate(old: UInt, len: Int, taken: Bool): UInt = {
    val oldSatTaken = old === ((1 << len)-1).U
    val oldSatNotTaken = old === 0.U
    Mux(oldSatTaken && taken, ((1 << len)-1).U,
      Mux(oldSatNotTaken && !taken, 0.U,
        Mux(taken, old + 1.U, old - 1.U)))
  }

  def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = {
    val oldSatTaken = old === ((1 << (len-1))-1).S
    val oldSatNotTaken = old === (-(1 << (len-1))).S
    Mux(oldSatTaken && taken, ((1 << (len-1))-1).S,
      Mux(oldSatNotTaken && !taken, (-(1 << (len-1))).S,
        Mux(taken, old + 1.S, old - 1.S)))
  }

  def getFallThroughAddr(start: UInt, carry: Bool, pft: UInt) = {
    val higher = start.head(VAddrBits-log2Ceil(PredictWidth)-instOffsetBits-1)
    Cat(Mux(carry, higher+1.U, higher), pft, 0.U(instOffsetBits.W))
  }

  def foldTag(tag: UInt, l: Int): UInt = {
    val nChunks = (tag.getWidth + l - 1) / l
    val chunks = (0 until nChunks).map { i =>
      tag(min((i+1)*l, tag.getWidth)-1, i*l)
    }
    ParallelXOR(chunks)
  }
}
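// For intuition, a small pure-Scala model of the saturating-counter update implemented
// by satUpdate above. This is an illustrative sketch only, not part of the hardware;
// the object name SatCounterModel is made up for the example. An n-bit counter moves up
// on a taken branch and down otherwise, clamping at 0 and 2^n - 1.
object SatCounterModel {
  def satUpdate(old: Int, len: Int, taken: Boolean): Int = {
    val max = (1 << len) - 1
    if (taken) math.min(old + 1, max) // saturate at the all-ones value
    else       math.max(old - 1, 0)   // saturate at zero
  }

  def main(args: Array[String]): Unit = {
    // A 2-bit counter that sees five taken branches in a row: 0 -> 1 -> 2 -> 3 -> 3 -> 3
    println((1 to 5).scanLeft(0)((s, _) => satUpdate(s, 2, taken = true)).mkString(" -> "))
  }
}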

// class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
//   val pc = UInt(VAddrBits.W)
//   val br_offset = Vec(num_br, UInt(log2Up(MaxBasicBlockSize).W))
//   val br_mask = Vec(MaxBasicBlockSize, Bool())
//
//   val jmp_valid = Bool()
//   val jmp_type = UInt(3.W)
//
//   val is_NextMask = Vec(FetchWidth*2, Bool())
//
//   val cfi_idx = Valid(UInt(log2Ceil(MaxBasicBlockSize).W))
//   val cfi_mispredict = Bool()
//   val cfi_is_br = Bool()
//   val cfi_is_jal = Bool()
//   val cfi_is_jalr = Bool()
//
//   val ghist = new ShiftingGlobalHistory()
//
//   val target = UInt(VAddrBits.W)
//
//   val meta = UInt(MaxMetaLength.W)
//   val spec_meta = UInt(MaxMetaLength.W)
//
//   def taken = cfi_idx.valid
// }

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghr: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghr, ptr, shift, taken)
    }
    res
  }
  // def update(ghr: Vec[Bool], ptr: CGHPtr, br_valids: Vec[Bool], br_takens: Vec[Bool]): AllFoldedHistories = {
  //   val last_valid_idx = PriorityMux(
  //     br_valids.reverse :+ true.B,
  //     (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
  //   )
  //   val first_taken_idx = PriorityEncoder(false.B +: br_takens)
  //   val smaller = Mux(last_valid_idx < first_taken_idx,
  //     last_valid_idx,
  //     first_taken_idx
  //   )
  //   val shift = smaller
  //   val taken = br_takens.reduce(_||_)
  //   update(ghr, ptr, shift, taken)
  // }
  // def update(ghr: Vec[Bool], ptr: CGHPtr, resp: BranchPredictionBundle): AllFoldedHistories = {
  //   update(ghr, ptr, resp.preds.br_valids, resp.real_br_taken_mask)
  // }
  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}
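// AllFoldedHistories keeps, for every (len, compLen) pair a predictor asks for, a
// compLen-bit compression of the most recent len global-history bits. A minimal
// pure-Scala sketch of that idea is shown below, assuming the usual XOR-folding scheme
// used by TAGE-style index hashing; the real FoldedHistory bundle updates its value
// incrementally in hardware rather than re-folding the whole history, and the object
// name FoldedHistoryModel is made up for this example.
object FoldedHistoryModel {
  // Fold the most recent `len` bits of `ghist` (ghist(0) is the newest bit) into
  // `compLen` bits by XOR-ing consecutive compLen-bit chunks.
  def fold(ghist: Seq[Boolean], len: Int, compLen: Int): Seq[Boolean] = {
    val recent = ghist.take(len).padTo(len, false)
    recent.grouped(compLen)
      .map(_.padTo(compLen, false))
      .reduce((a, b) => a.zip(b).map { case (x, y) => x ^ y })
  }

  def main(args: Array[String]): Unit = {
    val ghist = Seq(true, false, true, true, false, false, true, false) // newest first
    // Folding 8 history bits down to 3 bits prints "101" for this example history.
    println(fold(ghist, len = 8, compLen = 3).map(b => if (b) 1 else 0).mkString)
  }
}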

class BasePredictorInput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  def nInputs = 1

  val s0_pc = UInt(VAddrBits.W)

  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val phist = UInt(PathHistoryLength.W)

  val resp_in = Vec(nInputs, new BranchPredictionResp)

  // val final_preds = Vec(numBpStages, new)
  // val toFtq_fire = Bool()

  // val s0_all_ready = Bool()
}

class BasePredictorOutput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val last_stage_meta = UInt(MaxMetaLength.W) // This is used by the composer
  val resp = new BranchPredictionResp

  // These are stored in meta and extracted in the composer
  // val rasSp = UInt(log2Ceil(RasSize).W)
  // val rasTop = new RASEntry
  // val specCnt = Vec(PredictWidth, UInt(10.W))
}

class BasePredictorIO (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val in  = Flipped(DecoupledIO(new BasePredictorInput)) // TODO: Remove DecoupledIO
  // val out = DecoupledIO(new BasePredictorOutput)
  val out = Output(new BasePredictorOutput)
  // val flush_out = Valid(UInt(VAddrBits.W))

  // val ctrl = Input(new BPUCtrl())

  val s0_fire = Input(Bool())
  val s1_fire = Input(Bool())
  val s2_fire = Input(Bool())

  val s1_ready = Output(Bool())
  val s2_ready = Output(Bool())

  val update = Flipped(Valid(new BranchPredictionUpdate))
  val redirect = Flipped(Valid(new BranchPredictionRedirect))
}

abstract class BasePredictor(implicit p: Parameters) extends XSModule with HasBPUConst with BPUUtils {
  val meta_size = 0
  val spec_meta_size = 0
  val io = IO(new BasePredictorIO())

  io.out.resp := io.in.bits.resp_in(0)

  io.out.last_stage_meta := 0.U

  io.in.ready := !io.redirect.valid

  io.s1_ready := true.B
  io.s2_ready := true.B

  val s0_pc       = WireInit(io.in.bits.s0_pc) // fetchIdx(io.f0_pc)
  val s1_pc       = RegEnable(s0_pc, resetVector.U, io.s0_fire)
  val s2_pc       = RegEnable(s1_pc, io.s1_fire)


  def getFoldedHistoryInfo: Option[Set[FoldedHistoryInfo]] = None
}

class FakePredictor(implicit p: Parameters) extends BasePredictor {
  io.in.ready            := true.B
  io.out.last_stage_meta := 0.U
  io.out.resp            := io.in.bits.resp_in(0)
}

class BpuToFtqIO(implicit p: Parameters) extends XSBundle {
  val resp = DecoupledIO(new BpuToFtqBundle())
}

class PredictorIO(implicit p: Parameters) extends XSBundle {
  val bpu_to_ftq = new BpuToFtqIO()
  val ftq_to_bpu = Flipped(new FtqToBpuIO())
}

class FakeBPU(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  val s0_pc = RegInit(resetVector.U)

  when(toFtq_fire) {
    s0_pc := s0_pc + (FetchWidth*4).U
  }

  when (io.ftq_to_bpu.redirect.valid) {
    s0_pc := io.ftq_to_bpu.redirect.bits.cfiUpdate.target
  }

  io.bpu_to_ftq.resp.valid := !reset.asBool() && !io.ftq_to_bpu.redirect.valid

  io.bpu_to_ftq.resp.bits := 0.U.asTypeOf(new BranchPredictionBundle)
  io.bpu_to_ftq.resp.bits.s1.pc := s0_pc
  io.bpu_to_ftq.resp.bits.s1.ftb_entry.pftAddr := s0_pc + (FetchWidth*4).U
}

@chiselName
class Predictor(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  val predictors = Module(if (useBPD) new Composer else new FakePredictor)

  val folded_hist_infos = predictors.getFoldedHistoryInfo.getOrElse(Set()).toList
  for ((len, compLen) <- folded_hist_infos) {
    println(f"folded hist info: len $len, compLen $compLen")
  }

  val s0_fire, s1_fire, s2_fire = Wire(Bool())
  val s1_valid, s2_valid = RegInit(false.B)
  val s1_ready, s2_ready = Wire(Bool())
  val s1_components_ready, s2_components_ready = Wire(Bool())

  val s0_pc = WireInit(resetVector.U)
  val s0_pc_reg = RegNext(s0_pc, init=resetVector.U)
  val s1_pc = RegEnable(s0_pc, s0_fire)
  val s2_pc = RegEnable(s1_pc, s1_fire)

  val s0_folded_gh = Wire(new AllFoldedHistories(foldedGHistInfos))
  val s0_folded_gh_reg = RegNext(s0_folded_gh, init=0.U.asTypeOf(s0_folded_gh))
  val s1_folded_gh = RegEnable(s0_folded_gh, 0.U.asTypeOf(s0_folded_gh), s0_fire)
  val s2_folded_gh = RegEnable(s1_folded_gh, 0.U.asTypeOf(s0_folded_gh), s1_fire)

  val npcGen   = new PhyPriorityMuxGenerator[UInt]
  val foldedGhGen = new PhyPriorityMuxGenerator[AllFoldedHistories]
  val ghistPtrGen = new PhyPriorityMuxGenerator[CGHPtr]
  val phistGen = new PhyPriorityMuxGenerator[UInt]
  val lastPredGen = new PhyPriorityMuxGenerator[BranchPredictionBundle]
  val ghrBitWriteGens = Seq.tabulate(HistoryLength)(n => new PhyPriorityMuxGenerator[Bool])

  val ghr = RegInit(0.U.asTypeOf(Vec(HistoryLength, Bool())))
  val ghr_wire = WireInit(ghr)

  val ghr_write_datas = Wire(Vec(HistoryLength, Bool()))
  val ghr_wens = Wire(Vec(HistoryLength, Bool()))

  val s0_ghist_ptr = Wire(new CGHPtr)
  val s0_ghist_ptr_reg = RegNext(s0_ghist_ptr, init=0.U.asTypeOf(new CGHPtr))
  val s1_ghist_ptr = RegEnable(s0_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s0_fire)
  val s2_ghist_ptr = RegEnable(s1_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s1_fire)

  val s0_last_pred = Wire(new BranchPredictionBundle)
  val s0_last_pred_reg = RegNext(s0_last_pred, init=0.U.asTypeOf(new BranchPredictionBundle))
  val s1_last_pred = RegEnable(s0_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s0_fire)
  val s2_last_pred = RegEnable(s1_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s1_fire)

  val s0_phist = WireInit(0.U(PathHistoryLength.W))
  val s0_phist_reg = RegNext(s0_phist, init=0.U(PathHistoryLength.W))
  val s1_phist = RegEnable(s0_phist, 0.U, s0_fire)
  val s2_phist = RegEnable(s1_phist, 0.U, s1_fire)

  val resp = predictors.io.out.resp


  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  val s1_flush, s2_flush = Wire(Bool())
  val s2_redirect = Wire(Bool())

  // predictors.io := DontCare
  predictors.io.in.valid := s0_fire
  predictors.io.in.bits.s0_pc := s0_pc
  predictors.io.in.bits.folded_hist := s0_folded_gh
  predictors.io.in.bits.phist := s0_phist
  predictors.io.in.bits.resp_in(0) := (0.U).asTypeOf(new BranchPredictionResp)
  // predictors.io.in.bits.resp_in(0).s1.pc := s0_pc
  // predictors.io.in.bits.toFtq_fire := toFtq_fire

  // predictors.io.out.ready := io.bpu_to_ftq.resp.ready

  // Pipeline logic
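  // Handshake summary (from the logic below): s0 produces the next fetch PC and fires
  // whenever s1 can take a new request (it is empty or firing this cycle) and all
  // predictor components report s1_ready. s1 holds the in-flight prediction and fires
  // only when s2 and the FTQ response channel can accept it. s2 fires whenever it is
  // valid; its result either confirms the earlier s1 prediction or overrides it through
  // s2_redirect. A redirect from the FTQ flushes both s1 and s2, whereas an s2 override
  // flushes only s1.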
  s2_redirect := false.B

  s2_flush := io.ftq_to_bpu.redirect.valid
  s1_flush := s2_flush || s2_redirect

  s1_components_ready := predictors.io.s1_ready
  s1_ready := s1_fire || !s1_valid
  s0_fire := !reset.asBool && s1_components_ready && s1_ready
  predictors.io.s0_fire := s0_fire

  s2_components_ready := predictors.io.s2_ready
  s2_ready := s2_fire || !s2_valid
  s1_fire := s1_valid && s2_components_ready && s2_ready && io.bpu_to_ftq.resp.ready

  when(s0_fire)         { s1_valid := true.B  }
    .elsewhen(s1_flush) { s1_valid := false.B }
    .elsewhen(s1_fire)  { s1_valid := false.B }

  predictors.io.s1_fire := s1_fire

  s2_fire := s2_valid

  when(s2_flush)       { s2_valid := false.B }
    .elsewhen(s1_fire) { s2_valid := !s1_flush  }
    .elsewhen(s2_fire) { s2_valid := false.B }

  predictors.io.s2_fire := s2_fire


  io.bpu_to_ftq.resp.valid :=
    s1_valid && s2_components_ready && s2_ready ||
    s2_fire && s2_redirect
  io.bpu_to_ftq.resp.bits  := BpuToFtqBundle(predictors.io.out.resp)
  io.bpu_to_ftq.resp.bits.meta  := predictors.io.out.last_stage_meta // TODO: change to lastStageMeta
  io.bpu_to_ftq.resp.bits.s2.folded_hist := s2_folded_gh
  io.bpu_to_ftq.resp.bits.s2.histPtr := s2_ghist_ptr
  io.bpu_to_ftq.resp.bits.s2.phist  := s2_phist

  npcGen.register(true.B, s0_pc_reg, Some("stallPC"), 0)
  foldedGhGen.register(true.B, s0_folded_gh_reg, Some("stallFGH"), 0)
  ghistPtrGen.register(true.B, s0_ghist_ptr_reg, Some("stallGHPtr"), 0)
  phistGen.register(true.B, s0_phist_reg, Some("stallPhist"), 0)
  lastPredGen.register(true.B, s0_last_pred_reg, Some("stallLastPred"), 0)

  // History management
  // s1
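  // For each stage, the global-history state is updated speculatively: all numBr+1
  // candidate folded histories (shifting in 0 to numBr new bits) are computed with
  // AllFoldedHistories.update, and lastBrPosOH selects the one that matches the last
  // branch slot actually consumed by this prediction. The checkpointed pointer moves by
  // the same amount, and the corresponding bits of the circular ghr buffer are written
  // with the per-slot taken bits via the per-bit write enables below.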
  val s1_possible_predicted_ghist_ptrs = (0 to numBr).map(s1_ghist_ptr - _.U)
  val s1_predicted_ghist_ptr = Mux1H(resp.s1.lastBrPosOH, s1_possible_predicted_ghist_ptrs)
  val s1_possible_predicted_fhs = (0 to numBr).map(i =>
    s1_folded_gh.update(ghr, s1_ghist_ptr, i, if (i > 0) resp.s1.preds.br_taken_mask(i-1) else false.B))
  val s1_predicted_fh = Mux1H(resp.s1.lastBrPosOH, s1_possible_predicted_fhs)

  require(isPow2(HistoryLength))
  val s1_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => (s1_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s1.shouldShiftVec(b) && s1_valid))
  val s1_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => (s1_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s1.shouldShiftVec(b)),
      resp.s1.real_br_taken_mask()
    )
  )


  npcGen.register(s1_valid, resp.s1.target, Some("s1_target"), 5)
  foldedGhGen.register(s1_valid, s1_predicted_fh, Some("s1_FGH"), 5)
  ghistPtrGen.register(s1_valid, s1_predicted_ghist_ptr, Some("s1_GHPtr"), 5)
  phistGen.register(s1_valid, (s1_phist << 1) | s1_pc(instOffsetBits), Some("s1_Phist"), 5)
  lastPredGen.register(s1_valid, resp.s1, Some("s1_lastPred"), 5)
  ghrBitWriteGens.zip(s1_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), s1_ghr_wdatas(i), Some(s"s1_new_bit_$i"), 5)
  }

  def preds_needs_redirect(x: BranchPredictionBundle, y: BranchPredictionBundle) = {
    x.preds.hit =/= y.preds.hit ||
    x.real_slot_taken_mask().asUInt.orR =/= y.real_slot_taken_mask().asUInt().orR ||
    x.preds.br_valids.asUInt =/= y.preds.br_valids.asUInt ||
    PriorityEncoder(x.real_br_taken_mask()) =/= PriorityEncoder(y.real_br_taken_mask)
  }

  def no_need_to_redirect(x: BranchPredictionBundle, y: BranchPredictionBundle) = {
    !x.preds.hit && !y.preds.hit ||
    x.preds.hit && y.preds.hit && (
      VecInit(x.lastBrPosOH).asUInt === VecInit(y.lastBrPosOH).asUInt &&
      x.preds.taken_mask_on_slot.asUInt === y.preds.taken_mask_on_slot.asUInt
    )
  }
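  // preds_needs_redirect is the check actually used below: s2 overrides the in-flight
  // s1 prediction (s2_redirect) when its result differs from what s1 sent out, either
  // in the next-fetch target or in the predicted hit/taken pattern. no_need_to_redirect
  // is an alternative formulation that is only referenced in the commented-out lines
  // further down.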
  // s2
  val s2_possible_predicted_ghist_ptrs = (0 to numBr).map(s2_ghist_ptr - _.U)
  val s2_predicted_ghist_ptr = Mux1H(resp.s2.lastBrPosOH, s2_possible_predicted_ghist_ptrs)
  val s2_possible_predicted_fhs = (0 to numBr).map(i =>
    s2_folded_gh.update(ghr, s2_ghist_ptr, i, if (i > 0) resp.s2.preds.br_taken_mask(i-1) else false.B))
  val s2_predicted_fh = Mux1H(resp.s2.lastBrPosOH, s2_possible_predicted_fhs)
  val s2_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => (s2_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s2.shouldShiftVec(b) && s2_redirect))
  val s2_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => (s2_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s2.shouldShiftVec(b)),
      resp.s2.real_br_taken_mask()
    )
  )

  val previous_s1_pred = RegEnable(resp.s1, init=0.U.asTypeOf(resp.s1), s1_fire)

  // val s2_redirect_s1_last_pred = !no_need_to_redirect(s1_last_pred, resp.s2)
  // val s2_redirect_s0_last_pred = !no_need_to_redirect(s0_last_pred_reg, resp.s2)
  val s2_redirect_s1_last_pred = preds_needs_redirect(s1_last_pred, resp.s2)
  val s2_redirect_s0_last_pred = preds_needs_redirect(s0_last_pred_reg, resp.s2)

  s2_redirect := s2_fire && ((s1_valid && (resp.s2.targetDiffFrom(s1_pc) || s2_redirect_s1_last_pred)) ||
      !s1_valid && (resp.s2.targetDiffFrom(s0_pc_reg) || s2_redirect_s0_last_pred))

  // when(s2_redirect) { ghist_update(s2_ghist_ptr, resp.s2) }
  npcGen.register(s2_redirect, resp.s2.target, Some("s2_target"), 4)
  foldedGhGen.register(s2_redirect, s2_predicted_fh, Some("s2_FGH"), 4)
  ghistPtrGen.register(s2_redirect, s2_predicted_ghist_ptr, Some("s2_GHPtr"), 4)
  phistGen.register(s2_redirect, (s2_phist << 1) | s2_pc(instOffsetBits), Some("s2_Phist"), 4)
  lastPredGen.register(s2_redirect, resp.s2, Some("s2_lastPred"), 4)
  ghrBitWriteGens.zip(s2_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), s2_ghr_wdatas(i), Some(s"s2_new_bit_$i"), 4)
  }

  val s2_redirect_target = s2_fire && s1_valid && s1_pc =/= resp.s2.target
  val s2_saw_s1_hit = RegEnable(resp.s1.preds.hit, s1_fire)
  val s2_redirect_target_both_hit = s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit

  XSPerfAccumulate("s2_redirect_because_s1_not_valid", s2_fire && !s1_valid)
  XSPerfAccumulate("s2_redirect_because_target_diff", s2_fire && s1_valid && s1_pc =/= resp.s2.target)
  XSPerfAccumulate("s2_redirect_target_diff_s1_nhit_s2_hit", s2_redirect_target && !s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_s1_hit_s2_nhit", s2_redirect_target &&  s2_saw_s1_hit && !resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_both_hit",  s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_br_direction_diff",
    s2_redirect_target_both_hit &&
    RegEnable(PriorityEncoder(resp.s1.preds.br_taken_mask), s1_fire) =/= PriorityEncoder(resp.s2.preds.br_taken_mask))
  // XSPerfAccumulate("s2_redirect_because_ghist_diff", s2_fire && s1_valid && s2_correct_s1_ghist)


  // Tell the FTQ that s2 overrides the earlier s1 prediction
  val s2_ftq_idx = RegEnable(io.ftq_to_bpu.enq_ptr, s1_fire)

  io.bpu_to_ftq.resp.bits.s1.valid := s1_fire && !s1_flush
  io.bpu_to_ftq.resp.bits.s1.hasRedirect := false.B
  io.bpu_to_ftq.resp.bits.s1.ftq_idx := DontCare
  io.bpu_to_ftq.resp.bits.s2.valid := s2_fire && !s2_flush
  io.bpu_to_ftq.resp.bits.s2.hasRedirect := s2_redirect
  io.bpu_to_ftq.resp.bits.s2.ftq_idx := s2_ftq_idx

  val redirect = io.ftq_to_bpu.redirect.bits

  predictors.io.update := io.ftq_to_bpu.update
  predictors.io.redirect := io.ftq_to_bpu.redirect

  // Redirect logic
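  // On a redirect from the FTQ (e.g. a misprediction resolved later in the machine),
  // the history state checkpointed with the offending instruction (histPtr, folded_hist,
  // phist) is restored and then re-applied with the resolved outcome: the pointer and
  // folded histories shift by `shift` slots and the corresponding ghr bits are rewritten
  // with the actual taken direction.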
  val shift = redirect.cfiUpdate.shift
  val addIntoHist = redirect.cfiUpdate.addIntoHist
  // TODO: remove these below
  val shouldShiftVec = Mux(shift === 0.U, VecInit(0.U((1 << (log2Ceil(numBr) + 1)).W).asBools), VecInit((LowerMask(1.U << (shift-1.U))).asBools()))
  // TODO end

  val isBr = redirect.cfiUpdate.pd.isBr
  val taken = redirect.cfiUpdate.taken
  val real_br_taken_mask = (0 until numBr).map(i => shift === (i+1).U && taken && addIntoHist )

  val oldPtr = redirect.cfiUpdate.histPtr
  val oldFh = redirect.cfiUpdate.folded_hist
  val updated_ptr = oldPtr - shift
  val updated_fh = VecInit((0 to numBr).map(i => oldFh.update(ghr, oldPtr, i, taken && addIntoHist)))(shift)
  val redirect_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => oldPtr.value === (n.U(log2Ceil(HistoryLength).W) + b.U) && shouldShiftVec(b) && io.ftq_to_bpu.redirect.valid))
  val redirect_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => oldPtr.value === (n.U(log2Ceil(HistoryLength).W) + b.U) && shouldShiftVec(b)),
      real_br_taken_mask
    )
  )


  // val updatedGh = oldGh.update(shift, taken && addIntoHist)
  val oldPh = redirect.cfiUpdate.phist
  val phNewBit = redirect.cfiUpdate.phNewBit

  // when(io.ftq_to_bpu.redirect.valid) { ghist_update(oldPtr, shift, taken && addIntoHist) }
  npcGen.register(io.ftq_to_bpu.redirect.valid, redirect.cfiUpdate.target, Some("redirect_target"), 2)
  foldedGhGen.register(io.ftq_to_bpu.redirect.valid, updated_fh, Some("redirect_FGHT"), 2)
  ghistPtrGen.register(io.ftq_to_bpu.redirect.valid, updated_ptr, Some("redirect_GHPtr"), 2)
  phistGen.register(io.ftq_to_bpu.redirect.valid, (oldPh << 1) | phNewBit, Some("redirect_phist"), 2)
  ghrBitWriteGens.zip(redirect_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), redirect_ghr_wdatas(i), Some(s"redirect_new_bit_$i"), 2)
  }
  // no need to assign s0_last_pred

  val need_reset = RegNext(reset.asBool) && !reset.asBool

  // Reset
  npcGen.register(need_reset, resetVector.U, Some("reset_pc"), 1)
  foldedGhGen.register(need_reset, 0.U.asTypeOf(s0_folded_gh), Some("reset_FGH"), 1)
  ghistPtrGen.register(need_reset, 0.U.asTypeOf(new CGHPtr), Some("reset_GHPtr"), 1)
  phistGen.register(need_reset, 0.U, Some("reset_phist"), 1)
  lastPredGen.register(need_reset, 0.U.asTypeOf(new BranchPredictionBundle), Some("reset_lastPred"), 1)

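  // The next-cycle s0 state is finally chosen among all registered sources (stall,
  // reset, FTQ redirect, s2 override and s1 prediction) by the priority-mux generators,
  // and every bit of the circular ghr buffer is written through its own per-bit
  // write-enable/data mux collected from the s1, s2 and redirect paths.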
  s0_pc         := npcGen()
  s0_pc_reg     := s0_pc
  s0_folded_gh  := foldedGhGen()
  s0_ghist_ptr  := ghistPtrGen()
  s0_phist      := phistGen()
  s0_last_pred  := lastPredGen()
  (ghr_write_datas zip ghrBitWriteGens).map{case (wd, d) => wd := d()}
  for (i <- 0 until HistoryLength) {
    ghr_wens(i) := Seq(s1_ghr_wens, s2_ghr_wens, redirect_ghr_wens).map(_(i).reduce(_||_)).reduce(_||_)
    when (ghr_wens(i)) {
      ghr(i) := ghr_write_datas(i)
    }
  }

  XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
  XSDebug(io.ftq_to_bpu.update.valid, p"Update from ftq\n")
  XSDebug(io.ftq_to_bpu.redirect.valid, p"Redirect from ftq\n")

  XSDebug("[BP0]                 fire=%d                      pc=%x\n", s0_fire, s0_pc)
  XSDebug("[BP1] v=%d r=%d cr=%d fire=%d             flush=%d pc=%x\n",
    s1_valid, s1_ready, s1_components_ready, s1_fire, s1_flush, s1_pc)
  XSDebug("[BP2] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n",
    s2_valid, s2_ready, s2_components_ready, s2_fire, s2_redirect, s2_flush, s2_pc)
  XSDebug("[FTQ] ready=%d\n", io.bpu_to_ftq.resp.ready)
  XSDebug("resp.s1.target=%x\n", resp.s1.target)
  XSDebug("resp.s2.target=%x\n", resp.s2.target)
  // XSDebug("s0_ghist: %b\n", s0_ghist.predHist)
  // XSDebug("s1_ghist: %b\n", s1_ghist.predHist)
  // XSDebug("s2_ghist: %b\n", s2_ghist.predHist)
  // XSDebug("s2_predicted_ghist: %b\n", s2_predicted_ghist.predHist)
  XSDebug(p"s0_ghist_ptr: $s0_ghist_ptr\n")
  XSDebug(p"s1_ghist_ptr: $s1_ghist_ptr\n")
  XSDebug(p"s2_ghist_ptr: $s2_ghist_ptr\n")

  io.ftq_to_bpu.update.bits.display(io.ftq_to_bpu.update.valid)
  io.ftq_to_bpu.redirect.bits.display(io.ftq_to_bpu.redirect.valid)


  XSPerfAccumulate("s2_redirect", s2_redirect)

  val perfEvents = predictors.asInstanceOf[Composer].perfEvents.map(_._1).zip(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events)
  val perfinfo = IO(new Bundle(){
    val perfEvents = Output(new PerfEventsBundle(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events.length))
  })
  perfinfo.perfEvents := predictors.asInstanceOf[Composer].perfinfo.perfEvents

}