xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision 80d2974b083ab30ee12d60853063f1c370505af3)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType

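// Address layout helper: splits a VAddrBits-wide PC into {tag, idx, 1-bit offset}
// for a banked prediction table; the low idx bits select the bank.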
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

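// Per-slot prediction results from the micro BTB (ubtb) and the main BTB.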
class BTBResponse extends XSBundle {
  // the valid bits indicate whether a target is hit
  val ubtb = new Bundle {
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val takens = Vec(PredictWidth, Bool())
  }
  // the valid bits indicate whether a target is hit
  val btb = new Bundle {
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val takens = Vec(PredictWidth, Bool())
  }
}

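// Payload handed from one BPU pipeline stage to the next.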
class BPUStageIO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btbResp = Output(new BTBResponse)
  val brInfo = Output(Vec(PredictWidth, new BranchInfo))
}


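// Stage 1: receives the fetch PC from IF1, gives a first prediction to IF2
// and forwards its intermediate results to stage 2.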
class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = new Bundle { val pc = Flipped(ValidIO(UInt(VAddrBits.W))) }
    val pred = Decoupled(new BranchPrediction)
    val out = Decoupled(new BPUStageIO)
    val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  })

}

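// Stage 2: refines the stage-1 result and forwards it to stage 3; its prediction goes to IF3.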
class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new BPUStageIO))
    val pred = Decoupled(new BranchPrediction)
    val out = Decoupled(new BPUStageIO)
    val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo)) // delete this if useless
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo)) // delete this if useless
  })
}

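// Stage 3: produces the final prediction for IF4, using predecode information from the IFU.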
class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new BPUStageIO))
    val pred = Decoupled(new BranchPrediction)
    val predecode = Flipped(ValidIO(new Predecode))
  })

}

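// Common IO shared by all BPU implementations, so BPU and FakeBPU are interchangeable.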
class BaseBPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
    // from ifu, frontend redirect
    val flush = Input(UInt(3.W))
    // from if1
    val in = new Bundle { val pc = Flipped(ValidIO(UInt(VAddrBits.W))) }
    // to if2/if3/if4
    val out = Vec(3, Decoupled(new BranchPrediction))
    // from if4
    val predecode = Flipped(ValidIO(new Predecode))
    // to if4, some bpu info used for updating
    val branchInfo = Decoupled(Vec(PredictWidth, new BranchInfo))
  })
}

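// A stub predictor: drives all outputs with don't-cares and never redirects.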
class FakeBPU extends BaseBPU {
  io.out.foreach(i => {
    i <> DontCare
    i.bits.redirect := false.B
  })
  io.branchInfo <> DontCare
}

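// The real predictor: a three-stage pipeline; each stage gets its own flush bit
// and sends its prediction to IF2/IF3/IF4 respectively.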
class BPU extends BaseBPU {

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  s1.io.flush := io.flush(0)
  s2.io.flush := io.flush(1)
  s3.io.flush := io.flush(2)

  s1.io.in <> io.in
  s2.io.in <> s1.io.out
  s3.io.in <> s2.io.out

  io.out(0) <> s1.io.pred
  io.out(1) <> s2.io.pred
  io.out(2) <> s3.io.pred

  s1.io.outOfOrderBrInfo <> io.outOfOrderBrInfo
  s1.io.inOrderBrInfo <> io.inOrderBrInfo
  s2.io.outOfOrderBrInfo <> io.outOfOrderBrInfo
  s2.io.inOrderBrInfo <> io.inOrderBrInfo

  s3.io.predecode <> io.predecode
}
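
// Illustration only (an assumption, not part of this design): because BPU and FakeBPU
// share the BaseBPU interface, a frontend could pick an implementation with a config
// switch, mirroring the Tage/FakeTAGE selection in the old code below. `EnableBPU`
// here is a hypothetical parameter used purely for this sketch.
// val bpu: BaseBPU = if (EnableBPU) Module(new BPU) else Module(new FakeBPU)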


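// The previous implementation is kept below, commented out.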
// class BPUStage1 extends XSModule {
//  val io = IO(new Bundle() {
//    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
//    // from backend
//    val redirectInfo = Input(new RedirectInfo)
//    // from Stage3
//    val flush = Input(Bool())
//    val s3RollBackHist = Input(UInt(HistoryLength.W))
//    val s3Taken = Input(Bool())
//    // to ifu, quick prediction result
//    val s1OutPred = ValidIO(new BranchPrediction)
//    // to Stage2
//    val out = Decoupled(new Stage1To2IO)
//  })

//  io.in.pc.ready := true.B

//  // flush Stage1 when io.flush
//  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)
//  val s1OutPredLatch = RegEnable(io.s1OutPred.bits, RegNext(io.in.pc.fire()))
//  val outLatch = RegEnable(io.out.bits, RegNext(io.in.pc.fire()))

//  val s1Valid = RegInit(false.B)
//  when (io.flush) {
//    s1Valid := true.B
//  }.elsewhen (io.in.pc.fire()) {
//    s1Valid := true.B
//  }.elsewhen (io.out.fire()) {
//    s1Valid := false.B
//  }
//  io.out.valid := s1Valid


//  // global history register
//  val ghr = RegInit(0.U(HistoryLength.W))
//  // modify updateGhr and newGhr when updating ghr
//  val updateGhr = WireInit(false.B)
//  val newGhr = WireInit(0.U(HistoryLength.W))
//  when (updateGhr) { ghr := newGhr }
//  // use hist as global history!!!
//  val hist = Mux(updateGhr, newGhr, ghr)

//  // Tage predictor
//  val tage = if(EnableBPD) Module(new Tage) else Module(new FakeTAGE)
//  tage.io.req.valid := io.in.pc.fire()
//  tage.io.req.bits.pc := io.in.pc.bits
//  tage.io.req.bits.hist := hist
//  tage.io.redirectInfo <> io.redirectInfo
//  io.s1OutPred.bits.tageMeta := tage.io.meta

//  // latch pc for 1 cycle latency when reading SRAM
//  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.fire())
//  // TODO: pass real mask in
//  // val maskLatch = RegEnable(btb.io.in.mask, io.in.pc.fire())
//  val maskLatch = Fill(PredictWidth, 1.U(1.W))

//  val r = io.redirectInfo.redirect
//  val updateFetchpc = r.pc - (r.fetchIdx << 1.U)
//  // BTB
//  val btb = Module(new BTB)
//  btb.io.in.pc <> io.in.pc
//  btb.io.in.pcLatch := pcLatch
//  // TODO: pass real mask in
//  btb.io.in.mask := Fill(PredictWidth, 1.U(1.W))
//  btb.io.redirectValid := io.redirectInfo.valid
//  btb.io.flush := io.flush

//  // btb.io.update.fetchPC := updateFetchpc
//  // btb.io.update.fetchIdx := r.fetchIdx
//  btb.io.update.pc := r.pc
//  btb.io.update.hit := r.btbHit
//  btb.io.update.misPred := io.redirectInfo.misPred
//  // btb.io.update.writeWay := r.btbVictimWay
//  btb.io.update.oldCtr := r.btbPredCtr
//  btb.io.update.taken := r.taken
//  btb.io.update.target := r.brTarget
//  btb.io.update.btbType := r.btbType
//  // TODO: add RVC logic
//  btb.io.update.isRVC := r.isRVC

//  // val btbHit = btb.io.out.hit
//  val btbTaken = btb.io.out.taken
//  val btbTakenIdx = btb.io.out.takenIdx
//  val btbTakenTarget = btb.io.out.target
//  // val btbWriteWay = btb.io.out.writeWay
//  val btbNotTakens = btb.io.out.notTakens
//  val btbCtrs = VecInit(btb.io.out.dEntries.map(_.pred))
//  val btbValids = btb.io.out.hits
//  val btbTargets = VecInit(btb.io.out.dEntries.map(_.target))
//  val btbTypes = VecInit(btb.io.out.dEntries.map(_.btbType))
//  val btbIsRVCs = VecInit(btb.io.out.dEntries.map(_.isRVC))


//  val jbtac = Module(new JBTAC)
//  jbtac.io.in.pc <> io.in.pc
//  jbtac.io.in.pcLatch := pcLatch
//  // TODO: pass real mask in
//  jbtac.io.in.mask := Fill(PredictWidth, 1.U(1.W))
//  jbtac.io.in.hist := hist
//  jbtac.io.redirectValid := io.redirectInfo.valid
//  jbtac.io.flush := io.flush

//  jbtac.io.update.fetchPC := updateFetchpc
//  jbtac.io.update.fetchIdx := r.fetchIdx
//  jbtac.io.update.misPred := io.redirectInfo.misPred
//  jbtac.io.update.btbType := r.btbType
//  jbtac.io.update.target := r.target
//  jbtac.io.update.hist := r.hist
//  jbtac.io.update.isRVC := r.isRVC

//  val jbtacHit = jbtac.io.out.hit
//  val jbtacTarget = jbtac.io.out.target
//  val jbtacHitIdx = jbtac.io.out.hitIdx
//  val jbtacIsRVC = jbtac.io.out.isRVC

//  // calculate global history of each instr
//  val firstHist = RegNext(hist)
//  val histShift = Wire(Vec(PredictWidth, UInt(log2Up(PredictWidth).W)))
//  val shift = Wire(Vec(PredictWidth, Vec(PredictWidth, UInt(1.W))))
//  (0 until PredictWidth).foreach(i => shift(i) := Mux(!btbNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), PredictWidth)).asTypeOf(Vec(PredictWidth, UInt(1.W))))
//  for (j <- 0 until PredictWidth) {
//    var tmp = 0.U
//    for (i <- 0 until PredictWidth) {
//      tmp = tmp + shift(i)(j)
//    }
//    histShift(j) := tmp
//  }

//  // update ghr
//  updateGhr := io.s1OutPred.bits.redirect ||
//               RegNext(io.in.pc.fire) && ~io.s1OutPred.bits.redirect && (btbNotTakens.asUInt & maskLatch).orR || // TODO: use parallel or
//               io.flush
//  val brJumpIdx = Mux(!btbTaken, 0.U, UIntToOH(btbTakenIdx))
//  val indirectIdx = Mux(!jbtacHit, 0.U, UIntToOH(jbtacHitIdx))
//  // if backend redirects, restore history from backend;
//  // if stage3 redirects, restore history from stage3;
//  // if stage1 redirects, speculatively update history;
//  // if none of above happens, check if stage1 has not-taken branches and shift zeroes accordingly
//  newGhr := Mux(io.redirectInfo.flush(),    (r.hist << 1.U) | !(r.btbType === BTBtype.B && !r.taken),
//            Mux(io.flush,                   Mux(io.s3Taken, (io.s3RollBackHist << 1.U) | 1.U, io.s3RollBackHist),
//            Mux(io.s1OutPred.bits.redirect, (PriorityMux(brJumpIdx | indirectIdx, io.s1OutPred.bits.hist) << 1.U | 1.U),
//                                            io.s1OutPred.bits.hist(0) << PopCount(btbNotTakens.asUInt & maskLatch))))

//  def getInstrValid(i: Int): UInt = {
//    val vec = Wire(Vec(PredictWidth, UInt(1.W)))
//    for (j <- 0 until PredictWidth) {
//      if (j <= i)
//        vec(j) := 1.U
//      else
//        vec(j) := 0.U
//    }
//    vec.asUInt
//  }

//  // redirect based on BTB and JBTAC
//  val takenIdx = LowestBit(brJumpIdx | indirectIdx, PredictWidth)

//  // io.out.valid := RegNext(io.in.pc.fire()) && !io.flush

//  // io.s1OutPred.valid := io.out.valid
//  io.s1OutPred.valid := io.out.fire()
//  when (RegNext(io.in.pc.fire())) {
//    io.s1OutPred.bits.redirect := btbTaken || jbtacHit
//    // io.s1OutPred.bits.instrValid := (maskLatch & Fill(PredictWidth, ~io.s1OutPred.bits.redirect || io.s1OutPred.bits.lateJump) |
//    //   PriorityMux(brJumpIdx | indirectIdx, (0 until PredictWidth).map(getInstrValid(_)))).asTypeOf(Vec(PredictWidth, Bool()))
//    io.s1OutPred.bits.instrValid := (maskLatch & Fill(PredictWidth, ~io.s1OutPred.bits.redirect) |
//      PriorityMux(brJumpIdx | indirectIdx, (0 until PredictWidth).map(getInstrValid(_)))).asTypeOf(Vec(PredictWidth, Bool()))
//    for (i <- 0 until (PredictWidth - 1)) {
//      when (!io.s1OutPred.bits.lateJump && (1.U << i) === takenIdx && (!btbIsRVCs(i) && btbValids(i) || !jbtacIsRVC && (1.U << i) === indirectIdx)) {
//        io.s1OutPred.bits.instrValid(i+1) := maskLatch(i+1)
//      }
//    }
//    io.s1OutPred.bits.target := Mux(takenIdx === 0.U, pcLatch + (PopCount(maskLatch) << 1.U), Mux(takenIdx === brJumpIdx, btbTakenTarget, jbtacTarget))
//    io.s1OutPred.bits.lateJump := btb.io.out.isRVILateJump || jbtac.io.out.isRVILateJump
//    (0 until PredictWidth).map(i => io.s1OutPred.bits.hist(i) := firstHist << histShift(i))
//    // io.s1OutPred.bits.btbVictimWay := btbWriteWay
//    io.s1OutPred.bits.predCtr := btbCtrs
//    io.s1OutPred.bits.btbHit := btbValids
//    io.s1OutPred.bits.tageMeta := tage.io.meta // TODO: enableBPD
//    io.s1OutPred.bits.rasSp := DontCare
//    io.s1OutPred.bits.rasTopCtr := DontCare
//  }.otherwise {
//    io.s1OutPred.bits := s1OutPredLatch
//  }

//  when (RegNext(io.in.pc.fire())) {
//    io.out.bits.pc := pcLatch
//    io.out.bits.btb.hits := btbValids.asUInt
//    (0 until PredictWidth).map(i => io.out.bits.btb.targets(i) := btbTargets(i))
//    io.out.bits.jbtac.hitIdx := Mux(jbtacHit, UIntToOH(jbtacHitIdx), 0.U) // UIntToOH(jbtacHitIdx)
//    io.out.bits.jbtac.target := jbtacTarget
//    io.out.bits.tage <> tage.io.out
//    // TODO: we don't need this repeatedly!
//    io.out.bits.hist := io.s1OutPred.bits.hist
//    io.out.bits.btbPred := io.s1OutPred
//  }.otherwise {
//    io.out.bits := outLatch
//  }


//  // debug info
//  XSDebug("in:(%d %d)   pc=%x ghr=%b\n", io.in.pc.valid, io.in.pc.ready, io.in.pc.bits, hist)
//  XSDebug("outPred:(%d) pc=0x%x, redirect=%d instrValid=%b tgt=%x\n",
//    io.s1OutPred.valid, pcLatch, io.s1OutPred.bits.redirect, io.s1OutPred.bits.instrValid.asUInt, io.s1OutPred.bits.target)
//  XSDebug(io.flush && io.redirectInfo.flush(),
//    "flush from backend: pc=%x tgt=%x brTgt=%x btbType=%b taken=%d oldHist=%b fetchIdx=%d isExcpt=%d\n",
//    r.pc, r.target, r.brTarget, r.btbType, r.taken, r.hist, r.fetchIdx, r.isException)
//  XSDebug(io.flush && !io.redirectInfo.flush(),
//    "flush from Stage3:  s3Taken=%d s3RollBackHist=%b\n", io.s3Taken, io.s3RollBackHist)

// }

// class Stage2To3IO extends Stage1To2IO {
// }

// class BPUStage2 extends XSModule {
//  val io = IO(new Bundle() {
//    // flush from Stage3
//    val flush = Input(Bool())
//    val in = Flipped(Decoupled(new Stage1To2IO))
//    val out = Decoupled(new Stage2To3IO)
//  })

//  // flush Stage2 when Stage3 or backend redirects
//  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
//  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
//  when (io.in.fire()) { inLatch := io.in.bits }
//  val validLatch = RegInit(false.B)
//  when (io.flush) {
//    validLatch := false.B
//  }.elsewhen (io.in.fire()) {
//    validLatch := true.B
//  }.elsewhen (io.out.fire()) {
//    validLatch := false.B
//  }

//  io.out.valid := !io.flush && !flushS2 && validLatch
//  io.in.ready := !validLatch || io.out.fire()

//  // do nothing
//  io.out.bits := inLatch

//  // debug info
//  XSDebug("in:(%d %d) pc=%x out:(%d %d) pc=%x\n",
//    io.in.valid, io.in.ready, io.in.bits.pc, io.out.valid, io.out.ready, io.out.bits.pc)
//  XSDebug("validLatch=%d pc=%x\n", validLatch, inLatch.pc)
//  XSDebug(io.flush, "flush!!!\n")
// }

// class BPUStage3 extends XSModule {
//  val io = IO(new Bundle() {
//    val flush = Input(Bool())
//    val in = Flipped(Decoupled(new Stage2To3IO))
//    val out = Decoupled(new BranchPrediction)
//    // from icache
//    val predecode = Flipped(ValidIO(new Predecode))
//    // from backend
//    val redirectInfo = Input(new RedirectInfo)
//    // to Stage1 and Stage2
//    val flushBPU = Output(Bool())
//    // to Stage1, restore ghr in stage1 when flushBPU is valid
//    val s1RollBackHist = Output(UInt(HistoryLength.W))
//    val s3Taken = Output(Bool())
//  })

//  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
//  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
//  val validLatch = RegInit(false.B)
//  val predecodeLatch = RegInit(0.U.asTypeOf(io.predecode.bits))
//  val predecodeValidLatch = RegInit(false.B)
//  when (io.in.fire()) { inLatch := io.in.bits }
//  when (io.flush) {
//    validLatch := false.B
//  }.elsewhen (io.in.fire()) {
//    validLatch := true.B
//  }.elsewhen (io.out.fire()) {
//    validLatch := false.B
//  }

//  when (io.predecode.valid) { predecodeLatch := io.predecode.bits }
//  when (io.flush || io.out.fire()) {
//    predecodeValidLatch := false.B
//  }.elsewhen (io.predecode.valid) {
//    predecodeValidLatch := true.B
//  }

//  val predecodeValid = io.predecode.valid || predecodeValidLatch
//  val predecode = Mux(io.predecode.valid, io.predecode.bits, predecodeLatch)
//  io.out.valid := validLatch && predecodeValid && !flushS3 && !io.flush
//  io.in.ready := !validLatch || io.out.fire()

//  // RAS
//  // TODO: split retAddr and ctr
//  def rasEntry() = new Bundle {
//    val retAddr = UInt(VAddrBits.W)
//    val ctr = UInt(8.W) // layer of nested call functions
//  }
//  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
//  val sp = Counter(RasSize)
//  val rasTop = ras(sp.value)
//  val rasTopAddr = rasTop.retAddr

//  // get the first taken branch/jal/call/jalr/ret in a fetch line
//  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction

//  val tageHits = inLatch.tage.hits
//  val tageTakens = inLatch.tage.takens
//  val btbTakens = inLatch.btbPred.bits.predCtr

//  val brs = inLatch.btb.hits & Reverse(Cat(predecode.fuOpTypes.map { t => ALUOpType.isBranch(t) }).asUInt) & predecode.mask
//  // val brTakens = brs & inLatch.tage.takens.asUInt
//  val brTakens = if (EnableBPD) {
//    // If tage hits, use tage takens, otherwise keep btbpreds
//    // brs & Reverse(Cat(inLatch.tage.takens.map {t => Fill(2, t.asUInt)}).asUInt)
//    XSDebug("tageHits=%b, tageTakens=%b\n", tageHits, tageTakens.asUInt)
//    brs & Reverse(Cat((0 until PredictWidth).map(i => Mux(tageHits(i), tageTakens(i), btbTakens(i)(1)))))
//  } else {
//    brs & Reverse(Cat(inLatch.btbPred.bits.predCtr.map {c => c(1)}).asUInt)
//  }
//  val jals = inLatch.btb.hits & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.jal }).asUInt) & predecode.mask
//  val calls = inLatch.btb.hits & predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.call }).asUInt)
//  val jalrs = inLatch.jbtac.hitIdx & predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.jalr }).asUInt)
//  val rets = predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.ret }).asUInt)

//  val brTakenIdx = PriorityMux(brTakens, (0 until PredictWidth).map(_.U))
//  val jalIdx = PriorityMux(jals, (0 until PredictWidth).map(_.U))
//  val callIdx = PriorityMux(calls, (0 until PredictWidth).map(_.U))
//  val jalrIdx = PriorityMux(jalrs, (0 until PredictWidth).map(_.U))
//  val retIdx = PriorityMux(rets, (0 until PredictWidth).map(_.U))

//  val jmps = (if (EnableRAS) {brTakens | jals | calls | jalrs | rets} else {brTakens | jals | calls | jalrs})
//  val jmpIdx = MuxCase(0.U, (0 until PredictWidth).map(i => (jmps(i), i.U)))
//  io.s3Taken := MuxCase(false.B, (0 until PredictWidth).map(i => (jmps(i), true.B)))

//  // val brNotTakens = VecInit((0 until PredictWidth).map(i => brs(i) && ~inLatch.tage.takens(i) && i.U <= jmpIdx && io.predecode.bits.mask(i)))
//  val brNotTakens = if (EnableBPD) {
//    VecInit((0 until PredictWidth).map(i => brs(i) && i.U <= jmpIdx && Mux(tageHits(i), ~tageTakens(i), ~btbTakens(i)(1)) && predecode.mask(i)))
//  } else {
//    VecInit((0 until PredictWidth).map(i => brs(i) && i.U <= jmpIdx && ~inLatch.btbPred.bits.predCtr(i)(1) && predecode.mask(i)))
//  }

//  // TODO: what if if4 and if2 late jump to the same target?
//  // val lateJump = io.s3Taken && PriorityMux(Reverse(predecode.mask), ((PredictWidth - 1) to 0).map(_.U)) === jmpIdx && !predecode.isRVC(jmpIdx)
//  val lateJump = io.s3Taken && PriorityMux(Reverse(predecode.mask), (0 until PredictWidth).map {i => (PredictWidth - 1 - i).U}) === jmpIdx && !predecode.isRVC(jmpIdx)
//  io.out.bits.lateJump := lateJump

//  io.out.bits.predCtr := inLatch.btbPred.bits.predCtr
//  io.out.bits.btbHit := inLatch.btbPred.bits.btbHit
//  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
//  //io.out.bits.btbType := Mux(jmpIdx === retIdx, BTBtype.R,
//  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
//  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
//  val firstHist = inLatch.btbPred.bits.hist(0)
//  // there may be several notTaken branches before the first jump instruction,
//  // so we need to calculate how many zeroes should each instruction shift in its global history.
//  // each history is exclusive of instruction's own jump direction.
//  val histShift = Wire(Vec(PredictWidth, UInt(log2Up(PredictWidth).W)))
//  val shift = Wire(Vec(PredictWidth, Vec(PredictWidth, UInt(1.W))))
//  (0 until PredictWidth).foreach(i => shift(i) := Mux(!brNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), PredictWidth)).asTypeOf(Vec(PredictWidth, UInt(1.W))))
//  for (j <- 0 until PredictWidth) {
//    var tmp = 0.U
//    for (i <- 0 until PredictWidth) {
//      tmp = tmp + shift(i)(j)
//    }
//    histShift(j) := tmp
//  }
//  (0 until PredictWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
//  // save ras checkpoint info
//  io.out.bits.rasSp := sp.value
//  io.out.bits.rasTopCtr := rasTop.ctr

//  // flush BPU and redirect when target differs from the target predicted in Stage1
//  val tToNt = inLatch.btbPred.bits.redirect && ~io.s3Taken
//  val ntToT = ~inLatch.btbPred.bits.redirect && io.s3Taken
//  val dirDiffers = tToNt || ntToT
//  val tgtDiffers = inLatch.btbPred.bits.redirect && io.s3Taken && io.out.bits.target =/= inLatch.btbPred.bits.target
//  // io.out.bits.redirect := (if (EnableBPD) {dirDiffers || tgtDiffers} else false.B)
//  io.out.bits.redirect := dirDiffers || tgtDiffers
//  io.out.bits.target := Mux(!io.s3Taken, inLatch.pc + (PopCount(predecode.mask) << 1.U), // TODO: RVC
//                        Mux(jmpIdx === retIdx, rasTopAddr,
//                        Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
//                        inLatch.btb.targets(jmpIdx))))
//  // for (i <- 0 until FetchWidth) {
//  //   io.out.bits.instrValid(i) := ((io.s3Taken && i.U <= jmpIdx) || ~io.s3Taken) && io.predecode.bits.mask(i)
//  // }
//  io.out.bits.instrValid := predecode.mask.asTypeOf(Vec(PredictWidth, Bool()))
//  for (i <- (PredictWidth - 1) to 0 by -1) {
//    io.out.bits.instrValid(i) := (io.s3Taken && i.U <= jmpIdx || !io.s3Taken) && predecode.mask(i)
//    if (i != (PredictWidth - 1)) {
//      when (!lateJump && !predecode.isRVC(i) && io.s3Taken && i.U <= jmpIdx) {
//        io.out.bits.instrValid(i+1) := predecode.mask(i+1)
//      }
//    }
//  }
//  io.flushBPU := io.out.bits.redirect && io.out.fire()

//  // speculative update RAS
//  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
//  val retAddr = inLatch.pc + (callIdx << 1.U) + Mux(predecode.isRVC(callIdx), 2.U, 4.U)
//  rasWrite.retAddr := retAddr
//  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
//  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
//  val rasWritePosition = Mux(allocNewEntry, sp.value + 1.U, sp.value)
//  when (io.out.fire() && io.s3Taken) {
//    when (jmpIdx === callIdx) {
//      ras(rasWritePosition) := rasWrite
//      when (allocNewEntry) { sp.value := sp.value + 1.U }
//    }.elsewhen (jmpIdx === retIdx) {
//      when (rasTop.ctr === 1.U) {
//        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
//      }.otherwise {
//        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
//      }
//    }
//  }
//  // use checkpoint to recover RAS
//  val recoverSp = io.redirectInfo.redirect.rasSp
//  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
//  when (io.redirectInfo.flush()) {
//    sp.value := recoverSp
//    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
//  }

//  // roll back global history in S1 if S3 redirects
//  io.s1RollBackHist := Mux(io.s3Taken, io.out.bits.hist(jmpIdx),
//                       io.out.bits.hist(0) << PopCount(brs & predecode.mask & ~Reverse(Cat(inLatch.tage.takens.map {t => Fill(2, t.asUInt)}).asUInt)))

//  // debug info
//  XSDebug(io.in.fire(), "in:(%d %d) pc=%x\n", io.in.valid, io.in.ready, io.in.bits.pc)
//  XSDebug(io.out.fire(), "out:(%d %d) pc=%x redirect=%d predcdMask=%b instrValid=%b tgt=%x\n",
//    io.out.valid, io.out.ready, inLatch.pc, io.out.bits.redirect, predecode.mask, io.out.bits.instrValid.asUInt, io.out.bits.target)
//  XSDebug("flushS3=%d\n", flushS3)
//  XSDebug("validLatch=%d predecode.valid=%d\n", validLatch, predecodeValid)
//  XSDebug("brs=%b brTakens=%b brNTakens=%b jals=%b jalrs=%b calls=%b rets=%b\n",
//    brs, brTakens, brNotTakens.asUInt, jals, jalrs, calls, rets)
//  // ?????condition is wrong
//  // XSDebug(io.in.fire() && callIdx.orR, "[RAS]:pc=0x%x, rasWritePosition=%d, rasWriteAddr=0x%x\n",
//  //           io.in.bits.pc, rasWritePosition, retAddr)
// }

// class BPU extends XSModule {
//  val io = IO(new Bundle() {
//    // from backend
//    // flush pipeline if misPred and update bpu based on redirect signals from brq
//    val redirectInfo = Input(new RedirectInfo)

//    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

//    val btbOut = ValidIO(new BranchPrediction)
//    val tageOut = Decoupled(new BranchPrediction)

//    // predecode info from icache
//    // TODO: simplify this after implement predecode unit
//    val predecode = Flipped(ValidIO(new Predecode))
//  })

//  val s1 = Module(new BPUStage1)
//  val s2 = Module(new BPUStage2)
//  val s3 = Module(new BPUStage3)

//  s1.io.redirectInfo <> io.redirectInfo
//  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
//  s1.io.in.pc.valid := io.in.pc.valid
//  s1.io.in.pc.bits <> io.in.pc.bits
//  io.btbOut <> s1.io.s1OutPred
//  s1.io.s3RollBackHist := s3.io.s1RollBackHist
//  s1.io.s3Taken := s3.io.s3Taken

//  s1.io.out <> s2.io.in
//  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

//  s2.io.out <> s3.io.in
//  s3.io.flush := io.redirectInfo.flush()
//  s3.io.predecode <> io.predecode
//  io.tageOut <> s3.io.out
//  s3.io.redirectInfo <> io.redirectInfo
// }
617