xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision 0ba47cca0f392f9ecff246cf8a96c84c67b5ceec)
1package xiangshan.frontend
2
3import chisel3._
4import chisel3.util._
5import xiangshan._
6import xiangshan.utils._
7import xiangshan.backend.ALUOpType
8import utils._
9
10class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
11  def tagBits = VAddrBits - idxBits - 2
12
13  val tag = UInt(tagBits.W)
14  val idx = UInt(idxBits.W)
15  val offset = UInt(2.W)
16
17  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
18  def getTag(x: UInt) = fromUInt(x).tag
19  def getIdx(x: UInt) = fromUInt(x).idx
20  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
21  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
22}
23
24class Stage1To2IO extends XSBundle {
25  val pc = Output(UInt(VAddrBits.W))
26  val btb = new Bundle {
27    val hits = Output(UInt(FetchWidth.W))
28    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
29  }
30  val jbtac = new Bundle {
31    val hitIdx = Output(UInt(FetchWidth.W))
32    val target = Output(UInt(VAddrBits.W))
33  }
34  val tage = new Bundle {
35    val hits = Output(UInt(FetchWidth.W))
36    val takens = Output(Vec(FetchWidth, Bool()))
37  }
38  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
39  val btbPred = ValidIO(new BranchPrediction)
40}
41
42class BPUStage1 extends XSModule {
43  val io = IO(new Bundle() {
44    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
45    // from backend
46    val redirectInfo = Flipped(new RedirectInfo)
47    // from Stage3
48    val flush = Input(Bool())
49    val s3RollBackHist = Input(UInt(HistoryLength.W))
50    // to ifu, quick prediction result
51    val s1OutPred = ValidIO(new BranchPrediction)
52    // to Stage2
53    val out = Decoupled(new Stage1To2IO)
54  })
55
56<<<<<<< Updated upstream
57=======
58  // flush Stage1 when io.flush || io.redirect.valid
59
60  val predictWidth = 8
61  def btbTarget = new Bundle {
62    val addr = UInt(VAddrBits.W)
63    val pred = UInt(2.W) // 2-bit saturated counter as a quick predictor
64    val _type = UInt(2.W)
65    val offset = if (offsetBits()) Some(UInt(offsetBits().W)) else None
66
67    def offsetBits() = log2Up(FetchWidth / predictWidth)
68  }
69
70  def btbEntry() = new Bundle {
71    val valid = Bool()
72    // TODO: don't need full length of tag and target
73    val tag = UInt(btbAddr.tagBits.W)
74    val target = Vec(predictWidth, btbTarget)
75  }
76
77  val btb = List.fill(BtbWays)(List.fill(BtbBanks)(
78    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = false))))
79
80  // val btbReadBank = btbAddr.getBank(io.in.pc.bits)
81
82  // BTB read requests
83  for (w <- 0 until BtbWays) {
84    for (b <- 0 until BtbBanks) {
85      btb(w)(b).reset := reset.asBool
86      btb(w)(b).io.r.req.valid := io.in.pc.valid && b.U === btbAddr.getBank(io.in.pc.bits)
87      btb(w)(b).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
88    }
89  }
90
91  // latch pc for 1 cycle latency when reading SRAM
92  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
93  val btbRead = Wire(Vec(BtbWays, Vec(BtbBanks, btbEntry())))
94  val btbHits = Wire(Vec(BtbWays, Bool()))
95
96  // #(predictWidth) results
97  val btbTargets = Wire(Vec(predictWidth, UInt(VAddrBits.W)))
98  val btbTypes = Wire(Vec(predictWidth, UInt(2.W)))
99  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
100  val btbTakens = Wire(Vec(predictWidth, Bool()))
101  for (w <- 0 until BtbWays) {
102    for (b <- 0 until BtbBanks) {
103      btbRead(w)(b) := btb(w)(b).io.r.resp.data(0)
104    }
105  }
106  for (i <- 0 until predictWidth) {
107    // btbHits(i) := false.B
108    for (w <- 0 until BtbWays) {
109      btbHits(w) := false.B
110      for (b <- 0 until BtbBanks) {
111        when (b.U === btbAddr.getBank(pcLatch) && btbRead(w)(b).valid && btbRead(w)(b).tag === btbAddr.getTag(pcLatch))) {
112          btbHits(w) := !flush && RegNext(btb(w)(b).io.r.req.fire(), init = false.B)
113          btbTargets(i) := btbRead(w)(b).target(i)
114          btbTypes(i) := btbRead(w)(b)._type(i)
115          btbTakens(i) := (btbRead(b)(w).pred(i))(1).asBool
116        }.otherwise {
117          btbHits(w) := false.B
118          btbTargets(i) := DontCare
119          btbTypes(i) := DontCare
120          btbTakens(i) := DontCare
121        }
122      }
123    }
124  }
125
126  val btbTakenidx := MuxCase(0.U, (0 until predictWidth).map(i => btbTakens(i)) zip (0.U until predictWidth.U))
127  val btbTakenTarget := btbTargets(btbTakenidx)
128  val btbTakenType := btbTypes(btbTakenidx)
129
130  // JBTAC, divided into 8 banks, makes prediction for indirect jump except ret.
131  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
132  def jbtacEntry() = new Bundle {
133    val valid = Bool()
134    // TODO: don't need full length of tag and target
135    val tag = UInt(jbtacAddr.tagBits.W)
136    val target = UInt(VAddrBits.W)
137    val offset = UInt(log2Up(FetchWidth).W)
138  }
139
140  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = false)))
141
142  (0 until JbtacBanks).map(b => jbtac(b).reset := reset.asBool)
143  (0 until JbtacBanks).map(b => jbtac(b).io.r.req.valid := io.in.pc.valid && b.U === jbtacAddr.getBank(io.in.pc.bits))
144  (0 until JbtacBanks).map(b => jbtac(b).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(io.in.pc.bits))
145  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
146  (0 until JbtacBanks).map(b => jbtacRead(b) := jbtac(b).io.r.resp.data(0))
147
148  val jbtacHits = Wire(Vec(JbtacBanks, Bool()))
149  val jbtacHitIdxs = Wire(UInt(log2Up(FetchWidth).W))
150  val jbtacTargets = Wire(UInt(VAddrBits.W))
151
152  val jbtacHit = Wire(Bool())
153  val jbtacHitIdx = Wire(UInt(log2Up(FetchWidth).W))
154  val jbtacTarget = Wire(UInt(VAddrBits.W))
155
156  jbtacHit := jbtacRead(b).valid
157  jbtacHitIdx := jbtacRead.offset
158  jbtacTarget := jbtacRead.target
159  for (b <- 0 until JbtacBanks) {
160    when (jbtacAddr.getBank(pcLatch) === b.U && jbtacRead(b).valid && jbtacRead(b).ta === jbtacAddr.getTag(pcLatch)) {
161      jbtacHit := !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
162      jbtacTarget := jbtacRead(b).target
163    }.otherwise {
164      jbtacHits(i) := false.B
165      jbtacTargets(i) := DontCare
166    }
167  }
168
169  // redirect based on BTB and JBTAC
170  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
171  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))
172
173  def getLowerMask(idx: UInt, len: Int) = (0 until len).map(i => idx >> i.U).reduce(_|_)
174  def getLowestBit(idx: UInt, len: Int) = Mux(idx(0), 1.U(len.W), Reverse(((0 until len).map(i => Reverse(idx(len - 1, 0)) >> i.U).reduce(_|_) + 1.U) >> 1.U))
175
176
177  io.s1OutPred.valid := RegNext(io.in.pc.valid)
178  io.s1OutPred.redirect := btbHits.orR && btbTakens.orR
179  io.s1OutPred.instrValid := ~getLowerMask(btbTakenidx, FetchWidth)
180  io.s1OutPred.target := btbTakenTarget
181  io.s1OutPred.hist := DontCare
182  io.s1OutPred.rasSp := DontCare
183  io.s1OutPred.rasTopCtr := DontCare
184
185
186
187>>>>>>> Stashed changes
188  // TODO: delete this!!!
189  io.in.pc.ready := true.B
190
191  io.out.valid := false.B
192  io.out.bits := DontCare
193
194  // flush Stage1 when io.flush
195  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)
196
197  // global history register
198  val ghr = RegInit(0.U(HistoryLength.W))
199  // modify updateGhr and newGhr when updating ghr
200  val updateGhr = WireInit(false.B)
201  val newGhr = WireInit(0.U(HistoryLength.W))
202  when (updateGhr) { ghr := newGhr }
203  // use hist as global history!!!
204  val hist = Mux(updateGhr, newGhr, ghr)
205
206  // Tage predictor
207  val tage = Module(new Tage)
208  tage.io.req.valid := io.in.pc.fire()
209  tage.io.req.bits.pc := io.in.pc.bits
210  tage.io.req.bits.hist := hist
211  tage.io.redirectInfo <> io.redirectInfo
212  io.out.bits.tage <> tage.io.out
213  io.btbOut.bits.tageMeta := tage.io.meta
214
215}
216
217class Stage2To3IO extends Stage1To2IO {
218}
219
220class BPUStage2 extends XSModule {
221  val io = IO(new Bundle() {
222    // flush from Stage3
223    val flush = Input(Bool())
224    val in = Flipped(Decoupled(new Stage1To2IO))
225    val out = Decoupled(new Stage2To3IO)
226  })
227
228  // flush Stage2 when Stage3 or banckend redirects
229  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
230  io.out.valid := !flushS2 && RegNext(io.in.fire())
231  io.in.ready := !io.out.valid || io.out.fire()
232
233  // do nothing
234  io.out.bits := RegEnable(io.in.bits, io.in.fire())
235}
236
237class BPUStage3 extends XSModule {
238  val io = IO(new Bundle() {
239    val flush = Input(Bool())
240    val in = Flipped(Decoupled(new Stage2To3IO))
241    val out = ValidIO(new BranchPrediction)
242    // from icache
243    val predecode = Flipped(ValidIO(new Predecode))
244    // from backend
245    val redirectInfo = Flipped(new RedirectInfo)
246    // to Stage1 and Stage2
247    val flushBPU = Output(Bool())
248    // to Stage1, restore ghr in stage1 when flushBPU is valid
249    val s1RollBackHist = Output(UInt(HistoryLength.W))
250  })
251
252  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
253  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
254  val validLatch = RegInit(false.B)
255  when (io.in.fire()) { inLatch := io.in.bits }
256  when (io.in.fire()) {
257    validLatch := !io.flush
258  }.elsewhen (io.out.valid) {
259    validLatch := false.B
260  }
261  io.out.valid := validLatch && io.predecode.valid && !flushS3
262  io.in.ready := !validLatch || io.out.valid
263
264  // RAS
265  // TODO: split retAddr and ctr
266  def rasEntry() = new Bundle {
267    val retAddr = UInt(VAddrBits.W)
268    val ctr = UInt(8.W) // layer of nested call functions
269  }
270  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
271  val sp = Counter(RasSize)
272  val rasTop = ras(sp.value)
273  val rasTopAddr = rasTop.retAddr
274
275  // get the first taken branch/jal/call/jalr/ret in a fetch line
276  // brTakenIdx/jalIdx/callIdx/jalrIdx/retIdx/jmpIdx is one-hot encoded.
277  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction.
278  val brIdx = inLatch.btb.hits & Cat(io.predecode.bits.fuTypes.map { t => ALUOpType.isBranch(t) }).asUInt & io.predecode.bits.mask
279  val brTakenIdx = LowestBit(brIdx & inLatch.tage.takens.asUInt, FetchWidth)
280  val jalIdx = LowestBit(inLatch.btb.hits & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jal }).asUInt & io.predecode.bits.mask, FetchWidth)
281  val callIdx = LowestBit(inLatch.btb.hits & io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.call }).asUInt, FetchWidth)
282  val jalrIdx = LowestBit(inLatch.jbtac.hitIdx & io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jalr }).asUInt, FetchWidth)
283  val retIdx = LowestBit(io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.ret }).asUInt, FetchWidth)
284
285  val jmpIdx = LowestBit(brTakenIdx | jalIdx | callIdx | jalrIdx | retIdx, FetchWidth)
286  val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & LowerMask(jmpIdx, FetchWidth)
287
288  io.out.bits.redirect := jmpIdx.orR.asBool
289  io.out.bits.target := Mux(jmpIdx === retIdx, rasTopAddr,
290    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
291    Mux(jmpIdx === 0.U, inLatch.pc + 4.U, // TODO: RVC
292    PriorityMux(jmpIdx, inLatch.btb.targets))))
293  io.out.bits.instrValid := LowerMask(jmpIdx, FetchWidth).asTypeOf(Vec(FetchWidth, Bool()))
294  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
295  //io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
296  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
297  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
298  val firstHist = inLatch.btbPred.bits.hist(0)
299  // there may be several notTaken branches before the first jump instruction,
300  // so we need to calculate how many zeroes should each instruction shift in its global history.
301  // each history is exclusive of instruction's own jump direction.
302  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
303  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
304  (0 until FetchWidth).map(i => shift(i) := Mux(!brNotTakenIdx(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
305  for (j <- 0 until FetchWidth) {
306    var tmp = 0.U
307    for (i <- 0 until FetchWidth) {
308      tmp = tmp + shift(i)(j)
309    }
310    histShift(j) := tmp
311  }
312  (0 until FetchWidth).map(i => io.out.bits.hist(i) := firstHist << histShift(i))
313  // save ras checkpoint info
314  io.out.bits.rasSp := sp.value
315  io.out.bits.rasTopCtr := rasTop.ctr
316
317  // flush BPU and redirect when target differs from the target predicted in Stage1
318  io.out.bits.redirect := !inLatch.btbPred.bits.redirect ^ jmpIdx.orR.asBool ||
319    inLatch.btbPred.bits.redirect && jmpIdx.orR.asBool && io.out.bits.target =/= inLatch.btbPred.bits.target
320  io.flushBPU := io.out.bits.redirect && io.out.valid
321
322  // speculative update RAS
323  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
324  rasWrite.retAddr := inLatch.pc + OHToUInt(callIdx) << 2.U + 4.U
325  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
326  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
327  when (io.out.valid) {
328    when (jmpIdx === callIdx) {
329      ras(Mux(allocNewEntry, sp.value + 1.U, sp.value)) := rasWrite
330      when (allocNewEntry) { sp.value := sp.value + 1.U }
331    }.elsewhen (jmpIdx === retIdx) {
332      when (rasTop.ctr === 1.U) {
333        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
334      }.otherwise {
335        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
336      }
337    }
338  }
339  // use checkpoint to recover RAS
340  val recoverSp = io.redirectInfo.redirect.rasSp
341  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
342  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
343    sp.value := recoverSp
344    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
345  }
346
347  // roll back global history in S1 if S3 redirects
348  io.s1RollBackHist := PriorityMux(jmpIdx, io.out.bits.hist)
349}
350
351class BPU extends XSModule {
352  val io = IO(new Bundle() {
353    // from backend
354    // flush pipeline if misPred and update bpu based on redirect signals from brq
355    val redirectInfo = Flipped(new RedirectInfo)
356
357    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
358
359    val btbOut = ValidIO(new BranchPrediction)
360    val tageOut = ValidIO(new BranchPrediction)
361
362    // predecode info from icache
363    // TODO: simplify this after implement predecode unit
364    val predecode = Flipped(ValidIO(new Predecode))
365  })
366
367  val s1 = Module(new BPUStage1)
368  val s2 = Module(new BPUStage2)
369  val s3 = Module(new BPUStage3)
370
371  s1.io.redirectInfo <> io.redirectInfo
372  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
373  s1.io.in.pc.valid := io.in.pc.valid
374  s1.io.in.pc.bits <> io.in.pc.bits
375  io.btbOut <> s1.io.btbOut
376  s1.io.s3RollBackHist := s3.io.s1RollBackHist
377
378  s1.io.out <> s2.io.in
379  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
380
381  s2.io.out <> s3.io.in
382  s3.io.flush := io.redirectInfo.flush()
383  s3.io.predecode <> io.predecode
384  io.tageOut <> s3.io.out
385  s3.io.redirectInfo <> io.redirectInfo
386
387  // TODO: delete this and put BTB and JBTAC into Stage1
388  /*
389  val flush = BoolStopWatch(io.redirect.valid, io.in.pc.valid, startHighPriority = true)
390
391  // BTB makes a quick prediction for branch and direct jump, which is
392  // 4-way set-associative, and each way is divided into 4 banks.
393  val btbAddr = new TableAddr(log2Up(BtbSets), BtbBanks)
394  def btbEntry() = new Bundle {
395    val valid = Bool()
396    // TODO: don't need full length of tag and target
397    val tag = UInt(btbAddr.tagBits.W)
398    val _type = UInt(2.W)
399    val target = UInt(VAddrBits.W)
400    val pred = UInt(2.W) // 2-bit saturated counter as a quick predictor
401  }
402
403  val btb = List.fill(BtbBanks)(List.fill(BtbWays)(
404    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = true))))
405
406  // val fetchPkgAligned = btbAddr.getBank(io.in.pc.bits) === 0.U
407  val HeadBank = btbAddr.getBank(io.in.pc.bits)
408  val TailBank = btbAddr.getBank(io.in.pc.bits + FetchWidth.U << 2.U - 4.U)
409  for (b <- 0 until BtbBanks) {
410    for (w <- 0 until BtbWays) {
411      btb(b)(w).reset := reset.asBool
412      btb(b)(w).io.r.req.valid := io.in.pc.valid && Mux(TailBank > HeadBank, b.U >= HeadBank && b.U <= TailBank, b.U >= TailBank || b.U <= HeadBank)
413      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
414    }
415  }
416  // latch pc for 1 cycle latency when reading SRAM
417  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
418  val btbRead = Wire(Vec(BtbBanks, Vec(BtbWays, btbEntry())))
419  val btbHits = Wire(Vec(FetchWidth, Bool()))
420  val btbTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
421  val btbTypes = Wire(Vec(FetchWidth, UInt(2.W)))
422  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
423  val btbTakens = Wire(Vec(FetchWidth, Bool()))
424  for (b <- 0 until BtbBanks) {
425    for (w <- 0 until BtbWays) {
426      btbRead(b)(w) := btb(b)(w).io.r.resp.data(0)
427    }
428  }
429  for (i <- 0 until FetchWidth) {
430    btbHits(i) := false.B
431    for (b <- 0 until BtbBanks) {
432      for (w <- 0 until BtbWays) {
433        when (b.U === btbAddr.getBank(pcLatch) && btbRead(b)(w).valid && btbRead(b)(w).tag === btbAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2)) {
434          btbHits(i) := !flush && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
435          btbTargets(i) := btbRead(b)(w).target
436          btbTypes(i) := btbRead(b)(w)._type
437          // btbPreds(i) := btbRead(b)(w).pred
438          btbTakens(i) := (btbRead(b)(w).pred)(1).asBool
439        }.otherwise {
440          btbHits(i) := false.B
441          btbTargets(i) := DontCare
442          btbTypes(i) := DontCare
443          btbTakens(i) := DontCare
444        }
445      }
446    }
447  }
448
449  // JBTAC, divided into 8 banks, makes prediction for indirect jump except ret.
450  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
451  def jbtacEntry() = new Bundle {
452    val valid = Bool()
453    // TODO: don't need full length of tag and target
454    val tag = UInt(jbtacAddr.tagBits.W)
455    val target = UInt(VAddrBits.W)
456  }
457
458  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = true)))
459
460  (0 until JbtacBanks).map(i => jbtac(i).reset := reset.asBool)
461  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.valid := io.in.pc.valid)
462  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(Cat((io.in.pc.bits)(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2))
463
464  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
465  (0 until JbtacBanks).map(i => jbtacRead(i) := jbtac(i).io.r.resp.data(0))
466  val jbtacHits = Wire(Vec(FetchWidth, Bool()))
467  val jbtacTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
468  val jbtacHeadBank = jbtacAddr.getBank(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)))
469  for (i <- 0 until FetchWidth) {
470    jbtacHits(i) := false.B
471    for (b <- 0 until JbtacBanks) {
472      when (jbtacHeadBank + i.U === b.U) {
473        jbtacHits(i) := jbtacRead(b).valid && jbtacRead(b).tag === jbtacAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2) &&
474          !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
475        jbtacTargets(i) := jbtacRead(b).target
476      }.otherwise {
477        jbtacHits(i) := false.B
478        jbtacTargets(i) := DontCare
479      }
480    }
481  }
482
483  // redirect based on BTB and JBTAC
484  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
485  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))
486
487
488  // update bpu, including BTB, JBTAC...
489  // 1. update BTB
490  // 1.1 read the selected bank
491  for (b <- 0 until BtbBanks) {
492    for (w <- 0 until BtbWays) {
493      btb(b)(w).io.r.req.valid := io.redirect.valid && btbAddr.getBank(io.redirect.bits.pc) === b.U
494      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.redirect.bits.pc)
495    }
496  }
497
498  // 1.2 match redirect pc tag with the 4 tags in a btb line, find a way to write
499  // val redirectLatch = RegEnable(io.redirect.bits, io.redirect.valid)
500  val redirectLatch = RegNext(io.redirect.bits, init = 0.U.asTypeOf(new Redirect))
501  val bankLatch = btbAddr.getBank(redirectLatch.pc)
502  val btbUpdateRead = Wire(Vec(BtbWays, btbEntry()))
503  val btbValids = Wire(Vec(BtbWays, Bool()))
504  val btbUpdateTagHits = Wire(Vec(BtbWays, Bool()))
505  for (b <- 0 until BtbBanks) {
506    for (w <- 0 until BtbWays) {
507      when (b.U === bankLatch) {
508        btbUpdateRead(w) := btb(b)(w).io.r.resp.data(0)
509        btbValids(w) := btbUpdateRead(w).valid && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
510      }.otherwise {
511        btbUpdateRead(w) := 0.U.asTypeOf(btbEntry())
512        btbValids(w) := false.B
513      }
514    }
515  }
516  (0 until BtbWays).map(w => btbUpdateTagHits(w) := btbValids(w) && btbUpdateRead(w).tag === btbAddr.getTag(redirectLatch.pc))
517  // val btbWriteWay = Wire(Vec(BtbWays, Bool()))
518  val btbWriteWay = Wire(UInt(BtbWays.W))
519  val btbInvalids = ~ btbValids.asUInt
520  when (btbUpdateTagHits.asUInt.orR) {
521    // tag hits
522    btbWriteWay := btbUpdateTagHits.asUInt
523  }.elsewhen (!btbValids.asUInt.andR) {
524    // no tag hits but there are free entries
525    btbWriteWay := Mux(btbInvalids >= 8.U, "b1000".U,
526      Mux(btbInvalids >= 4.U, "b0100".U,
527      Mux(btbInvalids >= 2.U, "b0010".U, "b0001".U)))
528  }.otherwise {
529    // no tag hits and no free entry, select a victim way
530    btbWriteWay := UIntToOH(LFSR64()(log2Up(BtbWays) - 1, 0))
531  }
532
533  // 1.3 calculate new 2-bit counter value
534  val btbWrite = WireInit(0.U.asTypeOf(btbEntry()))
535  btbWrite.valid := true.B
536  btbWrite.tag := btbAddr.getTag(redirectLatch.pc)
537  btbWrite._type := redirectLatch._type
538  btbWrite.target := redirectLatch.brTarget
539  val oldPred = WireInit("b01".U)
540  oldPred := PriorityMux(btbWriteWay.asTypeOf(Vec(BtbWays, Bool())), btbUpdateRead.map{ e => e.pred })
541  val newPred = Mux(redirectLatch.taken, Mux(oldPred === "b11".U, "b11".U, oldPred + 1.U),
542    Mux(oldPred === "b00".U, "b00".U, oldPred - 1.U))
543  btbWrite.pred := Mux(btbUpdateTagHits.asUInt.orR && redirectLatch._type === BTBtype.B, newPred, "b01".U)
544
545  // 1.4 write BTB
546  for (b <- 0 until BtbBanks) {
547    for (w <- 0 until BtbWays) {
548      when (b.U === bankLatch) {
549        btb(b)(w).io.w.req.valid := OHToUInt(btbWriteWay) === w.U &&
550          RegNext(io.redirect.valid, init = false.B) &&
551          (redirectLatch._type === BTBtype.B || redirectLatch._type === BTBtype.J)
552        btb(b)(w).io.w.req.bits.setIdx := btbAddr.getBankIdx(redirectLatch.pc)
553        btb(b)(w).io.w.req.bits.data := btbWrite
554      }.otherwise {
555        btb(b)(w).io.w.req.valid := false.B
556        btb(b)(w).io.w.req.bits.setIdx := DontCare
557        btb(b)(w).io.w.req.bits.data := DontCare
558      }
559    }
560  }
561
562  // 2. update JBTAC
563  val jbtacWrite = WireInit(0.U.asTypeOf(jbtacEntry()))
564  jbtacWrite.valid := true.B
565  jbtacWrite.tag := jbtacAddr.getTag(io.redirect.bits.pc)
566  jbtacWrite.target := io.redirect.bits.target
567  (0 until JbtacBanks).map(b =>
568    jbtac(b).io.w.req.valid := io.redirect.valid &&
569      b.U === jbtacAddr.getBank(io.redirect.bits.pc) &&
570      io.redirect.bits._type === BTBtype.I)
571  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.setIdx := jbtacAddr.getBankIdx(io.redirect.bits.pc))
572  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.data := jbtacWrite)
573  */
574}
575