package xiangshan.frontend

import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.utils._
import utils._

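// TableAddr splits a virtual address into | tag | idx | offset(2) | for the
// set-indexed prediction tables. The low log2Up(banks) bits of idx select a
// bank and the remaining idx bits select a set inside that bank; e.g. with
// idxBits = 8 and banks = 4, getBank returns idx(1, 0) and getBankIdx
// returns idx(7, 2).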
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 2

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(2.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  val btbPred = ValidIO(new BranchPrediction)
}

class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    // to ifu, quick prediction result
    val btbOut = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  // flush Stage1 when io.flush
  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)

  // global history register
  val ghr = RegInit(0.U(HistoryLength.W))
  // modify updateGhr and newGhr when updating ghr
  val updateGhr = WireInit(false.B)
  val newGhr = WireInit(0.U(HistoryLength.W))
  when (updateGhr) { ghr := newGhr }
  // use hist as global history!!!
  val hist = Mux(updateGhr, newGhr, ghr)
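  // hist bypasses the value being written this cycle, so the request issued to
  // the predictors always sees the newest speculative global history.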

  // Tage predictor
  val tage = Module(new Tage)
  tage.io.req.valid := io.in.pc.fire()
  tage.io.req.bits.pc := io.in.pc.bits
  tage.io.req.bits.hist := hist
  tage.io.redirectInfo <> io.redirectInfo
  io.out.bits.tage <> tage.io.out
  io.btbOut.bits.tageMeta := tage.io.meta

}

class Stage2To3IO extends Stage1To2IO {
}

class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // flush Stage2 when Stage3 or the backend redirects
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  io.out.valid := !flushS2 && RegNext(io.in.fire())
  io.in.ready := !io.out.valid || io.out.fire()

  // no computation here yet: just register the Stage1 results for one cycle
  io.out.bits := RegEnable(io.in.bits, io.in.fire())
}

class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val out = ValidIO(new BranchPrediction)
    // from icache
    val predecode = Flipped(ValidIO(new Predecode))
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore ghr in stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
  })

  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  when (io.in.fire()) { inLatch := io.in.bits }
  when (io.in.fire()) {
    validLatch := !io.flush
  }.elsewhen (io.out.valid) {
    validLatch := false.B
  }
  io.out.valid := validLatch && io.predecode.valid && !flushS3
  io.in.ready := !validLatch || io.out.valid

  // RAS
  // TODO: split retAddr and ctr
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // how many times this return address has been pushed (nested/recursive calls)
  }
  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
  val sp = Counter(RasSize)
  val rasTop = ras(sp.value)
  val rasTopAddr = rasTop.retAddr

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // brTakenIdx/jalIdx/callIdx/jalrIdx/retIdx/jmpIdx is one-hot encoded.
  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction.
  val brIdx = inLatch.btb.hits & io.predecode.bits.fuTypes.map { t => ALUOpType.isBranch(t) }.asUInt & io.predecode.bits.mask
  val brTakenIdx = LowestBit(brIdx & inLatch.tage.takens.asUInt, FetchWidth)
  val jalIdx = LowestBit(inLatch.btb.hits & io.predecode.bits.fuTypes.map { t => t === ALUOpType.jal }.asUInt & io.predecode.bits.mask, FetchWidth)
  val callIdx = LowestBit(inLatch.btb.hits & io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.call }.asUInt, FetchWidth)
  val jalrIdx = LowestBit(inLatch.jbtac.hitIdx & io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.jalr }.asUInt, FetchWidth)
  val retIdx = LowestBit(io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.ret }.asUInt, FetchWidth)

  val jmpIdx = LowestBit(brTakenIdx | jalIdx | callIdx | jalrIdx | retIdx, FetchWidth)
  val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & LowerMask(jmpIdx, FetchWidth)
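  // Example (FetchWidth = 8): if the first taken branch/jump is in slot 3,
  // jmpIdx is "b00001000" and brNotTakenIdx keeps only the not-taken branches
  // in slots 0 to 2.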

  io.out.bits.redirect := jmpIdx.orR.asBool
  io.out.bits.target := Mux(jmpIdx === retIdx, rasTopAddr,
    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
    Mux(jmpIdx === 0.U, inLatch.pc + 4.U, // TODO: RVC
    PriorityMux(jmpIdx, inLatch.btb.targets))))
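  // i.e. target priority: ret -> RAS top, jalr -> JBTAC target, no jump -> fall-through,
  // otherwise the BTB target of the first taken branch/jal/call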
  io.out.bits.instrValid := LowerMask(jmpIdx, FetchWidth).asTypeOf(Vec(FetchWidth, Bool()))
  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
  //io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist
  // there may be several not-taken branches before the first jump instruction,
  // so we need to calculate how many zeroes each instruction should shift into its global history.
  // each history excludes the instruction's own jump direction.
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  (0 until FetchWidth).foreach(i =>
    histShift(i) := (if (i == 0) 0.U else PopCount(brNotTakenIdx(i - 1, 0)))
  )
  (0 until FetchWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
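  // Example (FetchWidth = 8): with brNotTakenIdx = "b00000101", instruction 0
  // shifts by 0, instructions 1-2 by 1, and instructions 3-7 by 2, since two
  // not-taken branches (slots 0 and 2) come before them.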
  // save ras checkpoint info
  io.out.bits.rasSp := sp.value
  io.out.bits.rasTopCtr := rasTop.ctr

  // flush BPU and redirect when the taken/not-taken decision or the target differs from Stage1's prediction
  io.out.bits.redirect := (inLatch.btbPred.bits.redirect ^ jmpIdx.orR.asBool) ||
    (inLatch.btbPred.bits.redirect && jmpIdx.orR.asBool && io.out.bits.target =/= inLatch.btbPred.bits.target)
  io.flushBPU := io.out.bits.redirect && io.out.valid

  // speculative update RAS
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  rasWrite.retAddr := inLatch.pc + (OHToUInt(callIdx) << 2.U) + 4.U
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  when (io.out.valid) {
    when (jmpIdx === callIdx) {
      ras(Mux(allocNewEntry, sp.value + 1.U, sp.value)) := rasWrite
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      when (rasTop.ctr === 1.U) {
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        ras(sp.value) := Cat(rasTopAddr, rasTop.ctr - 1.U).asTypeOf(rasEntry())
      }
    }
  }
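  // Calls that push the same return address as the current top reuse that entry
  // and only bump ctr; the matching rets count ctr back down and only pop once
  // ctr reaches 1, so deep recursion is compressed into a single RAS slot.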
  // use checkpoint to recover RAS
  val recoverSp = io.redirectInfo.redirect.rasSp
  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
    sp.value := recoverSp
    ras(recoverSp) := Cat(ras(recoverSp).retAddr, recoverCtr).asTypeOf(rasEntry())
  }

  // roll back global history in S1 if S3 redirects
  io.s1RollBackHist := PriorityMux(jmpIdx, io.out.bits.hist)
}

class BPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    // flush pipeline if misPred and update bpu based on redirect signals from brq
    val redirectInfo = Flipped(new RedirectInfo)

    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after implementing the predecode unit
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

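  // Pipeline wiring: Stage1 and Stage2 are flushed by a backend redirect or by
  // Stage3 disagreeing with the Stage1 prediction; Stage3 itself is only
  // flushed by a backend redirect.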
  s1.io.redirectInfo <> io.redirectInfo
  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.btbOut
  s1.io.s3RollBackHist := s3.io.s1RollBackHist

  s1.io.out <> s2.io.in
  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

  s2.io.out <> s3.io.in
  s3.io.flush := io.redirectInfo.flush()
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirectInfo <> io.redirectInfo

  // TODO: delete this and put BTB and JBTAC into Stage1
  /*
  val flush = BoolStopWatch(io.redirect.valid, io.in.pc.valid, startHighPriority = true)

  // BTB makes a quick prediction for branch and direct jump, which is
  // 4-way set-associative, and each way is divided into 4 banks.
  val btbAddr = new TableAddr(log2Up(BtbSets), BtbBanks)
  def btbEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(btbAddr.tagBits.W)
    val _type = UInt(2.W)
    val target = UInt(VAddrBits.W)
    val pred = UInt(2.W) // 2-bit saturated counter as a quick predictor
  }

  val btb = List.fill(BtbBanks)(List.fill(BtbWays)(
    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = true))))

  // val fetchPkgAligned = btbAddr.getBank(io.in.pc.bits) === 0.U
  val HeadBank = btbAddr.getBank(io.in.pc.bits)
  val TailBank = btbAddr.getBank(io.in.pc.bits + FetchWidth.U << 2.U - 4.U)
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).reset := reset.asBool
      btb(b)(w).io.r.req.valid := io.in.pc.valid && Mux(TailBank > HeadBank, b.U >= HeadBank && b.U <= TailBank, b.U >= TailBank || b.U <= HeadBank)
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
    }
  }
  // latch pc for 1 cycle latency when reading SRAM
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
  val btbRead = Wire(Vec(BtbBanks, Vec(BtbWays, btbEntry())))
  val btbHits = Wire(Vec(FetchWidth, Bool()))
  val btbTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val btbTypes = Wire(Vec(FetchWidth, UInt(2.W)))
  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
  val btbTakens = Wire(Vec(FetchWidth, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btbRead(b)(w) := btb(b)(w).io.r.resp.data(0)
    }
  }
  for (i <- 0 until FetchWidth) {
    btbHits(i) := false.B
    for (b <- 0 until BtbBanks) {
      for (w <- 0 until BtbWays) {
        when (b.U === btbAddr.getBank(pcLatch) && btbRead(b)(w).valid && btbRead(b)(w).tag === btbAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2)) {
          btbHits(i) := !flush && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
          btbTargets(i) := btbRead(b)(w).target
          btbTypes(i) := btbRead(b)(w)._type
          // btbPreds(i) := btbRead(b)(w).pred
          btbTakens(i) := (btbRead(b)(w).pred)(1).asBool
        }.otherwise {
          btbHits(i) := false.B
          btbTargets(i) := DontCare
          btbTypes(i) := DontCare
          btbTakens(i) := DontCare
        }
      }
    }
  }

  // JBTAC, divided into 8 banks, makes prediction for indirect jump except ret.
  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
  def jbtacEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(jbtacAddr.tagBits.W)
    val target = UInt(VAddrBits.W)
  }

  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = true)))

  (0 until JbtacBanks).map(i => jbtac(i).reset := reset.asBool)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.valid := io.in.pc.valid)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(Cat((io.in.pc.bits)(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2))

  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
  (0 until JbtacBanks).map(i => jbtacRead(i) := jbtac(i).io.r.resp.data(0))
  val jbtacHits = Wire(Vec(FetchWidth, Bool()))
  val jbtacTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val jbtacHeadBank = jbtacAddr.getBank(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)))
  for (i <- 0 until FetchWidth) {
    jbtacHits(i) := false.B
    for (b <- 0 until JbtacBanks) {
      when (jbtacHeadBank + i.U === b.U) {
        jbtacHits(i) := jbtacRead(b).valid && jbtacRead(b).tag === jbtacAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2) &&
          !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
        jbtacTargets(i) := jbtacRead(b).target
      }.otherwise {
        jbtacHits(i) := false.B
        jbtacTargets(i) := DontCare
      }
    }
  }

  // redirect based on BTB and JBTAC
  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))
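  // i.e. a BTB hit takes priority over a JBTAC hit when choosing the target,
  // and a BTB branch entry only redirects if its 2-bit counter predicts taken.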


  // update bpu, including BTB, JBTAC...
  // 1. update BTB
  // 1.1 read the selected bank
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).io.r.req.valid := io.redirect.valid && btbAddr.getBank(io.redirect.bits.pc) === b.U
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.redirect.bits.pc)
    }
  }

  // 1.2 match redirect pc tag with the 4 tags in a btb line, find a way to write
  // val redirectLatch = RegEnable(io.redirect.bits, io.redirect.valid)
  val redirectLatch = RegNext(io.redirect.bits, init = 0.U.asTypeOf(new Redirect))
  val bankLatch = btbAddr.getBank(redirectLatch.pc)
  val btbUpdateRead = Wire(Vec(BtbWays, btbEntry()))
  val btbValids = Wire(Vec(BtbWays, Bool()))
  val btbUpdateTagHits = Wire(Vec(BtbWays, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btbUpdateRead(w) := btb(b)(w).io.r.resp.data(0)
        btbValids(w) := btbUpdateRead(w).valid && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
      }.otherwise {
        btbUpdateRead(w) := 0.U.asTypeOf(btbEntry())
        btbValids(w) := false.B
      }
    }
  }
  (0 until BtbWays).map(w => btbUpdateTagHits(w) := btbValids(w) && btbUpdateRead(w).tag === btbAddr.getTag(redirectLatch.pc))
  // val btbWriteWay = Wire(Vec(BtbWays, Bool()))
  val btbWriteWay = Wire(UInt(BtbWays.W))
  val btbInvalids = ~ btbValids.asUInt
  when (btbUpdateTagHits.asUInt.orR) {
    // tag hits
    btbWriteWay := btbUpdateTagHits.asUInt
  }.elsewhen (!btbValids.asUInt.andR) {
    // no tag hits but there are free entries
    btbWriteWay := Mux(btbInvalids >= 8.U, "b1000".U,
      Mux(btbInvalids >= 4.U, "b0100".U,
      Mux(btbInvalids >= 2.U, "b0010".U, "b0001".U)))
  }.otherwise {
    // no tag hits and no free entry, select a victim way
    btbWriteWay := UIntToOH(LFSR64()(log2Up(BtbWays) - 1, 0))
  }
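  // The >= comparisons above on the invalid-way mask pick the highest-numbered
  // invalid way; note the constants hard-code BtbWays = 4.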

  // 1.3 calculate new 2-bit counter value
  val btbWrite = WireInit(0.U.asTypeOf(btbEntry()))
  btbWrite.valid := true.B
  btbWrite.tag := btbAddr.getTag(redirectLatch.pc)
  btbWrite._type := redirectLatch._type
  btbWrite.target := redirectLatch.brTarget
  val oldPred = WireInit("b01".U)
  oldPred := PriorityMux(btbWriteWay.asTypeOf(Vec(BtbWays, Bool())), btbUpdateRead.map{ e => e.pred })
  val newPred = Mux(redirectLatch.taken, Mux(oldPred === "b11".U, "b11".U, oldPred + 1.U),
    Mux(oldPred === "b00".U, "b00".U, oldPred - 1.U))
  btbWrite.pred := Mux(btbUpdateTagHits.asUInt.orR && redirectLatch._type === BTBtype.B, newPred, "b01".U)

  // 1.4 write BTB
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btb(b)(w).io.w.req.valid := OHToUInt(btbWriteWay) === w.U &&
          RegNext(io.redirect.valid, init = false.B) &&
          (redirectLatch._type === BTBtype.B || redirectLatch._type === BTBtype.J)
        btb(b)(w).io.w.req.bits.setIdx := btbAddr.getBankIdx(redirectLatch.pc)
        btb(b)(w).io.w.req.bits.data := btbWrite
      }.otherwise {
        btb(b)(w).io.w.req.valid := false.B
        btb(b)(w).io.w.req.bits.setIdx := DontCare
        btb(b)(w).io.w.req.bits.data := DontCare
      }
    }
  }

  // 2. update JBTAC
  val jbtacWrite = WireInit(0.U.asTypeOf(jbtacEntry()))
  jbtacWrite.valid := true.B
  jbtacWrite.tag := jbtacAddr.getTag(io.redirect.bits.pc)
  jbtacWrite.target := io.redirect.bits.target
  (0 until JbtacBanks).map(b =>
    jbtac(b).io.w.req.valid := io.redirect.valid &&
      b.U === jbtacAddr.getBank(io.redirect.bits.pc) &&
      io.redirect.bits._type === BTBtype.I)
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.setIdx := jbtacAddr.getBankIdx(io.redirect.bits.pc))
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.data := jbtacWrite)
  */
}