xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision f5c046cd9efc0040f5c664f8214b24d9a2cbcc04)
1package xiangshan.frontend
2
3import chisel3._
4import chisel3.util._
5import xiangshan._
6import xiangshan.utils._
7import xiangshan.backend.ALUOpType
8import utils._
9
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  // Splits a virtual address into {tag, idx, 2-bit offset} for a banked
  // prediction table. tag takes whatever bits remain above idx + offset.
  def tagBits = VAddrBits - idxBits - 2

  // Field order defines the bit layout used by asTypeOf below; do not reorder.
  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(2.W)

  // Reinterpret a raw address as this {tag, idx, offset} layout.
  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // Bank number comes from the low-order idx bits ...
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  // ... and the set index within that bank from the remaining high idx bits.
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}
23
// Payload passed from BPUStage1 to BPUStage2: the fetch pc plus the raw
// per-predictor results (BTB, JBTAC, Tage), the per-instruction global
// history, and Stage1's quick prediction for later cross-checking in Stage3.
class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    // one hit bit and one target per instruction slot in the fetch line
    val hits = Output(UInt(FetchWidth.W))
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    // indirect-jump predictor: which slot hit, and its single target
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    // direction predictions per instruction slot
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  // global history snapshot per instruction slot
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  // Stage1's quick prediction, checked against Stage3's final decision
  val btbPred = ValidIO(new BranchPrediction)
}
41
// First prediction stage: hosts the global history register and the Tage
// predictor. The rest of the Stage1 datapath (BTB/JBTAC lookup) is not
// implemented yet — its outputs are tied off below.
class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    // to ifu, quick prediction result
    val btbOut = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  // TODO: delete this!!!
  // Placeholder tie-offs until the real Stage1 datapath is implemented.
  io.in.pc.ready := true.B
  io.btbOut.valid := false.B
  io.btbOut.bits := DontCare
  io.out.valid := false.B
  io.out.bits := DontCare

  // flush Stage1 when io.flush
  // NOTE(review): flushS1 is computed but not consumed anywhere in this stage
  // yet — presumably it will gate the outputs once they are implemented; confirm.
  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)

  // global history register
  val ghr = RegInit(0.U(HistoryLength.W))
  // modify updateGhr and newGhr when updating ghr
  val updateGhr = WireInit(false.B)
  val newGhr = WireInit(0.U(HistoryLength.W))
  when (updateGhr) { ghr := newGhr }
  // use hist as global history!!!
  // Bypass: expose the new history in the same cycle it is written.
  val hist = Mux(updateGhr, newGhr, ghr)

  // Tage predictor: looked up with the current pc and bypassed history,
  // trained via the backend's redirect info.
  val tage = Module(new Tage)
  tage.io.req.valid := io.in.pc.fire()
  tage.io.req.bits.pc := io.in.pc.bits
  tage.io.req.bits.hist := hist
  tage.io.redirectInfo <> io.redirectInfo
  // Overrides the DontCare defaults above (Chisel last-connect semantics).
  io.out.bits.tage <> tage.io.out
  io.btbOut.bits.tageMeta := tage.io.meta

}
85
// Stage2 currently adds nothing to the Stage1 payload; kept as a distinct
// type so Stage2-specific fields can be added later without touching Stage1To2IO.
class Stage2To3IO extends Stage1To2IO {
}
88
// Second prediction stage: a single pipeline register between Stage1 and
// Stage3; it performs no prediction work of its own yet.
class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // flush Stage2 when Stage3 or backend redirects
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  // Output is valid one cycle after accepting an input, unless flushed meanwhile.
  io.out.valid := !flushS2 && RegNext(io.in.fire())
  // Ready to accept when the held entry is empty or being drained this cycle.
  io.in.ready := !io.out.valid || io.out.fire()

  // do nothing — just latch the Stage1 payload through
  io.out.bits := RegEnable(io.in.bits, io.in.fire())
}
105
// Final prediction stage: combines the BTB/JBTAC/Tage results latched from
// Stage2 with predecode info from the icache, maintains a return address
// stack (RAS), and flushes/redirects the earlier stages when its decision
// disagrees with Stage1's quick prediction.
class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val out = ValidIO(new BranchPrediction)
    // from icache
    val predecode = Flipped(ValidIO(new Predecode))
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore ghr in stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
  })

  // Stage3 stays flushed from io.flush until the next request is accepted.
  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  // Latch the incoming payload; an input accepted in a flush cycle is dropped.
  when (io.in.fire()) {
    inLatch := io.in.bits
    validLatch := !io.flush
  }.elsewhen (io.out.valid) {
    validLatch := false.B
  }
  // The held entry completes only once the icache's predecode info arrives.
  io.out.valid := validLatch && io.predecode.valid && !flushS3
  io.in.ready := !validLatch || io.out.valid

  // RAS
  // TODO: split retAddr and ctr
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // layer of nested call functions
  }
  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
  val sp = Counter(RasSize)
  val rasTop = ras(sp.value)
  val rasTopAddr = rasTop.retAddr

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // brTakenIdx/jalIdx/callIdx/jalrIdx/retIdx/jmpIdx is one-hot encoded.
  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction.
  val brIdx = inLatch.btb.hits & Cat(io.predecode.bits.fuTypes.map { t => ALUOpType.isBranch(t) }).asUInt & io.predecode.bits.mask
  val brTakenIdx = LowestBit(brIdx & inLatch.tage.takens.asUInt, FetchWidth)
  val jalIdx = LowestBit(inLatch.btb.hits & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jal }).asUInt & io.predecode.bits.mask, FetchWidth)
  val callIdx = LowestBit(inLatch.btb.hits & io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.call }).asUInt, FetchWidth)
  val jalrIdx = LowestBit(inLatch.jbtac.hitIdx & io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jalr }).asUInt, FetchWidth)
  val retIdx = LowestBit(io.predecode.bits.mask & Cat(io.predecode.bits.fuTypes.map { t => t === ALUOpType.ret }).asUInt, FetchWidth)

  val jmpIdx = LowestBit(brTakenIdx | jalIdx | callIdx | jalrIdx | retIdx, FetchWidth)
  val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & LowerMask(jmpIdx, FetchWidth)

  // Target priority: ret -> RAS top, jalr -> JBTAC target,
  // no jump -> sequential pc + 4, otherwise -> BTB target of the jump slot.
  io.out.bits.target := Mux(jmpIdx === retIdx, rasTopAddr,
    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
    Mux(jmpIdx === 0.U, inLatch.pc + 4.U, // TODO: RVC
    PriorityMux(jmpIdx, inLatch.btb.targets))))
  // Instructions at and before the first jump are valid; those after are squashed.
  io.out.bits.instrValid := LowerMask(jmpIdx, FetchWidth).asTypeOf(Vec(FetchWidth, Bool()))
  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
  //io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist(0)
  // there may be several notTaken branches before the first jump instruction,
  // so we need to calculate how many zeroes should each instruction shift in its global history.
  // each history is exclusive of instruction's own jump direction.
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
  (0 until FetchWidth).map(i => shift(i) := Mux(!brNotTakenIdx(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
  // histShift(j) = number of not-taken branches strictly before slot j.
  for (j <- 0 until FetchWidth) {
    var tmp = 0.U
    for (i <- 0 until FetchWidth) {
      tmp = tmp + shift(i)(j)
    }
    histShift(j) := tmp
  }
  (0 until FetchWidth).map(i => io.out.bits.hist(i) := firstHist << histShift(i))
  // save ras checkpoint info
  io.out.bits.rasSp := sp.value
  io.out.bits.rasTopCtr := rasTop.ctr

  // flush BPU and redirect when target differs from the target predicted in Stage1.
  // NOTE(review): `!a ^ b` is XNOR — this term fires when Stage1 and Stage3
  // AGREE on whether to redirect; the comment above suggests a mismatch check
  // (`a ^ b`) may have been intended. Left as-is pending confirmation.
  io.out.bits.redirect := !inLatch.btbPred.bits.redirect ^ jmpIdx.orR.asBool ||
    inLatch.btbPred.bits.redirect && jmpIdx.orR.asBool && io.out.bits.target =/= inLatch.btbPred.bits.target
  io.flushBPU := io.out.bits.redirect && io.out.valid

  // speculative update RAS
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  // Return address = pc of the call slot (pc + slotIdx*4) plus 4.
  // Parentheses are required: Scala binds `+` tighter than `<<`, so the
  // unparenthesized `pc + OHToUInt(callIdx) << 2.U + 4.U` elaborated as
  // `(pc + slotIdx) << 6`, corrupting every pushed return address.
  rasWrite.retAddr := inLatch.pc + (OHToUInt(callIdx) << 2.U) + 4.U
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  when (io.out.valid) {
    // NOTE(review): when there is no jump at all, jmpIdx and callIdx are both
    // zero, so `jmpIdx === callIdx` holds and a spurious push occurs — confirm
    // LowestBit's all-zero behavior and consider guarding with jmpIdx.orR.
    when (jmpIdx === callIdx) {
      // push: allocate a new entry when the return address differs from the
      // top, otherwise just bump the top entry's nesting counter
      ras(Mux(allocNewEntry, sp.value + 1.U, sp.value)) := rasWrite
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      // pop: decrement the nesting counter first; only pop the entry (move
      // sp down, saturating at 0) once the counter reaches one
      when (rasTop.ctr === 1.U) {
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
      }
    }
  }
  // use checkpoint to recover RAS after a backend misprediction
  val recoverSp = io.redirectInfo.redirect.rasSp
  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
    sp.value := recoverSp
    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
  }

  // roll back global history in S1 if S3 redirects
  io.s1RollBackHist := PriorityMux(jmpIdx, io.out.bits.hist)
}
219
// Top-level branch prediction unit: a three-stage pipeline s1 -> s2 -> s3.
// s1 sends a quick prediction (btbOut) to the ifu; s3 produces the refined
// prediction (tageOut) and flushes s1/s2 when it disagrees with s1.
class BPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    // flush pipeline if misPred and update bpu based on redirect signals from brq
    val redirectInfo = Flipped(new RedirectInfo)

    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after implement predecode unit
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  // Stage1: flushed by an s3 disagreement or a backend redirect; its global
  // history can be rolled back from s3's checkpoint.
  s1.io.redirectInfo <> io.redirectInfo
  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.btbOut
  s1.io.s3RollBackHist := s3.io.s1RollBackHist

  // Stage2: same flush sources as Stage1.
  s1.io.out <> s2.io.in
  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

  // Stage3: only flushed by the backend — it is the source of flushBPU itself.
  s2.io.out <> s3.io.in
  s3.io.flush := io.redirectInfo.flush()
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirectInfo <> io.redirectInfo

  // TODO: delete this and put BTB and JBTAC into Stage1
  /*
  val flush = BoolStopWatch(io.redirect.valid, io.in.pc.valid, startHighPriority = true)

  // BTB makes a quick prediction for branch and direct jump, which is
  // 4-way set-associative, and each way is divided into 4 banks.
  val btbAddr = new TableAddr(log2Up(BtbSets), BtbBanks)
  def btbEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(btbAddr.tagBits.W)
    val _type = UInt(2.W)
    val target = UInt(VAddrBits.W)
    val pred = UInt(2.W) // 2-bit saturated counter as a quick predictor
  }

  val btb = List.fill(BtbBanks)(List.fill(BtbWays)(
    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = true))))

  // val fetchPkgAligned = btbAddr.getBank(io.in.pc.bits) === 0.U
  val HeadBank = btbAddr.getBank(io.in.pc.bits)
  val TailBank = btbAddr.getBank(io.in.pc.bits + FetchWidth.U << 2.U - 4.U)
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).reset := reset.asBool
      btb(b)(w).io.r.req.valid := io.in.pc.valid && Mux(TailBank > HeadBank, b.U >= HeadBank && b.U <= TailBank, b.U >= TailBank || b.U <= HeadBank)
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
    }
  }
  // latch pc for 1 cycle latency when reading SRAM
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
  val btbRead = Wire(Vec(BtbBanks, Vec(BtbWays, btbEntry())))
  val btbHits = Wire(Vec(FetchWidth, Bool()))
  val btbTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val btbTypes = Wire(Vec(FetchWidth, UInt(2.W)))
  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
  val btbTakens = Wire(Vec(FetchWidth, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btbRead(b)(w) := btb(b)(w).io.r.resp.data(0)
    }
  }
  for (i <- 0 until FetchWidth) {
    btbHits(i) := false.B
    for (b <- 0 until BtbBanks) {
      for (w <- 0 until BtbWays) {
        when (b.U === btbAddr.getBank(pcLatch) && btbRead(b)(w).valid && btbRead(b)(w).tag === btbAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2)) {
          btbHits(i) := !flush && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
          btbTargets(i) := btbRead(b)(w).target
          btbTypes(i) := btbRead(b)(w)._type
          // btbPreds(i) := btbRead(b)(w).pred
          btbTakens(i) := (btbRead(b)(w).pred)(1).asBool
        }.otherwise {
          btbHits(i) := false.B
          btbTargets(i) := DontCare
          btbTypes(i) := DontCare
          btbTakens(i) := DontCare
        }
      }
    }
  }

  // JBTAC, divided into 8 banks, makes prediction for indirect jump except ret.
  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
  def jbtacEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(jbtacAddr.tagBits.W)
    val target = UInt(VAddrBits.W)
  }

  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = true)))

  (0 until JbtacBanks).map(i => jbtac(i).reset := reset.asBool)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.valid := io.in.pc.valid)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(Cat((io.in.pc.bits)(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2))

  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
  (0 until JbtacBanks).map(i => jbtacRead(i) := jbtac(i).io.r.resp.data(0))
  val jbtacHits = Wire(Vec(FetchWidth, Bool()))
  val jbtacTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val jbtacHeadBank = jbtacAddr.getBank(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)))
  for (i <- 0 until FetchWidth) {
    jbtacHits(i) := false.B
    for (b <- 0 until JbtacBanks) {
      when (jbtacHeadBank + i.U === b.U) {
        jbtacHits(i) := jbtacRead(b).valid && jbtacRead(b).tag === jbtacAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2) &&
          !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
        jbtacTargets(i) := jbtacRead(b).target
      }.otherwise {
        jbtacHits(i) := false.B
        jbtacTargets(i) := DontCare
      }
    }
  }

  // redirect based on BTB and JBTAC
  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))


  // update bpu, including BTB, JBTAC...
  // 1. update BTB
  // 1.1 read the selected bank
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).io.r.req.valid := io.redirect.valid && btbAddr.getBank(io.redirect.bits.pc) === b.U
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.redirect.bits.pc)
    }
  }

  // 1.2 match redirect pc tag with the 4 tags in a btb line, find a way to write
  // val redirectLatch = RegEnable(io.redirect.bits, io.redirect.valid)
  val redirectLatch = RegNext(io.redirect.bits, init = 0.U.asTypeOf(new Redirect))
  val bankLatch = btbAddr.getBank(redirectLatch.pc)
  val btbUpdateRead = Wire(Vec(BtbWays, btbEntry()))
  val btbValids = Wire(Vec(BtbWays, Bool()))
  val btbUpdateTagHits = Wire(Vec(BtbWays, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btbUpdateRead(w) := btb(b)(w).io.r.resp.data(0)
        btbValids(w) := btbUpdateRead(w).valid && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
      }.otherwise {
        btbUpdateRead(w) := 0.U.asTypeOf(btbEntry())
        btbValids(w) := false.B
      }
    }
  }
  (0 until BtbWays).map(w => btbUpdateTagHits(w) := btbValids(w) && btbUpdateRead(w).tag === btbAddr.getTag(redirectLatch.pc))
  // val btbWriteWay = Wire(Vec(BtbWays, Bool()))
  val btbWriteWay = Wire(UInt(BtbWays.W))
  val btbInvalids = ~ btbValids.asUInt
  when (btbUpdateTagHits.asUInt.orR) {
    // tag hits
    btbWriteWay := btbUpdateTagHits.asUInt
  }.elsewhen (!btbValids.asUInt.andR) {
    // no tag hits but there are free entries
    btbWriteWay := Mux(btbInvalids >= 8.U, "b1000".U,
      Mux(btbInvalids >= 4.U, "b0100".U,
      Mux(btbInvalids >= 2.U, "b0010".U, "b0001".U)))
  }.otherwise {
    // no tag hits and no free entry, select a victim way
    btbWriteWay := UIntToOH(LFSR64()(log2Up(BtbWays) - 1, 0))
  }

  // 1.3 calculate new 2-bit counter value
  val btbWrite = WireInit(0.U.asTypeOf(btbEntry()))
  btbWrite.valid := true.B
  btbWrite.tag := btbAddr.getTag(redirectLatch.pc)
  btbWrite._type := redirectLatch._type
  btbWrite.target := redirectLatch.brTarget
  val oldPred = WireInit("b01".U)
  oldPred := PriorityMux(btbWriteWay.asTypeOf(Vec(BtbWays, Bool())), btbUpdateRead.map{ e => e.pred })
  val newPred = Mux(redirectLatch.taken, Mux(oldPred === "b11".U, "b11".U, oldPred + 1.U),
    Mux(oldPred === "b00".U, "b00".U, oldPred - 1.U))
  btbWrite.pred := Mux(btbUpdateTagHits.asUInt.orR && redirectLatch._type === BTBtype.B, newPred, "b01".U)

  // 1.4 write BTB
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btb(b)(w).io.w.req.valid := OHToUInt(btbWriteWay) === w.U &&
          RegNext(io.redirect.valid, init = false.B) &&
          (redirectLatch._type === BTBtype.B || redirectLatch._type === BTBtype.J)
        btb(b)(w).io.w.req.bits.setIdx := btbAddr.getBankIdx(redirectLatch.pc)
        btb(b)(w).io.w.req.bits.data := btbWrite
      }.otherwise {
        btb(b)(w).io.w.req.valid := false.B
        btb(b)(w).io.w.req.bits.setIdx := DontCare
        btb(b)(w).io.w.req.bits.data := DontCare
      }
    }
  }

  // 2. update JBTAC
  val jbtacWrite = WireInit(0.U.asTypeOf(jbtacEntry()))
  jbtacWrite.valid := true.B
  jbtacWrite.tag := jbtacAddr.getTag(io.redirect.bits.pc)
  jbtacWrite.target := io.redirect.bits.target
  (0 until JbtacBanks).map(b =>
    jbtac(b).io.w.req.valid := io.redirect.valid &&
      b.U === jbtacAddr.getBank(io.redirect.bits.pc) &&
      io.redirect.bits._type === BTBtype.I)
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.setIdx := jbtacAddr.getBankIdx(io.redirect.bits.pc))
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.data := jbtacWrite)
  */
}
444