xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision bc1fc6712d369d27278d1c8bc5593a02653d4d4b)
1package xiangshan.frontend
2
3import chisel3._
4import chisel3.util._
5import xiangshan._
6import utils._
7
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  // Tag is whatever remains of the virtual address above the index and the 2-bit offset.
  def tagBits = VAddrBits - idxBits - 2

  // Field order fixes the bit layout: | tag | idx | offset(2) |, VAddrBits bits total.
  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(2.W)

  // Reinterpret a raw address as this {tag, idx, offset} view (width-normalized first).
  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // The low log2(banks) index bits select the bank; the remaining high index bits
  // address the set within that bank.
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}
21
// Payload handed from BPU Stage1 to Stage2: the fetch PC plus the raw lookup
// results of each predictor structure for the whole fetch packet.
class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    // one hit bit per fetch slot
    val hits = Output(UInt(FetchWidth.W))
    // FIX: was `UInt(VAddrBits.B)` — `.B` constructs a Bool literal, not a width.
    // Targets are virtual addresses and need `VAddrBits.W`, matching `pc` above.
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    // one-hot hit position of the indirect-jump entry in the fetch packet
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  // per-slot snapshot of the global history each instruction sees
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  // Stage1's quick BTB prediction, carried along so Stage3 can compare against it
  val btbPred = ValidIO(new BranchPrediction)
}
39
class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirect = Flipped(ValidIO(new Redirect))
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    // to ifu, quick prediction result
    val btbOut = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  // flush Stage1 when io.flush || io.redirect.valid

  // Placeholder wiring only: swallow every PC request and never emit a
  // prediction or a Stage2 packet, keeping the pipeline legal until the
  // BTB/JBTAC lookups move into this stage.
  // TODO: delete this!!!
  io.in.pc.ready := true.B
  io.btbOut.bits := DontCare
  io.btbOut.valid := false.B
  io.out.bits := DontCare
  io.out.valid := false.B
}
64
// Stage2 currently adds nothing on top of Stage1's payload; the subclass exists
// so the Stage2->Stage3 interface can diverge from Stage1To2IO later.
class Stage2To3IO extends Stage1To2IO {
}
67
class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // Remember whether the packet currently in flight was killed: the watch is
  // set by io.flush (Stage3 or backend redirect) and cleared when the next
  // packet enters; on a tie the flush wins.
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)

  // This stage is a pure one-cycle delay: latch the payload on arrival and
  // present it the following cycle unless it was flushed in between.
  io.out.bits := RegEnable(io.in.bits, io.in.valid)
  io.out.valid := RegNext(io.in.valid) && !flushS2
  io.in.ready := io.out.fire() || !io.out.valid
}
84
// Stage3 combines the latched Stage2 payload with the predecode information from
// the icache, picks the first taken control-flow instruction in the fetch line,
// maintains the return address stack, and redirects (flushBPU) whenever its
// refined target disagrees with Stage1's quick BTB prediction.
class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val predecode = Flipped(ValidIO(new Predecode))
    val out = ValidIO(new BranchPrediction)
    // from backend
    val redirect = Flipped(ValidIO(new Redirect)) // only need isCall here
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore ghr in stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
  })

  // High from a flush until the next packet enters (flush has priority on a tie).
  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  when (io.in.fire()) { inLatch := io.in.bits }
  when (io.in.fire()) {
    // FIX: was `!io.in.flush`, but Stage2To3IO has no `flush` field (this did not
    // compile); the intent is to drop the incoming packet when Stage3 is flushed
    // in the same cycle it fires.
    validLatch := !io.flush
  }.elsewhen (validLatch && io.predecode.valid && !flushS3) {
    validLatch := false.B
  }
  // Ready for a new packet when idle, or when the held packet retires this cycle.
  io.in.ready := !validLatch || validLatch && io.predecode.valid && !flushS3

  // RAS
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // layer of nested call functions
  }
  val ras = Mem(RasSize, rasEntry())
  val sp = Counter(RasSize)
  val rasTop = ras.read(sp.value)
  val rasTopAddr = rasTop.retAddr

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // for example, getLowerMask("b00101100".U, 8) = "b00111111", getLowestBit("b00101100".U, 8) = "b00000100".U
  def getLowerMask(idx: UInt, len: Int) = (0 until len).map(i => idx >> i.U).reduce(_|_)
  def getLowestBit(idx: UInt, len: Int) = Mux(idx(0), 1.U(len.W), Reverse(((0 until len).map(i => Reverse(idx(len - 1, 0)) >> i.U).reduce(_|_) + 1.U) >> 1.U))

  // Per-slot one-hot/bit masks of each control-flow class, qualified by BTB/JBTAC
  // hits and the predecode valid mask.
  val brIdx = inLatch.btb.hits & io.predecode.bits.fuTypes.map { t => ALUOpType.isBranch(t) }.asUInt & io.predecode.bits.mask
  val brTakenIdx = getLowestBit(brIdx & inLatch.tage.takens.asUInt, FetchWidth)
  // not-taken branches at or below the first taken branch
  val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & getLowerMask(brTakenIdx, FetchWidth)
  val jalIdx = getLowestBit(inLatch.btb.hits & io.predecode.bits.fuTypes.map { t => t === ALUOpType.jal }.asUInt & io.predecode.bits.mask, FetchWidth)
  val callIdx = getLowestBit(inLatch.btb.hits & io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.call }.asUInt, FetchWidth)
  val jalrIdx = getLowestBit(inLatch.jbtac.hitIdx & io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.jalr }.asUInt, FetchWidth)
  val retIdx = getLowestBit(io.predecode.bits.mask & io.predecode.bits.fuTypes.map { t => t === ALUOpType.ret }.asUInt, FetchWidth)

  // first redirecting instruction of any kind in the fetch line (one-hot)
  val jmpIdx = getLowestBit(brTakenIdx | jalIdx | callIdx | jalrIdx | retIdx, FetchWidth)
  // Target priority: ret -> RAS top, jalr -> JBTAC, otherwise the BTB target of
  // the jumping slot.
  io.out.bits.target := Mux(jmpIdx === retIdx, rasTopAddr,
    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
    PriorityMux(jmpIdx, inLatch.btb.targets)))
  // Instructions up to and including the jump are valid; later ones are squashed.
  io.out.bits.instrValid := getLowerMask(jmpIdx, FetchWidth).asTypeOf(Vec(FetchWidth, Bool()))
  io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
    Mux(jmpIdx === jalrIdx, BTBtype.I,
    Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist
  // there may be several notTaken branches before the first jump instruction,
  // so we need to calculate how many zeroes should each instruction shift in its global history.
  // each history is exclusive of instruction's own jump direction.
  // FIX: the original built the Vec with `VecInit(FetchWidth, 0.U)` (an Int is not
  // a Vec element) and then summed whole masks with reduce(_+_), assigning one
  // UInt to a Vec. Per lane i the shift is simply the number of not-taken
  // branches in slots strictly below i.
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  histShift(0) := 0.U
  (1 until FetchWidth).foreach(i => histShift(i) := PopCount(brNotTakenIdx(i - 1, 0)))
  // FIX: was `firstHist << histShift` (shifting by the whole Vec); each slot must
  // shift by its own amount.
  (0 until FetchWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
  // flush BPU and redirect when target differs from the target predicted in Stage1
  val isTargetDiff = !inLatch.btbPred.valid || io.out.bits.target =/= inLatch.btbPred.bits.target
  io.out.valid := jmpIdx.orR && validLatch && io.predecode.valid && !flushS3 && isTargetDiff
  io.flushBPU := io.out.valid

  // update RAS
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  // Return address = address of the call instruction + 4.
  // FIX: Scala gives `+` higher precedence than `<<`, so the original
  // `inLatch.pc + OHToUInt(callIdx) << 2.U + 4.U` parsed as
  // `(pc + slot) << 6`; parenthesize so the slot index is scaled to a byte
  // offset before being added.
  rasWrite.retAddr := inLatch.pc + (OHToUInt(callIdx) << 2.U) + 4.U
  // Recursive calls to the same site only bump the counter of the top entry.
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  when (io.out.valid) {
    when (jmpIdx === callIdx) {
      ras.write(Mux(allocNewEntry, sp.value + 1.U, sp.value), rasWrite)
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      when (rasTop.ctr === 1.U) {
        // pop, saturating at the bottom of the stack
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        // nested return: decrement the recursion counter in place
        ras.write(sp.value, Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry()))
      }
    }
  }
  // TODO: back-up stack for ras

  // roll back global history in S1 if S3 redirects
  io.s1RollBackHist := PriorityMux(jmpIdx, io.out.bits.hist)
}
175
// Top-level branch prediction unit: a three-stage pipeline. Stage1 is meant to
// give a quick BTB-based prediction (btbOut), Stage3 refines it with predecode
// info (tageOut) and can flush the earlier stages when the refined target
// disagrees with the quick one.
class BPU extends XSModule {
  val io = IO(new Bundle() {
    // flush pipeline and update bpu based on redirect signals from brq
    val redirect = Flipped(ValidIO(new Redirect))
    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
    // val predMask = Output(Vec(FetchWidth, Bool()))
    // val predTargets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after implement predecode unit
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  s1.io.redirect <> io.redirect
  // flush Stage1 when s1.io.flush || s1.io.redirect.valid
  // NOTE(review): unlike s2 below, s1 is NOT flushed on io.redirect.valid here —
  // the "|| io.redirect.valid" term is commented out. s1 does receive the
  // redirect directly via s1.io.redirect, so this may be deliberate; confirm.
  s1.io.flush := s3.io.flushBPU// || io.redirect.valid
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.btbOut
  // Stage3 tells Stage1 which global history to roll back to on a mispredict.
  s1.io.s3RollBackHist := s3.io.s1RollBackHist

  s1.io.out <> s2.io.in
  // Stage2 is flushed by either a Stage3 redirect or a backend redirect.
  s2.io.flush := s3.io.flushBPU || io.redirect.valid

  s2.io.out <> s3.io.in
  // Stage3 is only flushed by the backend (it is the source of flushBPU itself).
  s3.io.flush := io.redirect.valid
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirect <> io.redirect

  // TODO: delete this and put BTB and JBTAC into Stage1
  /*
  val flush = BoolStopWatch(io.redirect.valid, io.in.pc.valid, startHighPriority = true)

  // BTB makes a quick prediction for branch and direct jump, which is
  // 4-way set-associative, and each way is divided into 4 banks.
  val btbAddr = new TableAddr(log2Up(BtbSets), BtbBanks)
  def btbEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(btbAddr.tagBits.W)
    val _type = UInt(2.W)
    val target = UInt(VAddrBits.W)
    val pred = UInt(2.W) // 2-bit saturated counter as a quick predictor
  }

  val btb = List.fill(BtbBanks)(List.fill(BtbWays)(
    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = true))))

  // val fetchPkgAligned = btbAddr.getBank(io.in.pc.bits) === 0.U
  val HeadBank = btbAddr.getBank(io.in.pc.bits)
  val TailBank = btbAddr.getBank(io.in.pc.bits + FetchWidth.U << 2.U - 4.U)
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).reset := reset.asBool
      btb(b)(w).io.r.req.valid := io.in.pc.valid && Mux(TailBank > HeadBank, b.U >= HeadBank && b.U <= TailBank, b.U >= TailBank || b.U <= HeadBank)
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
    }
  }
  // latch pc for 1 cycle latency when reading SRAM
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
  val btbRead = Wire(Vec(BtbBanks, Vec(BtbWays, btbEntry())))
  val btbHits = Wire(Vec(FetchWidth, Bool()))
  val btbTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val btbTypes = Wire(Vec(FetchWidth, UInt(2.W)))
  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
  val btbTakens = Wire(Vec(FetchWidth, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btbRead(b)(w) := btb(b)(w).io.r.resp.data(0)
    }
  }
  for (i <- 0 until FetchWidth) {
    btbHits(i) := false.B
    for (b <- 0 until BtbBanks) {
      for (w <- 0 until BtbWays) {
        when (b.U === btbAddr.getBank(pcLatch) && btbRead(b)(w).valid && btbRead(b)(w).tag === btbAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2)) {
          btbHits(i) := !flush && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
          btbTargets(i) := btbRead(b)(w).target
          btbTypes(i) := btbRead(b)(w)._type
          // btbPreds(i) := btbRead(b)(w).pred
          btbTakens(i) := (btbRead(b)(w).pred)(1).asBool
        }.otherwise {
          btbHits(i) := false.B
          btbTargets(i) := DontCare
          btbTypes(i) := DontCare
          btbTakens(i) := DontCare
        }
      }
    }
  }

  // JBTAC, divided into 8 banks, makes prediction for indirect jump except ret.
  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
  def jbtacEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(jbtacAddr.tagBits.W)
    val target = UInt(VAddrBits.W)
  }

  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = true)))

  (0 until JbtacBanks).map(i => jbtac(i).reset := reset.asBool)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.valid := io.in.pc.valid)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(Cat((io.in.pc.bits)(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2))

  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
  (0 until JbtacBanks).map(i => jbtacRead(i) := jbtac(i).io.r.resp.data(0))
  val jbtacHits = Wire(Vec(FetchWidth, Bool()))
  val jbtacTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val jbtacHeadBank = jbtacAddr.getBank(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)))
  for (i <- 0 until FetchWidth) {
    jbtacHits(i) := false.B
    for (b <- 0 until JbtacBanks) {
      when (jbtacHeadBank + i.U === b.U) {
        jbtacHits(i) := jbtacRead(b).valid && jbtacRead(b).tag === jbtacAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + i.U << 2) &&
          !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
        jbtacTargets(i) := jbtacRead(b).target
      }.otherwise {
        jbtacHits(i) := false.B
        jbtacTargets(i) := DontCare
      }
    }
  }

  // redirect based on BTB and JBTAC
  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))


  // update bpu, including BTB, JBTAC...
  // 1. update BTB
  // 1.1 read the selected bank
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).io.r.req.valid := io.redirect.valid && btbAddr.getBank(io.redirect.bits.pc) === b.U
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.redirect.bits.pc)
    }
  }

  // 1.2 match redirect pc tag with the 4 tags in a btb line, find a way to write
  // val redirectLatch = RegEnable(io.redirect.bits, io.redirect.valid)
  val redirectLatch = RegNext(io.redirect.bits, init = 0.U.asTypeOf(new Redirect))
  val bankLatch = btbAddr.getBank(redirectLatch.pc)
  val btbUpdateRead = Wire(Vec(BtbWays, btbEntry()))
  val btbValids = Wire(Vec(BtbWays, Bool()))
  val btbUpdateTagHits = Wire(Vec(BtbWays, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btbUpdateRead(w) := btb(b)(w).io.r.resp.data(0)
        btbValids(w) := btbUpdateRead(w).valid && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
      }.otherwise {
        btbUpdateRead(w) := 0.U.asTypeOf(btbEntry())
        btbValids(w) := false.B
      }
    }
  }
  (0 until BtbWays).map(w => btbUpdateTagHits(w) := btbValids(w) && btbUpdateRead(w).tag === btbAddr.getTag(redirectLatch.pc))
  // val btbWriteWay = Wire(Vec(BtbWays, Bool()))
  val btbWriteWay = Wire(UInt(BtbWays.W))
  val btbInvalids = ~ btbValids.asUInt
  when (btbUpdateTagHits.asUInt.orR) {
    // tag hits
    btbWriteWay := btbUpdateTagHits.asUInt
  }.elsewhen (!btbValids.asUInt.andR) {
    // no tag hits but there are free entries
    btbWriteWay := Mux(btbInvalids >= 8.U, "b1000".U,
      Mux(btbInvalids >= 4.U, "b0100".U,
      Mux(btbInvalids >= 2.U, "b0010".U, "b0001".U)))
  }.otherwise {
    // no tag hits and no free entry, select a victim way
    btbWriteWay := UIntToOH(LFSR64()(log2Up(BtbWays) - 1, 0))
  }

  // 1.3 calculate new 2-bit counter value
  val btbWrite = WireInit(0.U.asTypeOf(btbEntry()))
  btbWrite.valid := true.B
  btbWrite.tag := btbAddr.getTag(redirectLatch.pc)
  btbWrite._type := redirectLatch._type
  btbWrite.target := redirectLatch.brTarget
  val oldPred = WireInit("b01".U)
  oldPred := PriorityMux(btbWriteWay.asTypeOf(Vec(BtbWays, Bool())), btbUpdateRead.map{ e => e.pred })
  val newPred = Mux(redirectLatch.taken, Mux(oldPred === "b11".U, "b11".U, oldPred + 1.U),
    Mux(oldPred === "b00".U, "b00".U, oldPred - 1.U))
  btbWrite.pred := Mux(btbUpdateTagHits.asUInt.orR && redirectLatch._type === BTBtype.B, newPred, "b01".U)

  // 1.4 write BTB
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btb(b)(w).io.w.req.valid := OHToUInt(btbWriteWay) === w.U &&
          RegNext(io.redirect.valid, init = false.B) &&
          (redirectLatch._type === BTBtype.B || redirectLatch._type === BTBtype.J)
        btb(b)(w).io.w.req.bits.setIdx := btbAddr.getBankIdx(redirectLatch.pc)
        btb(b)(w).io.w.req.bits.data := btbWrite
      }.otherwise {
        btb(b)(w).io.w.req.valid := false.B
        btb(b)(w).io.w.req.bits.setIdx := DontCare
        btb(b)(w).io.w.req.bits.data := DontCare
      }
    }
  }

  // 2. update JBTAC
  val jbtacWrite = WireInit(0.U.asTypeOf(jbtacEntry()))
  jbtacWrite.valid := true.B
  jbtacWrite.tag := jbtacAddr.getTag(io.redirect.bits.pc)
  jbtacWrite.target := io.redirect.bits.target
  (0 until JbtacBanks).map(b =>
    jbtac(b).io.w.req.valid := io.redirect.valid &&
      b.U === jbtacAddr.getBank(io.redirect.bits.pc) &&
      io.redirect.bits._type === BTBtype.I)
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.setIdx := jbtacAddr.getBankIdx(io.redirect.bits.pc))
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.data := jbtacWrite)
  */
}
400