xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision 92c37e892ada2d2632ef5b8f18c0b0d40b5dd680)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType
import chisel3.util.experimental.BoringUtils
import xiangshan.backend.decode.XSTrap

class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  val btbPred = ValidIO(new BranchPrediction)
}

class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirectInfo = Input(new RedirectInfo)
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    val s3Taken = Input(Bool())
    // to ifu, quick prediction result
    val s1OutPred = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  io.in.pc.ready := true.B

  // flush Stage1 when io.flush
  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)

  // global history register
  val ghr = RegInit(0.U(HistoryLength.W))
  // modify updateGhr and newGhr when updating ghr
  val updateGhr = WireInit(false.B)
  val newGhr = WireInit(0.U(HistoryLength.W))
  when (updateGhr) { ghr := newGhr }
  // use hist as global history!!!
  val hist = Mux(updateGhr, newGhr, ghr)
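  // Note: hist bypasses the ghr register, so an update computed this cycle (newGhr)
  // is visible to the predictors immediately instead of one cycle late.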

  // Tage predictor
  // val tage = Module(new FakeTAGE)
  val tage = if(EnableBPD) Module(new Tage) else Module(new FakeTAGE)
  tage.io.req.valid := io.in.pc.fire()
  tage.io.req.bits.pc := io.in.pc.bits
  tage.io.req.bits.hist := hist
  tage.io.redirectInfo <> io.redirectInfo
  io.out.bits.tage <> tage.io.out
  io.s1OutPred.bits.tageMeta := tage.io.meta

  // latch pc to cover the 1-cycle SRAM read latency
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.fire())

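  // r is the redirect info sent back by the backend; updateFetchpc recovers the start PC
  // of the mispredicted fetch packet (fetchIdx counts 4-byte instruction slots, RVC is not handled yet).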
  val r = io.redirectInfo.redirect
  val updateFetchpc = r.pc - (r.fetchIdx << 2.U)
  // BTB
  val btb = Module(new BTB)
  btb.io.in.pc <> io.in.pc
  btb.io.in.pcLatch := pcLatch
  // TODO: pass real mask in
  btb.io.in.mask := "b1111111111111111".asUInt
  btb.io.redirectValid := io.redirectInfo.valid
  btb.io.flush := io.flush

  // btb.io.update.fetchPC := updateFetchpc
  // btb.io.update.fetchIdx := r.fetchIdx
  btb.io.update.pc := r.pc
  btb.io.update.hit := r.btbHitWay
  btb.io.update.misPred := io.redirectInfo.misPred
  // btb.io.update.writeWay := r.btbVictimWay
  btb.io.update.oldCtr := r.btbPredCtr
  btb.io.update.taken := r.taken
  btb.io.update.target := r.brTarget
  btb.io.update.btbType := r.btbType
  // TODO: add RVC logic
  btb.io.update.isRVC := DontCare

  val btbHit = btb.io.out.hit
  val btbTaken = btb.io.out.taken
  val btbTakenIdx = btb.io.out.takenIdx
  val btbTakenTarget = btb.io.out.target
  // val btbWriteWay = btb.io.out.writeWay
  val btbNotTakens = btb.io.out.notTakens
  val btbCtrs = VecInit(btb.io.out.dEntries.map(_.pred))
  val btbValids = btb.io.out.hits
  val btbTargets = VecInit(btb.io.out.dEntries.map(_.target))
  val btbTypes = VecInit(btb.io.out.dEntries.map(_.btbType))


  val jbtac = Module(new JBTAC)
  jbtac.io.in.pc <> io.in.pc
  jbtac.io.in.pcLatch := pcLatch
  jbtac.io.in.hist := hist
  jbtac.io.redirectValid := io.redirectInfo.valid
  jbtac.io.flush := io.flush

  jbtac.io.update.fetchPC := updateFetchpc
  jbtac.io.update.fetchIdx := r.fetchIdx << 1
  jbtac.io.update.misPred := io.redirectInfo.misPred
  jbtac.io.update.btbType := r.btbType
  jbtac.io.update.target := r.target
  jbtac.io.update.hist := r.hist

  val jbtacHit = jbtac.io.out.hit
  val jbtacTarget = jbtac.io.out.target
  val jbtacHitIdx = jbtac.io.out.hitIdx

  // calculate global history of each instr
  val firstHist = RegNext(hist)
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
  (0 until FetchWidth).foreach(i => shift(i) := Mux(!btbNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
  for (j <- 0 until FetchWidth) {
    var tmp = 0.U
    for (i <- 0 until FetchWidth) {
      tmp = tmp + shift(i)(j)
    }
    histShift(j) := tmp
  }
  (0 until FetchWidth).foreach(i => io.s1OutPred.bits.hist(i) := firstHist << histShift(i))
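  // i.e. each slot's history is firstHist shifted left once for every not-taken branch
  // in an earlier slot; a slot's own direction is not included in its own history.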

  // update ghr
  updateGhr := io.s1OutPred.bits.redirect || io.flush
  val brJumpIdx = Mux(!(btbHit && btbTaken), 0.U, UIntToOH(btbTakenIdx))
  val indirectIdx = Mux(!jbtacHit, 0.U, UIntToOH(jbtacHitIdx))
  // val newTaken = Mux(io.redirectInfo.flush(), !(r.btbType === BTBtype.B && !r.taken), )
  newGhr := Mux(io.redirectInfo.flush(),    (r.hist << 1.U) | !(r.btbType === BTBtype.B && !r.taken),
            Mux(io.flush,                   Mux(io.s3Taken, (io.s3RollBackHist << 1.U) | 1.U, io.s3RollBackHist),
            Mux(io.s1OutPred.bits.redirect, ((PriorityMux(brJumpIdx | indirectIdx, io.s1OutPred.bits.hist) << 1.U) | 1.U),
                                            io.s1OutPred.bits.hist(0) << PopCount(btbNotTakens))))
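  // Priority of newGhr: a backend redirect restores the history recorded with the
  // redirecting instruction and appends its actual direction (0 only for a not-taken branch);
  // a Stage3 flush restores s3RollBackHist, appending 1 if Stage3 took a jump;
  // a Stage1 redirect appends 1 to the history of the first taken/indirect slot;
  // otherwise one 0 is appended per not-taken branch in the packet.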

  // redirect based on BTB and JBTAC
  // io.out.valid := RegNext(io.in.pc.fire()) && !flushS1
  io.out.valid := RegNext(io.in.pc.fire()) && !io.flush

  io.s1OutPred.valid := io.out.valid
  io.s1OutPred.bits.redirect := btbHit && btbTaken || jbtacHit


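  // getInstrValid(i) returns a mask with bits 0..i set, i.e. it marks the slots at or
  // before index i in the fetch packet.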
  def getInstrValid(i: Int): UInt = {
    val mask = Wire(UInt(FetchWidth.W))
    val vec = Wire(Vec(FetchWidth, UInt(1.W)))
    for (j <- 0 until FetchWidth) {
      if (j <= i)
        vec(j) := 1.U
      else
        vec(j) := 0.U
    }
    mask := vec.asUInt
    mask
  }
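  // If Stage1 predicts no redirect, every slot is valid; otherwise only the slots up to
  // and including the first taken branch or indirect jump are valid. The target comes
  // from the BTB when the taken branch is the earliest such slot, otherwise from the JBTAC.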
  io.s1OutPred.bits.instrValid := (Fill(FetchWidth, ~io.s1OutPred.bits.redirect).asUInt |
    PriorityMux(brJumpIdx | indirectIdx, (0 until FetchWidth).map(getInstrValid(_)))).asTypeOf(Vec(FetchWidth, Bool()))
  io.s1OutPred.bits.target := Mux(brJumpIdx === LowestBit(brJumpIdx | indirectIdx, FetchWidth), btbTakenTarget, jbtacTarget)
  io.s1OutPred.bits.predCtr := btbCtrs
  io.s1OutPred.bits.btbHitWay := btbHit
  io.s1OutPred.bits.rasSp := DontCare
  io.s1OutPred.bits.rasTopCtr := DontCare

  io.out.bits.pc := pcLatch
  io.out.bits.btb.hits := btbValids.asUInt
  (0 until FetchWidth).foreach(i => io.out.bits.btb.targets(i) := btbTargets(i))
  io.out.bits.jbtac.hitIdx := UIntToOH(jbtacHitIdx)
  io.out.bits.jbtac.target := jbtacTarget
  // TODO: we don't need this repeatedly!
  io.out.bits.hist := io.s1OutPred.bits.hist
  io.out.bits.btbPred := io.s1OutPred



  // debug info
  XSDebug(true.B, "in:(%d %d)   pc=%x ghr=%b\n", io.in.pc.valid, io.in.pc.ready, io.in.pc.bits, hist)
  XSDebug(true.B, "outPred:(%d) pc=0x%x, redirect=%d instrValid=%b tgt=%x\n",
    io.s1OutPred.valid, pcLatch, io.s1OutPred.bits.redirect, io.s1OutPred.bits.instrValid.asUInt, io.s1OutPred.bits.target)
  XSDebug(io.flush && io.redirectInfo.flush(),
    "flush from backend: pc=%x tgt=%x brTgt=%x btbType=%b taken=%d oldHist=%b fetchIdx=%d isExcpt=%d\n",
    r.pc, r.target, r.brTarget, r.btbType, r.taken, r.hist, r.fetchIdx, r.isException)
  XSDebug(io.flush && !io.redirectInfo.flush(),
    "flush from Stage3:  s3Taken=%d s3RollBackHist=%b\n", io.s3Taken, io.s3RollBackHist)

}

class Stage2To3IO extends Stage1To2IO {
}

class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // flush Stage2 when Stage3 or the backend redirects
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  when (io.in.fire()) { inLatch := io.in.bits }
  val validLatch = RegInit(false.B)
  when (io.flush) {
    validLatch := false.B
  }.elsewhen (io.in.fire()) {
    validLatch := true.B
  }.elsewhen (io.out.fire()) {
    validLatch := false.B
  }

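  // Stage2 holds at most one in-flight entry: out is valid while the latched entry is live
  // and no flush is pending, and a new entry is accepted when the buffer is empty or the
  // current entry leaves this cycle.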
  io.out.valid := !io.flush && !flushS2 && validLatch
  io.in.ready := !validLatch || io.out.fire()

  // simply pass the data through for now
  io.out.bits := inLatch

  // debug info
  XSDebug(true.B, "in:(%d %d) pc=%x out:(%d %d) pc=%x\n",
    io.in.valid, io.in.ready, io.in.bits.pc, io.out.valid, io.out.ready, io.out.bits.pc)
  XSDebug(true.B, "validLatch=%d pc=%x\n", validLatch, inLatch.pc)
  XSDebug(io.flush, "flush!!!\n")
}

class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val out = ValidIO(new BranchPrediction)
    // from icache
    val predecode = Flipped(ValidIO(new Predecode))
    // from backend
    val redirectInfo = Input(new RedirectInfo)
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore ghr in stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
    val s3Taken = Output(Bool())
  })

  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  when (io.in.fire()) { inLatch := io.in.bits }
  when (io.flush) {
    validLatch := false.B
  }.elsewhen (io.in.fire()) {
    validLatch := true.B
  }.elsewhen (io.out.valid) {
    validLatch := false.B
  }
  io.out.valid := validLatch && io.predecode.valid && !flushS3 && !io.flush
  io.in.ready := !validLatch || io.out.valid

  // RAS
  // TODO: split retAddr and ctr
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // counts nested calls that share this return address
  }
  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
  val sp = Counter(RasSize)
  val rasTop = ras(sp.value)
  val rasTopAddr = rasTop.retAddr

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // brNotTakens marks the not-taken branches at or before the first jump instruction


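  // Per-slot vectors: a slot counts as a branch/jal/call only if the BTB hit it, predecode
  // reports the matching fuOpType, and the slot lies inside the valid fetch mask; jalrs
  // additionally require a JBTAC hit, while rets are identified by predecode alone.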
  val brs = inLatch.btb.hits & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => ALUOpType.isBranch(t) }).asUInt) & io.predecode.bits.mask
  val brTakens = brs & inLatch.tage.takens.asUInt
  val jals = inLatch.btb.hits & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.jal }).asUInt) & io.predecode.bits.mask
  val calls = inLatch.btb.hits & io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.call }).asUInt)
  val jalrs = inLatch.jbtac.hitIdx & io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.jalr }).asUInt)
  val rets = io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.ret }).asUInt)

  val brTakenIdx = PriorityMux(brTakens, (0 until FetchWidth).map(_.U))
  val jalIdx = PriorityMux(jals, (0 until FetchWidth).map(_.U))
  val callIdx = PriorityMux(calls, (0 until FetchWidth).map(_.U))
  val jalrIdx = PriorityMux(jalrs, (0 until FetchWidth).map(_.U))
  val retIdx = PriorityMux(rets, (0 until FetchWidth).map(_.U))

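  // jmpIdx is the index of the first control-flow-changing slot (rets only participate
  // when the RAS is enabled); s3Taken indicates whether any such slot exists.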
  val jmps = (if (EnableRAS) {brTakens | jals | calls | jalrs | rets} else {brTakens | jals | calls | jalrs})
  val jmpIdx = MuxCase(0.U, (0 until FetchWidth).map(i => (jmps(i), i.U)))
  io.s3Taken := MuxCase(false.B, (0 until FetchWidth).map(i => (jmps(i), true.B)))

  val brNotTakens = VecInit((0 until FetchWidth).map(i => brs(i) && ~inLatch.tage.takens(i) && i.U <= jmpIdx && io.predecode.bits.mask(i)))


  io.out.bits.predCtr := inLatch.btbPred.bits.predCtr
  io.out.bits.btbHitWay := inLatch.btbPred.bits.btbHitWay
  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
  //io.out.bits.btbType := Mux(jmpIdx === retIdx, BTBtype.R,
  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist(0)
  // There may be several not-taken branches before the first jump instruction, so we
  // calculate how many zero bits each instruction has to shift into its global history.
  // Each instruction's history excludes its own jump direction.
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
  (0 until FetchWidth).foreach(i => shift(i) := Mux(!brNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
  for (j <- 0 until FetchWidth) {
    var tmp = 0.U
    for (i <- 0 until FetchWidth) {
      tmp = tmp + shift(i)(j)
    }
    histShift(j) := tmp
  }
  (0 until FetchWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
  // save ras checkpoint info
  io.out.bits.rasSp := sp.value
  io.out.bits.rasTopCtr := rasTop.ctr

  // flush BPU and redirect when the direction or the target differs from the Stage1 prediction
  val tToNt = inLatch.btbPred.bits.redirect && ~io.s3Taken
  val ntToT = ~inLatch.btbPred.bits.redirect && io.s3Taken
  val dirDiffers = tToNt || ntToT
  val tgtDiffers = inLatch.btbPred.bits.redirect && io.s3Taken && io.out.bits.target =/= inLatch.btbPred.bits.target
  io.out.bits.redirect := (if (EnableBPD) {dirDiffers || tgtDiffers} else false.B)
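  // Final target: the fall-through PC if nothing is taken, the RAS top for a ret,
  // the JBTAC target for a jalr, otherwise the BTB target of the taken slot.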
  io.out.bits.target := Mux(!io.s3Taken, inLatch.pc + (PopCount(io.predecode.bits.mask) << 2.U), // TODO: RVC
    Mux(jmpIdx === retIdx, rasTopAddr,
    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
    inLatch.btb.targets(jmpIdx))))
  for (i <- 0 until FetchWidth) {
    io.out.bits.instrValid(i) := ((io.s3Taken && i.U <= jmpIdx) || ~io.s3Taken) && io.predecode.bits.mask(i)
  }
  io.flushBPU := io.out.bits.redirect && io.out.valid

  // speculatively update the RAS
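  // On a call: if the return address matches the current top entry, only its counter is
  // bumped (apparently to compress nested calls sharing one return address); otherwise a
  // new entry is pushed. On a ret: the top counter is decremented and the entry is popped
  // only when the counter reaches 1.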
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  val retAddr = inLatch.pc + (callIdx << 2.U) + 4.U
  rasWrite.retAddr := retAddr
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  val rasWritePosition = Mux(allocNewEntry, sp.value + 1.U, sp.value)
  when (io.out.valid) {
    when (jmpIdx === callIdx) {
      ras(rasWritePosition) := rasWrite
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      when (rasTop.ctr === 1.U) {
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
      }
    }
  }
  // use checkpoint to recover RAS
  val recoverSp = io.redirectInfo.redirect.rasSp
  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
    sp.value := recoverSp
    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
  }

  // roll back global history in S1 if S3 redirects
  io.s1RollBackHist := Mux(io.s3Taken, io.out.bits.hist(jmpIdx), io.out.bits.hist(0) << PopCount(brs & ~inLatch.tage.takens.asUInt))

  XSDebug(io.in.fire() && callIdx.orR, "[RAS]:pc=0x%x, rasWritePosition=%d, rasWriteAddr=0x%x\n",
            io.in.bits.pc, rasWritePosition, retAddr)

  // debug info
  XSDebug(io.in.fire(), "in:(%d %d) pc=%x\n", io.in.valid, io.in.ready, io.in.bits.pc)
  XSDebug(io.out.valid, "out:%d pc=%x redirect=%d predcdMask=%b instrValid=%b tgt=%x\n",
    io.out.valid, inLatch.pc, io.out.bits.redirect, io.predecode.bits.mask, io.out.bits.instrValid.asUInt, io.out.bits.target)
  XSDebug(true.B, "flushS3=%d\n", flushS3)
  XSDebug(true.B, "validLatch=%d predecode.valid=%d\n", validLatch, io.predecode.valid)
  XSDebug(true.B, "jmpIdx=%d, brs=%b brTakenIdx=%d brNTakens=%b jalIdx=%d jalrIdx=%d callIdx=%d retIdx=%d\n",
    jmpIdx, brs, brTakenIdx, brNotTakens.asUInt, jalIdx, jalrIdx, callIdx, retIdx)
  XSDebug(true.B, "tgtDiffers:%d, dirDiffers:%d, s3taken=%d\n", tgtDiffers, dirDiffers, io.s3Taken)

  // BPU's TEMP Perf Cnt
  // BoringUtils.addSource(io.out.valid, "MbpS3Cnt")
  // BoringUtils.addSource(io.out.valid && io.out.bits.redirect, "MbpS3TageRed")
  // BoringUtils.addSource(io.out.valid && (inLatch.btbPred.bits.redirect ^ io.s3Taken), "MbpS3TageRedDir")
  // BoringUtils.addSource(io.out.valid && (inLatch.btbPred.bits.redirect
  //             && io.s3Taken && (io.out.bits.target =/= inLatch.btbPred.bits.target)), "MbpS3TageRedTar")
}

class BPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    // flush the pipeline on misprediction and update the BPU based on redirect signals from the brq
    val redirectInfo = Input(new RedirectInfo)

    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after the predecode unit is implemented
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

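  // Pipeline wiring: s1 gives the quick BTB/JBTAC prediction to the IFU via btbOut; s3
  // gives the refined TAGE/RAS prediction via tageOut and can flush s1/s2 through
  // flushBPU; a redirect from the backend flushes all three stages.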
  s1.io.redirectInfo <> io.redirectInfo
  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.s1OutPred
  s1.io.s3RollBackHist := s3.io.s1RollBackHist
  s1.io.s3Taken := s3.io.s3Taken

  s1.io.out <> s2.io.in
  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

  s2.io.out <> s3.io.in
  s3.io.flush := io.redirectInfo.flush()
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirectInfo <> io.redirectInfo

  // TODO: temporary and ugly code; when perf counters are added (maybe after adding CSR), please move the counters below
  // val bpuPerfCntList = List(
  //   ("MbpInstr","         "),
  //   ("MbpRight","         "),
  //   ("MbpWrong","         "),
  //   ("MbpBRight","        "),
  //   ("MbpBWrong","        "),
  //   ("MbpJRight","        "),
  //   ("MbpJWrong","        "),
  //   ("MbpIRight","        "),
  //   ("MbpIWrong","        "),
  //   ("MbpRRight","        "),
  //   ("MbpRWrong","        "),
  //   ("MbpS3Cnt","         "),
  //   ("MbpS3TageRed","     "),
  //   ("MbpS3TageRedDir","  "),
  //   ("MbpS3TageRedTar","  ")
  // )

  // val bpuPerfCnts = List.fill(bpuPerfCntList.length)(RegInit(0.U(XLEN.W)))
  // val bpuPerfCntConds = List.fill(bpuPerfCntList.length)(WireInit(false.B))
  // (bpuPerfCnts zip bpuPerfCntConds) map { case (cnt, cond) => { when (cond) { cnt := cnt + 1.U }}}

  // for(i <- bpuPerfCntList.indices) {
  //   BoringUtils.addSink(bpuPerfCntConds(i), bpuPerfCntList(i)._1)
  // }

  // val xsTrap = WireInit(false.B)
  // BoringUtils.addSink(xsTrap, "XSTRAP_BPU")

  // // if (!p.FPGAPlatform) {
  //   when (xsTrap) {
  //     printf("=================BPU's PerfCnt================\n")
  //     for(i <- bpuPerfCntList.indices) {
  //       printf(bpuPerfCntList(i)._1 + bpuPerfCntList(i)._2 + " <- " + "%d\n", bpuPerfCnts(i))
  //     }
  //   }
  // // }
}
483}