package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType

/** Virtual-address view used to index banked predictor tables.
  * Layout (MSB to LSB): tag | idx | offset (1 bit, halfword select).
  */
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // low-order idx bits select the bank; the remaining idx bits address within the bank
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

/** Aggregated per-fetch-slot responses of every predictor component. */
class PredictorResponse extends XSBundle {
  class UbtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val takens = Vec(PredictWidth, Bool())
    val notTakens = Vec(PredictWidth, Bool())
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val types = Vec(PredictWidth, UInt(2.W))
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BimResp extends XSBundle {
    val ctrs = Vec(PredictWidth, ValidUndirectioned(UInt(2.W)))
  }
  class TageResp extends XSBundle {
    // the valid bits indicate whether a prediction is hit
    val takens = Vec(PredictWidth, ValidUndirectioned(Bool()))
  }

  val ubtb = new UbtbResp
  val btb = new BtbResp
  val bim = new BimResp
  val tage = new TageResp
}

/** Common shell for a predictor component (uBTB/BTB/BIM/TAGE). */
abstract class BasePredictor extends XSModule {
  val metaLen = 0

  // An implementation MUST extend the IO bundle with a response
  // and the special input from other predictors, as well as
  // the metas to store in BRQ
  abstract class Resp extends PredictorResponse {}
  abstract class FromOthers extends XSBundle {}
  abstract class Meta extends XSBundle {}

  class DefaultBasePredictorIO extends XSBundle {
    val flush = Input(Bool())
    val pc = Flipped(ValidIO(UInt(VAddrBits.W)))
    val hist = Input(UInt(HistoryLength.W))
    val inMask = Input(UInt(PredictWidth.W))
    val update = Flipped(ValidIO(new BranchUpdateInfoWithHist))
  }

  // NOTE(review): not wrapped in IO(); concrete predictors (outside this file)
  // override `io` with extended bundles, so the wrapping/binding presumably
  // happens there or via XSModule compatibility — confirm, since plain chisel3
  // requires ports to be wrapped in IO(...).
  val io = new DefaultBasePredictorIO

  /** Rotate `source` (a `len`-bit value) left by `shamt` bits. */
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }
}

/** Payload handed from one BPU pipeline stage to the next. */
class BPUStageIO extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val mask = UInt(PredictWidth.W)
  val resp = new PredictorResponse
  val target = UInt(VAddrBits.W)
  val brInfo = Vec(PredictWidth, new BranchInfo)
}

/** Base class for the three BPU pipeline stages.
  * Subclasses drive `takens`, `notTakens` and `target`; everything else
  * (latching, valid/ready handshake, output wiring) lives here.
  */
abstract class BPUStage extends XSModule {
  class DefaultIO extends XSBundle {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new BPUStageIO))
    val pred = Decoupled(new BranchPrediction)
    val out = Decoupled(new BPUStageIO)
  }
  // lazy: BPUStage3 overrides this val and the base constructor reads it;
  // a strict overridden val would still be null at that point.
  // NOTE(review): not wrapped in IO() — see BasePredictor.io.
  lazy val io = new DefaultIO

  // next-PC past `instCount` RVC slots (each slot is 2 bytes)
  def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U)

  val inFire = io.in.fire()
  val inLatch = RegEnable(io.in.bits, inFire)
  val outFire = io.out.fire()

  // lazy for the same reason as `io`: BPUStage1 overrides it and it is
  // read by the base-class statements below.
  lazy val predValid = RegInit(false.B)

  // Declared after predValid: the original read predValid before its
  // definition, which dereferences null during elaboration.
  io.in.ready := !predValid || io.out.fire() && io.pred.fire()

  // Each stage has its own logic to decide
  // takens, notTakens and target.
  // Wires (not bare types) so subclasses can connect them with `:=`.
  val takens = Wire(Vec(PredictWidth, Bool()))
  val notTakens = Wire(Vec(PredictWidth, Bool()))
  // index of the first predicted-taken slot
  val jmpIdx = PriorityEncoder(takens)
  val taken = takens.reduce(_||_)
  // any not-taken branch at or before the first taken slot
  val hasNTBr = (0 until PredictWidth).map(i => i.U <= jmpIdx && notTakens(i)).reduce(_||_)
  // get the last valid inst: scan the mask from the highest index down.
  // `by -1` is required — the original ascending range (hi to 0) is empty.
  val lastValidPos = PriorityMux((PredictWidth-1 to 0 by -1).map(i => (inLatch.mask(i), i.U)))
  val target = Wire(UInt(VAddrBits.W))

  io.pred.bits <> DontCare
  io.pred.bits.taken := taken
  io.pred.bits.jmpIdx := jmpIdx
  io.pred.bits.hasNotTakenBrs := hasNTBr
  io.pred.bits.target := target

  io.out.bits <> DontCare
  io.out.bits.pc := inLatch.pc
  io.out.bits.mask := inLatch.mask
  io.out.bits.target := target
  io.out.bits.resp <> inLatch.resp
  io.out.bits.brInfo := inLatch.brInfo

  // Default valid logic.
  // pred.ready not taken into consideration
  // could be broken
  when (io.flush) {
    predValid := false.B
  }.elsewhen (inFire) {
    predValid := true.B
  }.elsewhen (outFire) {
    predValid := false.B
  }.otherwise {
    predValid := predValid
  }

  io.out.valid := predValid && !io.flush
  io.pred.valid := predValid && !io.flush
}

/** Stage 1: predict with the micro-BTB response. */
class BPUStage1 extends BPUStage {

  // 'overrides' default logic
  // when flush, the prediction should also start
  // (lazy to match the base declaration — strict override vals are null
  // while the base constructor runs)
  override lazy val predValid = BoolStopWatch(io.flush || inFire, outFire, true)
  io.out.valid := predValid

  // ubtb is accessed with inLatch pc in s1,
  // so we use io.in instead of inLatch
  val ubtbResp = io.in.bits.resp.ubtb
  // the read operation is already masked, so we do not need to mask here
  takens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.takens(i)))
  notTakens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.notTakens(i)))
  target := Mux(taken, ubtbResp.targets(jmpIdx), npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := taken
  // keep half of a 32-bit inst when the last slot is not RVC
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !ubtbResp.isRVC(lastValidPos)

  // resp and brInfo are from the components,
  // so it does not need to be latched
  io.out.bits.resp <> io.in.bits.resp
  io.out.bits.brInfo := io.in.bits.brInfo
}

/** Stage 2: refine with BTB targets and BIM direction counters. */
class BPUStage2 extends BPUStage {

  // Use latched response from s1
  val btbResp = inLatch.resp.btb
  val bimResp = inLatch.resp.bim
  // taken iff BTB hits and the 2-bit BIM counter's MSB says taken
  takens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && bimResp.ctrs(i).bits(1)))
  notTakens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && btbResp.types(i) === BrType.branch && !bimResp.ctrs(i).bits(1)))
  target := Mux(taken, btbResp.targets(jmpIdx), npc(inLatch.pc, PopCount(inLatch.mask)))

  // redirect only when we disagree with the previous stage's target
  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !btbResp.isRVC(lastValidPos)
}

/** Stage 3: final decision with TAGE and the IFU predecode information. */
class BPUStage3 extends BPUStage {
  class S3IO extends DefaultIO {
    val predecode = Flipped(ValidIO(new Predecode))
  }
  // lazy to match the base declaration (read by the base constructor)
  override lazy val io = new S3IO
  io.out.valid := predValid && io.predecode.valid && !io.flush

  // TAGE has its own pipelines and the
  // response comes directly from s3,
  // so we do not use those from inLatch
  val tageResp = io.in.bits.resp.tage
  val tageValidTakens = VecInit(tageResp.takens.map(t => t.valid && t.bits))

  val pdMask = io.predecode.bits.mask
  val pds = io.predecode.bits.pd

  val btbHits = VecInit(inLatch.resp.btb.targets.map(_.valid)).asUInt
  val bimTakens = VecInit(inLatch.resp.bim.ctrs.map(_.bits(1)))

  // per-slot instruction-class masks from predecode (Reverse: Cat puts
  // element 0 at the MSB, masks index from bit 0)
  val brs = pdMask & Reverse(Cat(pds.map(_.isBr)))
  val jals = pdMask & Reverse(Cat(pds.map(_.isJal)))
  val jalrs = pdMask & Reverse(Cat(pds.map(_.isJalr)))
  val calls = pdMask & Reverse(Cat(pds.map(_.isCall)))
  val rets = pdMask & Reverse(Cat(pds.map(_.isRet)))

  val callIdx = PriorityEncoder(calls)
  val retIdx = PriorityEncoder(rets)

  // with TAGE enabled, TAGE decides branch direction; otherwise fall back to BIM
  val brTakens =
    if (EnableBPD) {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && tageValidTakens(i))))
    } else {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && bimTakens(i))))
    }

  // unconditional jumps are always taken
  takens := VecInit((0 until PredictWidth).map(i => brTakens(i) || jals(i) || jalrs(i)))
  // Whether should we count in branches that are not recorded in btb?
  // PS: Currently counted in. Whenever tage does not provide a valid
  // taken prediction, the branch is counted as a not taken branch
  notTakens := VecInit((0 until PredictWidth).map(i => brs(i) && !tageValidTakens(i)))
  target := Mux(taken, inLatch.resp.btb.targets(jmpIdx), npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !pds(lastValidPos).isRVC

  // Wrap tage resp and tage meta in
  // This is ugly
  io.out.bits.resp.tage <> io.in.bits.resp.tage
  for (i <- 0 until PredictWidth) {
    io.out.bits.brInfo(i).tageMeta := io.in.bits.brInfo(i).tageMeta
  }
}

/** Instantiates all predictor components shared by the BPU variants. */
trait BranchPredictorComponents extends HasXSParameter {
  val ubtb = Module(new MicroBTB)
  val btb = Module(new BTB)
  val bim = Module(new BIM)
  val tage = Module(new Tage)
  val preds = Seq(ubtb, btb, bim, tage)
  preds.map(_.io := DontCare)
}

/** Request from IF1: fetch PC, global history, and instruction mask. */
class BPUReq extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val hist = UInt(HistoryLength.W)
  val inMask = UInt(PredictWidth.W)
}

/** Branch update info extended with the global history at prediction time. */
class BranchUpdateInfoWithHist extends BranchUpdateInfo {
  val hist = UInt(HistoryLength.W)
}

/** Common shell of the BPU: stage instances and inter-stage plumbing. */
abstract class BaseBPU extends XSModule with BranchPredictorComponents {
  val io = IO(new Bundle() {
    // from backend
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist))
    // from ifu, frontend redirect
    val flush = Input(UInt(3.W))
    // from if1
    val in = Flipped(ValidIO(new BPUReq))
    // to if2/if3/if4
    val out = Vec(3, Decoupled(new BranchPrediction))
    // from if4
    val predecode = Flipped(ValidIO(new Predecode))
    // to if4, some bpu info used for updating
    val branchInfo = Decoupled(Vec(PredictWidth, new BranchInfo))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  // TODO: whether to update ubtb when btb successfully
  // corrects the wrong prediction from ubtb
  preds.map(_.io.update <> io.inOrderBrInfo)

  // one flush bit per stage
  s1.io.flush := io.flush(0)
  s2.io.flush := io.flush(1)
  s3.io.flush := io.flush(2)

  // chain the stages; s1's input is wired by the concrete BPU
  s1.io.in <> DontCare
  s2.io.in <> s1.io.out
  s3.io.in <> s2.io.out

  io.out(0) <> s1.io.pred
  io.out(1) <> s2.io.pred
  io.out(2) <> s3.io.pred

  s3.io.predecode <> io.predecode

  io.branchInfo.valid := s3.io.out.valid
  io.branchInfo.bits := s3.io.out.bits.brInfo
  s3.io.out.ready := io.branchInfo.ready
}

/** Stub BPU that always predicts not-taken (for bring-up). */
class FakeBPU extends BaseBPU {
  io.out.foreach(i => {
    // Provide not takens
    i.valid := true.B
    // NOTE(review): drives a Bool literal onto a BranchPrediction bundle;
    // presumably intended as "everything false" — confirm this elaborates,
    // otherwise drive the individual fields (or DontCare) instead.
    i.bits := false.B
  })
  io.branchInfo <> DontCare
}

/** Full BPU: wires the predictor components into the three stages. */
class BPU extends BaseBPU {

  //**********************Stage 1****************************//
  val s1_fire = s1.io.in.fire()
  // Wire(...) added: the original bare `new PredictorResponse` is a type,
  // not hardware, and cannot be the target of `:=` / `<>` below
  val s1_resp_in = Wire(new PredictorResponse)
  val s1_brInfo_in = Wire(Vec(PredictWidth, new BranchInfo))

  s1_resp_in := DontCare
  s1_brInfo_in := DontCare

  // uBTB reads with the latched PC (one cycle later than BTB/BIM)
  val s1_inLatch = RegEnable(io.in, s1_fire)
  ubtb.io.flush := io.flush(0) // TODO: fix this
  ubtb.io.pc.valid := s1_inLatch.valid
  ubtb.io.pc.bits := s1_inLatch.bits.pc
  ubtb.io.inMask := s1_inLatch.bits.inMask

  // Wrap ubtb response into resp_in and brInfo_in
  s1_resp_in.ubtb <> ubtb.io.out
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).ubtbWriteWay := ubtb.io.uBTBBranchInfo.writeWay(i)
    s1_brInfo_in(i).ubtbHits := ubtb.io.uBTBBranchInfo.hits(i)
  }

  btb.io.flush := io.flush(0) // TODO: fix this
  btb.io.pc.valid := io.in.valid
  btb.io.pc.bits := io.in.bits.pc
  btb.io.inMask := io.in.bits.inMask

  // Wrap btb response into resp_in and brInfo_in
  s1_resp_in.btb <> btb.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).btbWriteWay := btb.io.meta.writeWay(i)
  }

  bim.io.flush := io.flush(0) // TODO: fix this
  bim.io.pc.valid := io.in.valid
  bim.io.pc.bits := io.in.bits.pc
  bim.io.inMask := io.in.bits.inMask

  // Wrap bim response into resp_in and brInfo_in
  s1_resp_in.bim <> bim.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).bimCtr := bim.io.meta.ctrs(i)
  }

  s1.io.in.valid := io.in.valid
  s1.io.in.bits.pc := io.in.bits.pc
  s1.io.in.bits.mask := io.in.bits.inMask
  s1.io.in.bits.target := DontCare
  s1.io.in.bits.resp := s1_resp_in
  s1.io.in.bits.brInfo <> s1_brInfo_in

  tage.io.flush := io.flush(1) // TODO: fix this
  tage.io.pc.valid := s1.io.out.fire()
  tage.io.pc.bits := s1.io.out.bits.pc // PC from s1
  tage.io.hist := io.in.bits.hist // The inst is from s1
  tage.io.inMask := s1.io.out.bits.mask
  tage.io.s3Fire := s3.io.in.fire() // Tell tage to march 1 stage
  tage.io.bim <> s1.io.out.bits.resp.bim // Use bim results from s1

  // Wrap tage response and meta into s3.io.in.bits
  // This is ugly
  s3.io.in.bits.resp.tage <> tage.io.resp
  for (i <- 0 until PredictWidth) {
    s3.io.in.bits.brInfo(i).tageMeta := tage.io.meta(i)
  }
}