package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType

class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class PredictorResponse extends XSBundle {
  class UbtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val takens = Vec(PredictWidth, Bool())
    val notTakens = Vec(PredictWidth, Bool())
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val types = Vec(PredictWidth, UInt(2.W))
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BimResp extends XSBundle {
    val ctrs = Vec(PredictWidth, ValidUndirectioned(UInt(2.W)))
  }
  class TageResp extends XSBundle {
    // the valid bits indicate whether the prediction is a hit
    val takens = Vec(PredictWidth, ValidUndirectioned(Bool()))
  }

  val ubtb = new UbtbResp
  val btb = new BtbResp
  val bim = new BimResp
  val tage = new TageResp
}

abstract class BasePredictor extends XSModule {
  val metaLen = 0

  // An implementation MUST extend the IO bundle with a response
  // and the special input from other predictors, as well as
  // the metas to store in BRQ
  abstract class Resp extends PredictorResponse {}
  abstract class FromOthers extends XSBundle {}
  abstract class Meta extends XSBundle {}

  class DefaultBasePredictorIO extends XSBundle {
    val flush = Input(Bool())
    val pc = Flipped(ValidIO(UInt(VAddrBits.W)))
    val hist = Input(UInt(HistoryLength.W))
    val inMask = Input(UInt(PredictWidth.W))
    val update = Flipped(ValidIO(new BranchUpdateInfoWithHist))
  }

  val io = IO(new DefaultBasePredictorIO)

  // circular shifting
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }

  def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << (len.U - shamt)
    val lower = source >> shamt
    res := higher | lower
    res
  }
}
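
// For reference, a concrete predictor built on BasePredictor would typically
// follow the sketch below (all names here are illustrative, not part of this
// file): it refines Resp and Meta, and widens DefaultBasePredictorIO with its
// response and per-slot meta while keeping the default pc/hist/inMask/update
// ports.
//
//   class FooPredictor extends BasePredictor {
//     class FooResp extends Resp {}
//     class FooMeta extends Meta { val hit = Bool() }
//     class FooIO extends DefaultBasePredictorIO {
//       val resp = Output(new FooResp)
//       val meta = Output(Vec(PredictWidth, new FooMeta))
//     }
//     // table lookups indexed by io.pc / io.hist go here
//   }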

class BetweenBPUStage extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val mask = UInt(PredictWidth.W)
  val resp = new PredictorResponse
  val target = UInt(VAddrBits.W)
  val brInfo = Vec(PredictWidth, new BranchInfo)
}

class BPUStageIO extends XSBundle {
  val flush = Input(Bool())
  val in = Flipped(Decoupled(new BetweenBPUStage))
  val predecode = Flipped(ValidIO(new Predecode))
  val pred = Decoupled(new BranchPrediction)
  val out = Decoupled(new BetweenBPUStage)
}

abstract class BPUStage extends XSModule {
  val io = IO(new BPUStageIO)

  def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U)

  val predValid = RegInit(false.B)
  val inFire = io.in.fire()
  val inLatch = RegEnable(io.in.bits, inFire)
  val outFire = io.out.fire()

  io.in.ready := !predValid || io.out.fire() && io.pred.fire()

  // Each stage has its own logic to decide
  // takens, notTakens and target

  val takens = Wire(Vec(PredictWidth, Bool()))
  val notTakens = Wire(Vec(PredictWidth, Bool()))
  val jmpIdx = PriorityEncoder(takens)
  val hasNTBr = (0 until PredictWidth).map(i => i.U <= jmpIdx && notTakens(i)).reduce(_||_)
  val taken = takens.reduce(_||_)
  // get the last valid inst
  val lastValidPos = PriorityMux((PredictWidth - 1 to 0 by -1).map(i => (inLatch.mask(i), i.U)))
  val target = Wire(UInt(VAddrBits.W))

  io.pred.bits <> DontCare
  io.pred.bits.taken := taken
  io.pred.bits.jmpIdx := jmpIdx
  io.pred.bits.hasNotTakenBrs := hasNTBr
  io.pred.bits.target := target

  io.out.bits <> DontCare
  io.out.bits.pc := inLatch.pc
  io.out.bits.mask := inLatch.mask
  io.out.bits.target := target
  io.out.bits.resp <> inLatch.resp
  io.out.bits.brInfo := inLatch.brInfo

  // Default valid logic.
  // pred.ready is not taken into consideration,
  // which could be broken
  when (io.flush) {
    predValid := false.B
  }.elsewhen (inFire) {
    predValid := true.B
  }.elsewhen (outFire) {
    predValid := false.B
  }.otherwise {
    predValid := predValid
  }

  io.out.valid := predValid && !io.flush
  io.pred.valid := predValid && !io.flush
}
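
// A worked example of the fall-through target used by the stages below:
// each prediction slot covers 2 bytes (RVC granularity), so
// npc(pc, instCount) = pc + 2 * instCount. With, e.g., PredictWidth = 8 and
// a fully-set mask, PopCount(inLatch.mask) = 8 and the not-taken target is
// pc + 16, i.e. the start of the next fetch packet.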

class BPUStage1 extends BPUStage {

  // 'overrides' the default valid logic:
  // when a flush arrives, the prediction should also start
  override val predValid = BoolStopWatch(io.flush || inFire, outFire, true)
  io.out.valid := predValid

  // ubtb is accessed with the latched pc inside s1, so its response
  // already matches inLatch.pc and we use io.in instead of inLatch
  val ubtbResp = io.in.bits.resp.ubtb
  // the read operation is already masked, so we do not need to mask here
  takens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.takens(i)))
  notTakens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.notTakens(i)))
  target := Mux(taken, ubtbResp.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := taken
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !ubtbResp.isRVC(lastValidPos)

  // resp and brInfo come straight from the components,
  // so they do not need to be latched
  io.out.bits.resp := io.in.bits.resp
  io.out.bits.brInfo := io.in.bits.brInfo
}

class BPUStage2 extends BPUStage {
  // Use the latched response from s1
  val btbResp = inLatch.resp.btb
  val bimResp = inLatch.resp.bim
  takens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && bimResp.ctrs(i).bits(1)))
  notTakens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && btbResp.types(i) === BrType.branch && !bimResp.ctrs(i).bits(1)))
  target := Mux(taken, btbResp.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !btbResp.isRVC(lastValidPos)
}

class BPUStage3 extends BPUStage {

  io.out.valid := predValid && io.predecode.valid && !io.flush

  // TAGE has its own pipeline and its
  // response arrives directly in s3,
  // so we do not use the one from inLatch
  val tageResp = io.in.bits.resp.tage
  val tageValidTakens = VecInit(tageResp.takens.map(t => t.valid && t.bits))

  val pdMask = io.predecode.bits.mask
  val pds = io.predecode.bits.pd

  val btbHits = VecInit(inLatch.resp.btb.targets.map(_.valid)).asUInt
  val bimTakens = VecInit(inLatch.resp.bim.ctrs.map(_.bits(1)))

  val brs = pdMask & Reverse(Cat(pds.map(_.isBr)))
  val jals = pdMask & Reverse(Cat(pds.map(_.isJal)))
  val jalrs = pdMask & Reverse(Cat(pds.map(_.isJalr)))
  val calls = pdMask & Reverse(Cat(pds.map(_.isCall)))
  val rets = pdMask & Reverse(Cat(pds.map(_.isRet)))

  val callIdx = PriorityEncoder(calls)
  val retIdx = PriorityEncoder(rets)

  val brTakens =
    if (EnableBPD) {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && tageValidTakens(i))))
    } else {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && bimTakens(i))))
    }

  takens := VecInit((0 until PredictWidth).map(i => brTakens(i) || jals(i) || jalrs(i)))
  // Should branches that are not recorded in the btb be counted in?
  // PS: Currently they are counted in. Whenever tage does not provide a valid
  // taken prediction, the branch is counted as a not-taken branch
  notTakens := VecInit((0 until PredictWidth).map(i => brs(i) && !tageValidTakens(i)))
  target := Mux(taken, inLatch.resp.btb.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !pds(lastValidPos).isRVC

  // Wrap the tage resp and tage meta in.
  // This is ugly
  io.out.bits.resp.tage := io.in.bits.resp.tage
  for (i <- 0 until PredictWidth) {
    io.out.bits.brInfo(i).tageMeta := io.in.bits.brInfo(i).tageMeta
  }
}
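
// Note on the Reverse(Cat(...)) idiom in BPUStage3 above: Cat places element 0
// of a Seq at the most significant bit, so Reverse is applied to make bit i of
// the resulting mask correspond to prediction slot i. For example, with
// PredictWidth = 4 and isBr = (slot0, slot1, slot2, slot3) = (1, 0, 1, 0),
// Cat gives "b1010".U (slot0 at the MSB) and Reverse gives "b0101".U,
// so brs(0) refers to slot 0 as intended.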

// trait BranchPredictorComponents extends HasXSParameter {
//   val ubtb = Module(new MicroBTB)
//   val btb = Module(new BTB)
//   val bim = Module(new BIM)
//   val tage = Module(new Tage)
//   val preds = Seq(ubtb, btb, bim, tage)
//   preds.map(_.io := DontCare)
// }

class BPUReq extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val hist = UInt(HistoryLength.W)
  val inMask = UInt(PredictWidth.W)
}

class BranchUpdateInfoWithHist extends BranchUpdateInfo {
  val hist = UInt(HistoryLength.W)
}

// class BaseBPU extends XSModule {
//   val io = IO(new Bundle() {
//     // from backend
//     val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist))
//     // from ifu, frontend redirect
//     val flush = Input(UInt(3.W))
//     // from if1
//     val in = Flipped(ValidIO(new BPUReq))
//     // to if2/if3/if4
//     val out = Vec(3, Decoupled(new BranchPrediction))
//     // from if4
//     val predecode = Flipped(ValidIO(new Predecode))
//     // to if4, some bpu info used for updating
//     val branchInfo = Decoupled(Vec(PredictWidth, new BranchInfo))
//   })
// }

class BaseBPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist))
    // from ifu, frontend redirect
    val flush = Input(UInt(3.W))
    // from if1
    val in = Flipped(ValidIO(new BPUReq))
    // to if2/if3/if4
    val out = Vec(3, Decoupled(new BranchPrediction))
    // from if4
    val predecode = Flipped(ValidIO(new Predecode))
    // to if4, some bpu info used for updating
    val branchInfo = Decoupled(Vec(PredictWidth, new BranchInfo))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  s1.io.flush := io.flush(0)
  s2.io.flush := io.flush(1)
  s3.io.flush := io.flush(2)

  s1.io.in <> DontCare
  s2.io.in <> s1.io.out
  s3.io.in <> s2.io.out

  io.out(0) <> s1.io.pred
  io.out(1) <> s2.io.pred
  io.out(2) <> s3.io.pred

  val ubtb = Module(new MicroBTB)
  val btb = Module(new BTB)
  val bim = Module(new BIM)
  val tage = Module(new Tage)
  val preds = Seq(ubtb, btb, bim, tage)
  preds.foreach(_.io.update <> io.inOrderBrInfo)

  //**********************Stage 1****************************//
  val s1_fire = s1.io.in.fire()
  val s1_resp_in = Wire(new PredictorResponse)
  val s1_brInfo_in = Wire(Vec(PredictWidth, new BranchInfo))

  s1_resp_in := DontCare
  s1_brInfo_in := DontCare

  val s1_inLatch = RegEnable(io.in, s1_fire)
  ubtb.io.flush := io.flush(0) // TODO: fix this
  ubtb.io.pc.valid := s1_inLatch.valid
  ubtb.io.pc.bits := s1_inLatch.bits.pc
  ubtb.io.inMask := s1_inLatch.bits.inMask

  // Wrap the ubtb response into resp_in and brInfo_in
  s1_resp_in.ubtb <> ubtb.io.out
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).ubtbWriteWay := ubtb.io.uBTBBranchInfo.writeWay(i)
    s1_brInfo_in(i).ubtbHits := ubtb.io.uBTBBranchInfo.hits(i)
  }

  btb.io.flush := io.flush(0) // TODO: fix this
  btb.io.pc.valid := io.in.valid
  btb.io.pc.bits := io.in.bits.pc
  btb.io.inMask := io.in.bits.inMask

  // Wrap the btb response into resp_in and brInfo_in
  s1_resp_in.btb <> btb.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).btbWriteWay := btb.io.meta.writeWay(i)
  }

  bim.io.flush := io.flush(0) // TODO: fix this
  bim.io.pc.valid := io.in.valid
  bim.io.pc.bits := io.in.bits.pc
  bim.io.inMask := io.in.bits.inMask

  // Wrap the bim response into resp_in and brInfo_in
  s1_resp_in.bim <> bim.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).bimCtr := bim.io.meta.ctrs(i)
  }

  s1.io.in.valid := io.in.valid
  s1.io.in.bits.pc := io.in.bits.pc
  s1.io.in.bits.mask := io.in.bits.inMask
  s1.io.in.bits.target := DontCare
  s1.io.in.bits.resp := s1_resp_in
  s1.io.in.bits.brInfo <> s1_brInfo_in

  //**********************Stage 2****************************//
  tage.io.flush := io.flush(1) // TODO: fix this
  tage.io.pc.valid := s1.io.out.fire()
  tage.io.pc.bits := s1.io.out.bits.pc    // pc from s1
  tage.io.hist := io.in.bits.hist         // history from the if1 request
  tage.io.inMask := s1.io.out.bits.mask
  tage.io.s3Fire := s3.io.in.fire()       // tell tage to advance one stage
  tage.io.bim <> s1.io.out.bits.resp.bim  // use bim results from s1

  //**********************Stage 3****************************//
  // Wrap the tage response and meta into s3.io.in.bits.
  // This is ugly

  s3.io.in.bits.resp.tage <> tage.io.resp
  for (i <- 0 until PredictWidth) {
    s3.io.in.bits.brInfo(i).tageMeta := tage.io.meta(i)
  }

  s3.io.predecode <> io.predecode

  s3.io.out.ready := io.branchInfo.ready

  io.branchInfo.valid := s3.io.out.valid
  io.branchInfo.bits := s3.io.out.bits.brInfo
}
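
// Dataflow summary of the wiring above: ubtb is looked up with the pc latched
// at the s1 boundary, while btb and bim are looked up with the incoming if1
// request so that their responses are available when s2 latches them; tage has
// its own internal pipeline, is fed from s1's output and advances when s3
// fires, and its response and meta are wrapped directly into s3's input.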

class BPU extends BaseBPU {}


class FakeBPU extends BaseBPU {
  io.out.foreach(i => {
    // Always predict not taken
    i.valid := true.B
    i.bits <> DontCare
    i.bits.redirect := false.B
  })
  io.branchInfo <> DontCare
}
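
// Usage sketch (illustrative, not part of this file): the frontend is expected
// to instantiate one of the two implementations above, e.g.
//   val bpu = if (EnableBPU) Module(new BPU) else Module(new FakeBPU)
// where EnableBPU stands for an assumed configuration switch; FakeBPU keeps the
// same interface but always reports not-taken and never redirects.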