package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType

class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class PredictorResponse extends XSBundle {
  class UbtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val takens = Vec(PredictWidth, Bool())
    val notTakens = Vec(PredictWidth, Bool())
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, ValidUndirectioned(UInt(VAddrBits.W)))
    val types = Vec(PredictWidth, UInt(2.W))
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BimResp extends XSBundle {
    val ctrs = Vec(PredictWidth, ValidUndirectioned(UInt(2.W)))
  }
  class TageResp extends XSBundle {
    // the valid bits indicate whether a prediction is hit
    val takens = Vec(PredictWidth, ValidUndirectioned(Bool()))
  }

  val ubtb = new UbtbResp
  val btb = new BtbResp
  val bim = new BimResp
  val tage = new TageResp
}

abstract class BasePredictor extends XSModule {
  val metaLen = 0

  // An implementation MUST extend the IO bundle with a response,
  // the special inputs from other predictors, as well as
  // the metas to store in BRQ
  abstract class Resp extends PredictorResponse {}
  abstract class FromOthers extends XSBundle {}
  abstract class Meta extends XSBundle {}

  class DefaultBasePredictorIO extends XSBundle {
    val flush = Input(Bool())
    val pc = Flipped(ValidIO(UInt(VAddrBits.W)))
    val hist = Input(UInt(HistoryLength.W))
    val inMask = Input(UInt(PredictWidth.W))
    val update = Flipped(ValidIO(new BranchUpdateInfoWithHist))
  }

  // circular left shift, e.g. len = 8, shamt = 3: abcdefgh -> defghabc
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }
}

class BPUStageIO extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val mask = UInt(PredictWidth.W)
  val resp = new PredictorResponse
  val target = UInt(VAddrBits.W)
  val brInfo = Vec(PredictWidth, new BranchInfo)
}


abstract class BPUStage extends XSModule {
  class DefaultIO extends XSBundle {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new BPUStageIO))
    val pred = Decoupled(new BranchPrediction)
    val out = Decoupled(new BPUStageIO)
  }
  // lazy so that BPUStage3 can override it with an extended bundle (S3IO)
  lazy val io: DefaultIO = IO(new DefaultIO)

  def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U)

  val predValid = RegInit(false.B)
  io.in.ready := !predValid || io.out.fire() && io.pred.fire()

  val inFire = io.in.fire()
  val inLatch = RegEnable(io.in.bits, inFire)
  val outFire = io.out.fire()

  // Each stage has its own logic to decide
  // takens, notTakens and target
  val takens = Wire(Vec(PredictWidth, Bool()))
  val notTakens = Wire(Vec(PredictWidth, Bool()))
  val jmpIdx = PriorityEncoder(takens)
  val taken = takens.reduce(_||_)
  // whether there is a not-taken branch at or before the first taken slot
  val hasNTBr = (0 until PredictWidth).map(i => i.U <= jmpIdx && notTakens(i)).reduce(_||_)
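  // Worked example, assuming PredictWidth = 8: with takens = "b00000100" and
  // notTakens = "b00000001", jmpIdx = 2 (the lowest taken slot), taken is set,
  // and hasNTBr is set because slot 0 holds a not-taken branch at or before jmpIdx.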
  // position of the last valid instruction in the fetch packet
  val lastValidPos = PriorityMux((PredictWidth-1 to 0 by -1).map(i => (inLatch.mask(i), i.U)))
  val target = Wire(UInt(VAddrBits.W))

  io.pred.bits <> DontCare
  io.pred.bits.taken := taken
  io.pred.bits.jmpIdx := jmpIdx
  io.pred.bits.hasNotTakenBrs := hasNTBr
  io.pred.bits.target := target

  io.out.bits <> DontCare
  io.out.bits.pc := inLatch.pc
  io.out.bits.mask := inLatch.mask
  io.out.bits.target := target
  io.out.bits.resp <> inLatch.resp
  io.out.bits.brInfo := inLatch.brInfo

  // Default valid logic
  //  pred.ready is not taken into consideration, which could be broken
  when (io.flush) {
    predValid := false.B
  }.elsewhen (inFire) {
    predValid := true.B
  }.elsewhen (outFire) {
    predValid := false.B
  }

  io.out.valid := predValid && !io.flush
  io.pred.valid := predValid && !io.flush
}

class BPUStage1 extends BPUStage {
  // uses the inherited DefaultIO

  // 'overrides' the default valid logic:
  // a flush should restart the prediction rather than invalidate it
  when (io.flush) { predValid := true.B }
  io.out.valid := predValid

  // ubtb is accessed with the pc latched in BPU, so its response is already
  // aligned with s1; use io.in instead of inLatch
  val ubtbResp = io.in.bits.resp.ubtb
  // the read operation is already masked, so we do not need to mask here
  takens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.takens(i)))
  notTakens := VecInit((0 until PredictWidth).map(i => ubtbResp.targets(i).valid && ubtbResp.notTakens(i)))
  target := Mux(taken, ubtbResp.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := taken
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !ubtbResp.isRVC(lastValidPos)

  // resp and brInfo come straight from the components,
  // so they do not need to be latched
  io.out.bits.resp <> io.in.bits.resp
  io.out.bits.brInfo := io.in.bits.brInfo
}

class BPUStage2 extends BPUStage {
  // uses the inherited DefaultIO

  // Use the latched response from s1
  val btbResp = inLatch.resp.btb
  val bimResp = inLatch.resp.bim
  takens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && bimResp.ctrs(i).bits(1)))
  notTakens := VecInit((0 until PredictWidth).map(i => btbResp.targets(i).valid && btbResp.types(i) === BrType.branch && !bimResp.ctrs(i).bits(1)))
  target := Mux(taken, btbResp.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !btbResp.isRVC(lastValidPos)
}

class BPUStage3 extends BPUStage {
  class S3IO extends DefaultIO {
    val predecode = Flipped(ValidIO(new Predecode))
  }
  override lazy val io: S3IO = IO(new S3IO)

  io.out.valid := predValid && io.predecode.valid && !io.flush

  // TAGE has its own pipeline and its
  // response comes directly in s3,
  // so we do not use the one from inLatch
  val tageResp = io.in.bits.resp.tage
  val tageValidTakens = VecInit(tageResp.takens.map(t => t.valid && t.bits))

  val pdMask = io.predecode.bits.mask
  val pds = io.predecode.bits.pd

  val btbHits = VecInit(inLatch.resp.btb.targets.map(_.valid)).asUInt
  val bimTakens = VecInit(inLatch.resp.bim.ctrs.map(_.bits(1)))
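  // Note: Cat places pds(0) at the most significant bit, so Reverse is applied
  // below to keep instruction i of the fetch packet at bit i of each derived mask.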
  val brs   = pdMask & Reverse(Cat(pds.map(_.isBr)))
  val jals  = pdMask & Reverse(Cat(pds.map(_.isJal)))
  val jalrs = pdMask & Reverse(Cat(pds.map(_.isJalr)))
  val calls = pdMask & Reverse(Cat(pds.map(_.isCall)))
  val rets  = pdMask & Reverse(Cat(pds.map(_.isRet)))

  val callIdx = PriorityEncoder(calls)
  val retIdx = PriorityEncoder(rets)

  val brTakens =
    if (EnableBPD) {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && tageValidTakens(i))))
    } else {
      brs & Reverse(Cat((0 until PredictWidth).map(i => btbHits(i) && bimTakens(i))))
    }

  takens := VecInit((0 until PredictWidth).map(i => brTakens(i) || jals(i) || jalrs(i)))
  // Should branches that are not recorded in the BTB be counted here?
  // PS: currently they are. Whenever TAGE does not provide a valid
  //     taken prediction, the branch is counted as a not-taken branch.
  notTakens := VecInit((0 until PredictWidth).map(i => brs(i) && !tageValidTakens(i)))
  target := Mux(taken, inLatch.resp.btb.targets(jmpIdx).bits, npc(inLatch.pc, PopCount(inLatch.mask)))

  io.pred.bits.redirect := target =/= inLatch.target
  io.pred.bits.saveHalfRVI := ((lastValidPos === jmpIdx && taken) || !taken) && !pds(lastValidPos).isRVC

  // Wrap the TAGE response and meta into the outputs
  // This is ugly
  io.out.bits.resp.tage <> io.in.bits.resp.tage
  for (i <- 0 until PredictWidth) {
    io.out.bits.brInfo(i).tageMeta := io.in.bits.brInfo(i).tageMeta
  }
}

trait BranchPredictorComponents extends HasXSParameter {
  val ubtb = Module(new MicroBTB)
  val btb  = Module(new BTB)
  val bim  = Module(new BIM)
  val tage = Module(new Tage)
  val preds = Seq(ubtb, btb, bim, tage)
  preds.foreach(_.io <> DontCare)
}

class BPUReq extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val hist = UInt(HistoryLength.W)
  val inMask = UInt(PredictWidth.W)
}

class BranchUpdateInfoWithHist extends BranchUpdateInfo {
  val hist = UInt(HistoryLength.W)
}

abstract class BaseBPU extends XSModule with BranchPredictorComponents {
  val io = IO(new Bundle() {
    // from backend
    val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist))
    // from ifu, frontend redirect
    val flush = Input(UInt(3.W))
    // from if1
    val in = Flipped(ValidIO(new BPUReq))
    // to if2/if3/if4
    val out = Vec(3, Decoupled(new BranchPrediction))
    // from if4
    val predecode = Flipped(ValidIO(new Predecode))
    // to if4, some bpu info used for updating
    val branchInfo = Decoupled(Vec(PredictWidth, new BranchInfo))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  // TODO: decide whether to update the ubtb when the btb successfully
  //       corrects a wrong prediction from the ubtb
  preds.foreach(_.io.update <> io.inOrderBrInfo)

  s1.io.flush := io.flush(0)
  s2.io.flush := io.flush(1)
  s3.io.flush := io.flush(2)

  s1.io.in <> DontCare
  s2.io.in <> s1.io.out
  s3.io.in <> s2.io.out

  io.out(0) <> s1.io.pred
  io.out(1) <> s2.io.pred
  io.out(2) <> s3.io.pred

  s3.io.predecode <> io.predecode

  io.branchInfo.valid := s3.io.out.valid
  io.branchInfo.bits := s3.io.out.bits.brInfo
  s3.io.out.ready := io.branchInfo.ready
}


class FakeBPU extends BaseBPU {
  io.out.foreach(i => {
    i <> DontCare
    i.bits.redirect := false.B
  })
  io.branchInfo <> DontCare
}

class BPU extends BaseBPU {

  //**********************Stage 1****************************//
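  // Predictor lookup wiring: the ubtb is indexed with the pc latched below
  // (s1_inLatch), while the btb and bim are indexed with the incoming if1 pc.
  // Their responses and metadata are collected into s1_resp_in / s1_brInfo_in
  // and handed to stage 1.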
  val s1_fire = s1.io.in.fire()
  val s1_resp_in = Wire(new PredictorResponse)
  val s1_brInfo_in = Wire(Vec(PredictWidth, new BranchInfo))

  s1_resp_in := DontCare
  s1_brInfo_in := DontCare

  val s1_inLatch = RegEnable(io.in, s1_fire)
  ubtb.io.flush := io.flush(0) // TODO: fix this
  ubtb.io.pc.valid := s1_inLatch.valid
  ubtb.io.pc.bits := s1_inLatch.bits.pc
  ubtb.io.inMask := s1_inLatch.bits.inMask

  // Wrap the ubtb response into resp_in and brInfo_in
  s1_resp_in.ubtb <> ubtb.io.out
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).ubtbWriteWay := ubtb.io.meta.writeWay(i)
    s1_brInfo_in(i).ubtbHits := ubtb.io.out.targets(i).valid
  }

  btb.io.flush := io.flush(0) // TODO: fix this
  btb.io.pc.valid := io.in.valid
  btb.io.pc.bits := io.in.bits.pc
  btb.io.inMask := io.in.bits.inMask

  // Wrap the btb response into resp_in and brInfo_in
  s1_resp_in.btb <> btb.io.out
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).btbWriteWay := btb.io.meta.writeWay(i)
  }

  bim.io.flush := io.flush(0) // TODO: fix this
  bim.io.pc.valid := io.in.valid
  bim.io.pc.bits := io.in.bits.pc
  bim.io.inMask := io.in.bits.inMask

  // Wrap the bim response into resp_in and brInfo_in
  s1_resp_in.bim <> bim.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).bimCtr := bim.io.meta(i)
  }


  s1.io.in.valid := io.in.valid
  s1.io.in.bits.pc := io.in.bits.pc
  s1.io.in.bits.mask := io.in.bits.inMask
  s1.io.in.bits.target := DontCare
  s1.io.in.bits.resp := s1_resp_in
  s1.io.in.bits.brInfo <> s1_brInfo_in


  tage.io.flush := io.flush(1) // TODO: fix this
  tage.io.pc.valid := s1.io.out.fire()
  tage.io.pc.bits := s1.io.out.bits.pc // PC from s1
  tage.io.hist := io.in.bits.hist // history from the if1 request
  tage.io.inMask := s1.io.out.bits.mask
  tage.io.s3Fire := s3.io.in.fire() // tell TAGE to advance one stage
  tage.io.bim <> s1.io.out.bits.resp.bim // use the bim result from s1


  // Wrap the TAGE response and meta into s3.io.in.bits
  // This is ugly
  s3.io.in.bits.resp.tage <> tage.io.resp
  for (i <- 0 until PredictWidth) {
    s3.io.in.bits.brInfo(i).tageMeta := tage.io.meta(i)
  }

}