1package xiangshan.frontend 2 3import chisel3._ 4import chisel3.util._ 5import utils._ 6import xiangshan._ 7import xiangshan.backend.ALUOpType 8import xiangshan.backend.JumpOpType 9 10trait HasBPUParameter extends HasXSParameter { 11 val BPUDebug = false 12 val EnableCFICommitLog = true 13 val EnbaleCFIPredLog = true 14 val EnableBPUTimeRecord = EnableCFICommitLog || EnbaleCFIPredLog 15} 16 17class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle { 18 def tagBits = VAddrBits - idxBits - 1 19 20 val tag = UInt(tagBits.W) 21 val idx = UInt(idxBits.W) 22 val offset = UInt(1.W) 23 24 def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this) 25 def getTag(x: UInt) = fromUInt(x).tag 26 def getIdx(x: UInt) = fromUInt(x).idx 27 def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0) 28 def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks)) 29} 30 31class PredictorResponse extends XSBundle { 32 class UbtbResp extends XSBundle { 33 // the valid bits indicates whether a target is hit 34 val targets = Vec(PredictWidth, UInt(VAddrBits.W)) 35 val hits = Vec(PredictWidth, Bool()) 36 val takens = Vec(PredictWidth, Bool()) 37 val brMask = Vec(PredictWidth, Bool()) 38 val is_RVC = Vec(PredictWidth, Bool()) 39 } 40 class BtbResp extends XSBundle { 41 // the valid bits indicates whether a target is hit 42 val targets = Vec(PredictWidth, UInt(VAddrBits.W)) 43 val hits = Vec(PredictWidth, Bool()) 44 val types = Vec(PredictWidth, UInt(2.W)) 45 val isRVC = Vec(PredictWidth, Bool()) 46 } 47 class BimResp extends XSBundle { 48 val ctrs = Vec(PredictWidth, UInt(2.W)) 49 } 50 class TageResp extends XSBundle { 51 // the valid bits indicates whether a prediction is hit 52 val takens = Vec(PredictWidth, Bool()) 53 val hits = Vec(PredictWidth, Bool()) 54 } 55 class LoopResp extends XSBundle { 56 val exit = Vec(PredictWidth, Bool()) 57 } 58 59 val ubtb = new UbtbResp 60 val btb = new BtbResp 61 val bim = new BimResp 62 val tage = new TageResp 63 val loop = new LoopResp 64} 65 66trait PredictorUtils { 67 // circular shifting 68 def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = { 69 val res = Wire(UInt(len.W)) 70 val higher = source << shamt 71 val lower = source >> (len.U - shamt) 72 res := higher | lower 73 res 74 } 75 76 def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = { 77 val res = Wire(UInt(len.W)) 78 val higher = source << (len.U - shamt) 79 val lower = source >> shamt 80 res := higher | lower 81 res 82 } 83 84 // To be verified 85 def satUpdate(old: UInt, len: Int, taken: Bool): UInt = { 86 val oldSatTaken = old === ((1 << len)-1).U 87 val oldSatNotTaken = old === 0.U 88 Mux(oldSatTaken && taken, ((1 << len)-1).U, 89 Mux(oldSatNotTaken && !taken, 0.U, 90 Mux(taken, old + 1.U, old - 1.U))) 91 } 92 93 def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = { 94 val oldSatTaken = old === ((1 << (len-1))-1).S 95 val oldSatNotTaken = old === (-(1 << (len-1))).S 96 Mux(oldSatTaken && taken, ((1 << (len-1))-1).S, 97 Mux(oldSatNotTaken && !taken, (-(1 << (len-1))).S, 98 Mux(taken, old + 1.S, old - 1.S))) 99 } 100} 101abstract class BasePredictor extends XSModule with HasBPUParameter with PredictorUtils { 102 val metaLen = 0 103 104 // An implementation MUST extend the IO bundle with a response 105 // and the special input from other predictors, as well as 106 // the metas to store in BRQ 107 abstract class Resp extends XSBundle {} 108 abstract class FromOthers extends XSBundle {} 109 abstract class Meta extends XSBundle {} 110 111 class DefaultBasePredictorIO extends XSBundle { 112 val flush = Input(Bool()) 113 val pc = Flipped(ValidIO(UInt(VAddrBits.W))) 114 val hist = Input(UInt(HistoryLength.W)) 115 val inMask = Input(UInt(PredictWidth.W)) 116 val update = Flipped(ValidIO(new BranchUpdateInfoWithHist)) 117 val outFire = Input(Bool()) 118 } 119 120 val io = new DefaultBasePredictorIO 121 122 val debug = false 123} 124 125class BPUStageIO extends XSBundle { 126 val pc = UInt(VAddrBits.W) 127 val mask = UInt(PredictWidth.W) 128 val resp = new PredictorResponse 129 val target = UInt(VAddrBits.W) 130 val brInfo = Vec(PredictWidth, new BranchInfo) 131 val saveHalfRVI = Bool() 132} 133 134 135abstract class BPUStage extends XSModule with HasBPUParameter{ 136 class DefaultIO extends XSBundle { 137 val flush = Input(Bool()) 138 val in = Input(new BPUStageIO) 139 val inFire = Input(Bool()) 140 val pred = Output(new BranchPrediction) // to ifu 141 val out = Output(new BPUStageIO) // to the next stage 142 val outFire = Input(Bool()) 143 val predecode = Input(new Predecode) 144 val recover = Flipped(ValidIO(new BranchUpdateInfo)) 145 val debug_hist = Input(UInt((if (BPUDebug) (HistoryLength) else 0).W)) 146 val debug_histPtr = Input(UInt((if (BPUDebug) (ExtHistoryLength) else 0).W)) 147 } 148 val io = IO(new DefaultIO) 149 150 def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U) 151 152 val inLatch = RegEnable(io.in, io.inFire) 153 154 // Each stage has its own logic to decide 155 // takens, notTakens and target 156 157 val takens = Wire(Vec(PredictWidth, Bool())) 158 val notTakens = Wire(Vec(PredictWidth, Bool())) 159 val brMask = Wire(Vec(PredictWidth, Bool())) 160 val jmpIdx = PriorityEncoder(takens) 161 val hasNTBr = (0 until PredictWidth).map(i => i.U <= jmpIdx && notTakens(i) && brMask(i)).reduce(_||_) 162 val taken = takens.reduce(_||_) 163 // get the last valid inst 164 val lastValidPos = WireInit(PriorityMux(Reverse(inLatch.mask), (PredictWidth-1 to 0 by -1).map(i => i.U))) 165 val lastHit = Wire(Bool()) 166 val lastIsRVC = Wire(Bool()) 167 val saveHalfRVI = ((lastValidPos === jmpIdx && taken) || !taken ) && !lastIsRVC && lastHit 168 169 val targetSrc = Wire(Vec(PredictWidth, UInt(VAddrBits.W))) 170 val target = Mux(taken, targetSrc(jmpIdx), npc(inLatch.pc, PopCount(inLatch.mask))) 171 172 io.pred <> DontCare 173 io.pred.redirect := target =/= inLatch.target || inLatch.saveHalfRVI && !saveHalfRVI 174 io.pred.taken := taken 175 io.pred.jmpIdx := jmpIdx 176 io.pred.hasNotTakenBrs := hasNTBr 177 io.pred.target := target 178 io.pred.saveHalfRVI := saveHalfRVI 179 io.pred.takenOnBr := taken && brMask(jmpIdx) 180 181 io.out <> DontCare 182 io.out.pc := inLatch.pc 183 io.out.mask := inLatch.mask 184 io.out.target := target 185 io.out.resp <> inLatch.resp 186 io.out.brInfo := inLatch.brInfo 187 io.out.saveHalfRVI := saveHalfRVI 188 (0 until PredictWidth).map(i => 189 io.out.brInfo(i).sawNotTakenBranch := (if (i == 0) false.B else (brMask.asUInt & notTakens.asUInt)(i-1,0).orR)) 190 191 // Default logic 192 // pred.ready not taken into consideration 193 // could be broken 194 // when (io.flush) { predValid := false.B } 195 // .elsewhen (inFire) { predValid := true.B } 196 // .elsewhen (outFire) { predValid := false.B } 197 // .otherwise { predValid := predValid } 198 199 if (BPUDebug) { 200 XSDebug(io.inFire, "in: pc=%x, mask=%b, target=%x\n", io.in.pc, io.in.mask, io.in.target) 201 XSDebug(io.outFire, "out: pc=%x, mask=%b, target=%x\n", io.out.pc, io.out.mask, io.out.target) 202 XSDebug("flush=%d\n", io.flush) 203 XSDebug("taken=%d, takens=%b, notTakens=%b, jmpIdx=%d, hasNTBr=%d, lastValidPos=%d, target=%x\n", 204 taken, takens.asUInt, notTakens.asUInt, jmpIdx, hasNTBr, lastValidPos, target) 205 val p = io.pred 206 XSDebug(io.outFire, "outPred: redirect=%d, taken=%d, jmpIdx=%d, hasNTBrs=%d, target=%x, saveHalfRVI=%d\n", 207 p.redirect, p.taken, p.jmpIdx, p.hasNotTakenBrs, p.target, p.saveHalfRVI) 208 XSDebug(io.outFire && p.taken, "outPredTaken: fetchPC:%x, jmpPC:%x\n", 209 inLatch.pc, inLatch.pc + (jmpIdx << 1.U)) 210 XSDebug(io.outFire && p.redirect, "outPred: previous target:%x redirected to %x \n", 211 inLatch.target, p.target) 212 XSDebug(io.outFire, "outPred targetSrc: ") 213 for (i <- 0 until PredictWidth) { 214 XSDebug(false, io.outFire, "(%d):%x ", i.U, targetSrc(i)) 215 } 216 XSDebug(false, io.outFire, "\n") 217 } 218} 219 220class BPUStage1 extends BPUStage { 221 222 // ubtb is accessed with inLatch pc in s1, 223 // so we use io.in instead of inLatch 224 val ubtbResp = io.in.resp.ubtb 225 // the read operation is already masked, so we do not need to mask here 226 takens := VecInit((0 until PredictWidth).map(i => ubtbResp.hits(i) && ubtbResp.takens(i))) 227 notTakens := VecInit((0 until PredictWidth).map(i => ubtbResp.hits(i) && !ubtbResp.takens(i) && ubtbResp.brMask(i))) 228 targetSrc := ubtbResp.targets 229 brMask := ubtbResp.brMask 230 231 lastIsRVC := ubtbResp.is_RVC(lastValidPos) 232 lastHit := ubtbResp.hits(lastValidPos) 233 234 // resp and brInfo are from the components, 235 // so it does not need to be latched 236 io.out.resp <> io.in.resp 237 io.out.brInfo := io.in.brInfo 238 239 // we do not need to compare target in stage1 240 io.pred.redirect := taken 241 242 if (BPUDebug) { 243 XSDebug(io.outFire, "outPred using ubtb resp: hits:%b, takens:%b, notTakens:%b, isRVC:%b\n", 244 ubtbResp.hits.asUInt, ubtbResp.takens.asUInt, ~ubtbResp.takens.asUInt & brMask.asUInt, ubtbResp.is_RVC.asUInt) 245 } 246 if (EnableBPUTimeRecord) { 247 io.out.brInfo.map(_.debug_ubtb_cycle := GTimer()) 248 } 249} 250 251class BPUStage2 extends BPUStage { 252 // Use latched response from s1 253 val btbResp = inLatch.resp.btb 254 val bimResp = inLatch.resp.bim 255 takens := VecInit((0 until PredictWidth).map(i => btbResp.hits(i) && (btbResp.types(i) === BTBtype.B && bimResp.ctrs(i)(1) || btbResp.types(i) =/= BTBtype.B))) 256 notTakens := VecInit((0 until PredictWidth).map(i => btbResp.hits(i) && btbResp.types(i) === BTBtype.B && !bimResp.ctrs(i)(1))) 257 targetSrc := btbResp.targets 258 brMask := VecInit(btbResp.types.map(_ === BTBtype.B)) 259 260 lastIsRVC := btbResp.isRVC(lastValidPos) 261 lastHit := btbResp.hits(lastValidPos) 262 263 264 if (BPUDebug) { 265 XSDebug(io.outFire, "outPred using btb&bim resp: hits:%b, ctrTakens:%b\n", 266 btbResp.hits.asUInt, VecInit(bimResp.ctrs.map(_(1))).asUInt) 267 } 268 if (EnableBPUTimeRecord) { 269 io.out.brInfo.map(_.debug_btb_cycle := GTimer()) 270 } 271} 272 273class BPUStage3 extends BPUStage { 274 // TAGE has its own pipelines and the 275 // response comes directly from s3, 276 // so we do not use those from inLatch 277 val tageResp = io.in.resp.tage 278 val tageTakens = tageResp.takens 279 val tageHits = tageResp.hits 280 val tageValidTakens = VecInit((tageTakens zip tageHits).map{case (t, h) => t && h}) 281 282 val loopResp = io.in.resp.loop.exit 283 284 val pdMask = io.predecode.mask 285 val pds = io.predecode.pd 286 287 val btbHits = inLatch.resp.btb.hits.asUInt 288 val bimTakens = VecInit(inLatch.resp.bim.ctrs.map(_(1))) 289 290 val brs = pdMask & Reverse(Cat(pds.map(_.isBr))) 291 val jals = pdMask & Reverse(Cat(pds.map(_.isJal))) 292 val jalrs = pdMask & Reverse(Cat(pds.map(_.isJalr))) 293 val calls = pdMask & Reverse(Cat(pds.map(_.isCall))) 294 val rets = pdMask & Reverse(Cat(pds.map(_.isRet))) 295 val RVCs = pdMask & Reverse(Cat(pds.map(_.isRVC))) 296 297 val callIdx = PriorityEncoder(calls) 298 val retIdx = PriorityEncoder(rets) 299 300 // Use bim results for those who tage does not have an entry for 301 val brTakens = brs & 302 (if (EnableBPD) Reverse(Cat((0 until PredictWidth).map(i => tageValidTakens(i) || !tageHits(i) && bimTakens(i)))) else Reverse(Cat((0 until PredictWidth).map(i => bimTakens(i))))) & 303 (if (EnableLoop) ~loopResp.asUInt else Fill(PredictWidth, 1.U(1.W))) 304 // if (EnableBPD) { 305 // brs & Reverse(Cat((0 until PredictWidth).map(i => tageValidTakens(i)))) 306 // } else { 307 // brs & Reverse(Cat((0 until PredictWidth).map(i => bimTakens(i)))) 308 // } 309 310 // predict taken only if btb has a target, jal targets will be provided by IFU 311 takens := VecInit((0 until PredictWidth).map(i => (brTakens(i) || jalrs(i)) && btbHits(i) || jals(i))) 312 // Whether should we count in branches that are not recorded in btb? 313 // PS: Currently counted in. Whenever tage does not provide a valid 314 // taken prediction, the branch is counted as a not taken branch 315 notTakens := ((VecInit((0 until PredictWidth).map(i => brs(i) && !takens(i)))).asUInt | 316 (if (EnableLoop) { VecInit((0 until PredictWidth).map(i => brs(i) && loopResp(i)))} 317 else { WireInit(0.U.asTypeOf(UInt(PredictWidth.W))) }).asUInt).asTypeOf(Vec(PredictWidth, Bool())) 318 targetSrc := inLatch.resp.btb.targets 319 brMask := WireInit(brs.asTypeOf(Vec(PredictWidth, Bool()))) 320 321 //RAS 322 if(EnableRAS){ 323 val ras = Module(new RAS) 324 ras.io <> DontCare 325 ras.io.pc.bits := inLatch.pc 326 ras.io.pc.valid := io.outFire//predValid 327 ras.io.is_ret := rets.orR && (retIdx === jmpIdx) && io.inFire 328 ras.io.callIdx.valid := calls.orR && (callIdx === jmpIdx) && io.inFire 329 ras.io.callIdx.bits := callIdx 330 ras.io.isRVC := (calls & RVCs).orR //TODO: this is ugly 331 ras.io.isLastHalfRVI := !io.predecode.isFetchpcEqualFirstpc 332 ras.io.recover := io.recover 333 334 for(i <- 0 until PredictWidth){ 335 io.out.brInfo(i).rasSp := ras.io.branchInfo.rasSp 336 io.out.brInfo(i).rasTopCtr := ras.io.branchInfo.rasTopCtr 337 io.out.brInfo(i).rasToqAddr := ras.io.branchInfo.rasToqAddr 338 } 339 takens := VecInit((0 until PredictWidth).map(i => { 340 ((brTakens(i) || jalrs(i)) && btbHits(i)) || 341 jals(i) || 342 (!ras.io.out.bits.specEmpty && rets(i)) || 343 (ras.io.out.bits.specEmpty && btbHits(i)) 344 } 345 )) 346 when(ras.io.is_ret && ras.io.out.valid){ 347 targetSrc(retIdx) := ras.io.out.bits.target 348 } 349 } 350 351 lastIsRVC := pds(lastValidPos).isRVC 352 when (lastValidPos === 1.U) { 353 lastHit := pdMask(1) | 354 !pdMask(0) & !pdMask(1) | 355 pdMask(0) & !pdMask(1) & (pds(0).isRVC | !io.predecode.isFetchpcEqualFirstpc) 356 }.elsewhen (lastValidPos > 0.U) { 357 lastHit := pdMask(lastValidPos) | 358 !pdMask(lastValidPos - 1.U) & !pdMask(lastValidPos) | 359 pdMask(lastValidPos - 1.U) & !pdMask(lastValidPos) & pds(lastValidPos - 1.U).isRVC 360 }.otherwise { 361 lastHit := pdMask(0) | !pdMask(0) & !pds(0).isRVC 362 } 363 364 365 io.pred.saveHalfRVI := ((lastValidPos === jmpIdx && taken && !(jmpIdx === 0.U && !io.predecode.isFetchpcEqualFirstpc)) || !taken ) && !lastIsRVC && lastHit 366 367 // Wrap tage resp and tage meta in 368 // This is ugly 369 io.out.resp.tage <> io.in.resp.tage 370 io.out.resp.loop <> io.in.resp.loop 371 for (i <- 0 until PredictWidth) { 372 io.out.brInfo(i).tageMeta := io.in.brInfo(i).tageMeta 373 io.out.brInfo(i).specCnt := io.in.brInfo(i).specCnt 374 } 375 376 if (BPUDebug) { 377 XSDebug(io.inFire, "predecode: pc:%x, mask:%b\n", inLatch.pc, io.predecode.mask) 378 for (i <- 0 until PredictWidth) { 379 val p = io.predecode.pd(i) 380 XSDebug(io.inFire && io.predecode.mask(i), "predecode(%d): brType:%d, br:%d, jal:%d, jalr:%d, call:%d, ret:%d, RVC:%d, excType:%d\n", 381 i.U, p.brType, p.isBr, p.isJal, p.isJalr, p.isCall, p.isRet, p.isRVC, p.excType) 382 } 383 } 384 385 if (EnbaleCFIPredLog) { 386 val out = io.out 387 XSDebug(io.outFire, p"cfi_pred: fetchpc(${Hexadecimal(out.pc)}) mask(${out.mask}) brmask(${brMask.asUInt}) hist(${Hexadecimal(io.debug_hist)}) histPtr(${io.debug_histPtr})\n") 388 } 389 390 if (EnableBPUTimeRecord) { 391 io.out.brInfo.map(_.debug_tage_cycle := GTimer()) 392 } 393} 394 395trait BranchPredictorComponents extends HasXSParameter { 396 val ubtb = Module(new MicroBTB) 397 val btb = Module(new BTB) 398 val bim = Module(new BIM) 399 val tage = (if(EnableBPD) { Module(new Tage) } 400 else { Module(new FakeTage) }) 401 val loop = Module(new LoopPredictor) 402 val preds = Seq(ubtb, btb, bim, tage, loop) 403 preds.map(_.io := DontCare) 404} 405 406class BPUReq extends XSBundle { 407 val pc = UInt(VAddrBits.W) 408 val hist = UInt(HistoryLength.W) 409 val inMask = UInt(PredictWidth.W) 410 val histPtr = UInt(log2Up(ExtHistoryLength).W) // only for debug 411} 412 413class BranchUpdateInfoWithHist extends XSBundle { 414 val ui = new BranchUpdateInfo 415 val hist = UInt(HistoryLength.W) 416} 417 418object BranchUpdateInfoWithHist { 419 def apply (brInfo: BranchUpdateInfo, hist: UInt) = { 420 val b = Wire(new BranchUpdateInfoWithHist) 421 b.ui <> brInfo 422 b.hist := hist 423 b 424 } 425} 426 427abstract class BaseBPU extends XSModule with BranchPredictorComponents with HasBPUParameter{ 428 val io = IO(new Bundle() { 429 // from backend 430 val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist)) 431 val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfoWithHist)) 432 // from ifu, frontend redirect 433 val flush = Input(Vec(3, Bool())) 434 // from if1 435 val in = Input(new BPUReq) 436 val inFire = Input(Vec(4, Bool())) 437 // to if2/if3/if4 438 val out = Vec(3, Output(new BranchPrediction)) 439 // from if4 440 val predecode = Input(new Predecode) 441 // to if4, some bpu info used for updating 442 val branchInfo = Output(Vec(PredictWidth, new BranchInfo)) 443 }) 444 445 def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U) 446 447 preds.map(_.io.update <> io.outOfOrderBrInfo) 448 tage.io.update <> io.inOrderBrInfo 449 450 val s1 = Module(new BPUStage1) 451 val s2 = Module(new BPUStage2) 452 val s3 = Module(new BPUStage3) 453 454 val s1_fire = io.inFire(0) 455 val s2_fire = io.inFire(1) 456 val s3_fire = io.inFire(2) 457 val s4_fire = io.inFire(3) 458 459 s1.io.flush := io.flush(0) 460 s2.io.flush := io.flush(1) 461 s3.io.flush := io.flush(2) 462 463 s1.io.in <> DontCare 464 s2.io.in <> s1.io.out 465 s3.io.in <> s2.io.out 466 467 s1.io.inFire := s1_fire 468 s2.io.inFire := s2_fire 469 s3.io.inFire := s3_fire 470 471 s1.io.outFire := s2_fire 472 s2.io.outFire := s3_fire 473 s3.io.outFire := s4_fire 474 475 io.out(0) <> s1.io.pred 476 io.out(1) <> s2.io.pred 477 io.out(2) <> s3.io.pred 478 479 s1.io.predecode <> DontCare 480 s2.io.predecode <> DontCare 481 s3.io.predecode <> io.predecode 482 483 io.branchInfo := s3.io.out.brInfo 484 485 s1.io.recover <> DontCare 486 s2.io.recover <> DontCare 487 s3.io.recover.valid <> io.inOrderBrInfo.valid 488 s3.io.recover.bits <> io.inOrderBrInfo.bits.ui 489 490 if (BPUDebug) { 491 XSDebug(io.inFire(3), "branchInfo sent!\n") 492 for (i <- 0 until PredictWidth) { 493 val b = io.branchInfo(i) 494 XSDebug(io.inFire(3), "brInfo(%d): ubtbWrWay:%d, ubtbHit:%d, btbWrWay:%d, btbHitJal:%d, bimCtr:%d, fetchIdx:%d\n", 495 i.U, b.ubtbWriteWay, b.ubtbHits, b.btbWriteWay, b.btbHitJal, b.bimCtr, b.fetchIdx) 496 val t = b.tageMeta 497 XSDebug(io.inFire(3), " tageMeta: pvder(%d):%d, altDiffers:%d, pvderU:%d, pvderCtr:%d, allocate(%d):%d\n", 498 t.provider.valid, t.provider.bits, t.altDiffers, t.providerU, t.providerCtr, t.allocate.valid, t.allocate.bits) 499 } 500 } 501 val debug_verbose = false 502} 503 504 505class FakeBPU extends BaseBPU { 506 io.out.foreach(i => { 507 // Provide not takens 508 i <> DontCare 509 i.redirect := false.B 510 }) 511 io.branchInfo <> DontCare 512} 513 514class BPU extends BaseBPU { 515 516 //**********************Stage 1****************************// 517 518 val s1_resp_in = Wire(new PredictorResponse) 519 val s1_brInfo_in = Wire(Vec(PredictWidth, new BranchInfo)) 520 521 s1_resp_in.tage := DontCare 522 s1_resp_in.loop := DontCare 523 s1_brInfo_in := DontCare 524 (0 until PredictWidth).foreach(i => s1_brInfo_in(i).fetchIdx := i.U) 525 526 val s1_inLatch = RegEnable(io.in, s1_fire) 527 ubtb.io.flush := io.flush(0) // TODO: fix this 528 ubtb.io.pc.valid := s2_fire 529 ubtb.io.pc.bits := s1_inLatch.pc 530 ubtb.io.inMask := s1_inLatch.inMask 531 532 533 534 // Wrap ubtb response into resp_in and brInfo_in 535 s1_resp_in.ubtb <> ubtb.io.out 536 for (i <- 0 until PredictWidth) { 537 s1_brInfo_in(i).ubtbWriteWay := ubtb.io.uBTBBranchInfo.writeWay(i) 538 s1_brInfo_in(i).ubtbHits := ubtb.io.uBTBBranchInfo.hits(i) 539 } 540 541 btb.io.flush := io.flush(0) // TODO: fix this 542 btb.io.pc.valid := s1_fire 543 btb.io.pc.bits := io.in.pc 544 btb.io.inMask := io.in.inMask 545 546 547 548 // Wrap btb response into resp_in and brInfo_in 549 s1_resp_in.btb <> btb.io.resp 550 for (i <- 0 until PredictWidth) { 551 s1_brInfo_in(i).btbWriteWay := btb.io.meta.writeWay(i) 552 s1_brInfo_in(i).btbHitJal := btb.io.meta.hitJal(i) 553 } 554 555 bim.io.flush := io.flush(0) // TODO: fix this 556 bim.io.pc.valid := s1_fire 557 bim.io.pc.bits := io.in.pc 558 bim.io.inMask := io.in.inMask 559 560 561 // Wrap bim response into resp_in and brInfo_in 562 s1_resp_in.bim <> bim.io.resp 563 for (i <- 0 until PredictWidth) { 564 s1_brInfo_in(i).bimCtr := bim.io.meta.ctrs(i) 565 } 566 567 568 s1.io.inFire := s1_fire 569 s1.io.in.pc := io.in.pc 570 s1.io.in.mask := io.in.inMask 571 s1.io.in.target := DontCare 572 s1.io.in.resp <> s1_resp_in 573 s1.io.in.brInfo <> s1_brInfo_in 574 s1.io.in.saveHalfRVI := false.B 575 576 val s1_hist = RegEnable(io.in.hist, enable=s1_fire) 577 val s2_hist = RegEnable(s1_hist, enable=s2_fire) 578 val s3_hist = RegEnable(s2_hist, enable=s3_fire) 579 580 s1.io.debug_hist := s1_hist 581 s2.io.debug_hist := s2_hist 582 s3.io.debug_hist := s3_hist 583 584 val s1_histPtr = RegEnable(io.in.histPtr, enable=s1_fire) 585 val s2_histPtr = RegEnable(s1_histPtr, enable=s2_fire) 586 val s3_histPtr = RegEnable(s2_histPtr, enable=s3_fire) 587 588 s1.io.debug_histPtr := s1_histPtr 589 s2.io.debug_histPtr := s2_histPtr 590 s3.io.debug_histPtr := s3_histPtr 591 592 //**********************Stage 2****************************// 593 tage.io.flush := io.flush(1) // TODO: fix this 594 tage.io.pc.valid := s2_fire 595 tage.io.pc.bits := s2.io.in.pc // PC from s1 596 tage.io.hist := s1_hist // The inst is from s1 597 tage.io.inMask := s2.io.in.mask 598 tage.io.s3Fire := s3_fire // Tell tage to march 1 stage 599 tage.io.bim <> s1.io.out.resp.bim // Use bim results from s1 600 601 //**********************Stage 3****************************// 602 // Wrap tage response and meta into s3.io.in.bits 603 // This is ugly 604 605 loop.io.flush := io.flush(2) 606 loop.io.pc.valid := s3_fire 607 loop.io.pc.bits := s3.io.in.pc 608 loop.io.inMask := s3.io.in.mask 609 loop.io.outFire := s4_fire 610 loop.io.respIn.taken := s3.io.pred.taken 611 loop.io.respIn.jmpIdx := s3.io.pred.jmpIdx 612 613 614 s3.io.in.resp.tage <> tage.io.resp 615 s3.io.in.resp.loop <> loop.io.resp 616 for (i <- 0 until PredictWidth) { 617 s3.io.in.brInfo(i).tageMeta := tage.io.meta(i) 618 s3.io.in.brInfo(i).specCnt := loop.io.meta.specCnts(i) 619 } 620 621 if (BPUDebug) { 622 if (debug_verbose) { 623 val uo = ubtb.io.out 624 XSDebug("debug: ubtb hits:%b, takens:%b, notTakens:%b\n", uo.hits.asUInt, uo.takens.asUInt, ~uo.takens.asUInt & uo.brMask.asUInt) 625 val bio = bim.io.resp 626 XSDebug("debug: bim takens:%b\n", VecInit(bio.ctrs.map(_(1))).asUInt) 627 val bo = btb.io.resp 628 XSDebug("debug: btb hits:%b\n", bo.hits.asUInt) 629 } 630 } 631 632 633 634 if (EnableCFICommitLog) { 635 val buValid = io.inOrderBrInfo.valid 636 val buinfo = io.inOrderBrInfo.bits.ui 637 val pd = buinfo.pd 638 val tage_cycle = buinfo.brInfo.debug_tage_cycle 639 XSDebug(buValid, p"cfi_update: isBr(${pd.isBr}) pc(${Hexadecimal(buinfo.pc)}) taken(${buinfo.taken}) mispred(${buinfo.isMisPred}) cycle($tage_cycle) hist(${Hexadecimal(io.inOrderBrInfo.bits.hist)})\n") 640 } 641 642} 643 644object BPU{ 645 def apply(enableBPU: Boolean = true) = { 646 if(enableBPU) { 647 val BPU = Module(new BPU) 648 BPU 649 } 650 else { 651 val FakeBPU = Module(new FakeBPU) 652 FakeBPU 653 } 654 } 655} 656