package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType
import chisel3.experimental.chiselName

trait HasBPUParameter extends HasXSParameter {
  val BPUDebug = true
  val EnableCFICommitLog = true
  val EnableCFIPredLog = true
  val EnableBPUTimeRecord = EnableCFICommitLog || EnableCFIPredLog
}

class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle with HasIFUConst {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class PredictorResponse extends XSBundle {
  class UbtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, UInt(VAddrBits.W))
    val hits = Vec(PredictWidth, Bool())
    val takens = Vec(PredictWidth, Bool())
    val brMask = Vec(PredictWidth, Bool())
    val is_RVC = Vec(PredictWidth, Bool())
  }
  class BtbResp extends XSBundle {
    // the valid bits indicate whether a target is hit
    val targets = Vec(PredictWidth, UInt(VAddrBits.W))
    val hits = Vec(PredictWidth, Bool())
    val types = Vec(PredictWidth, UInt(2.W))
    val isRVC = Vec(PredictWidth, Bool())
  }
  class BimResp extends XSBundle {
    val ctrs = Vec(PredictWidth, UInt(2.W))
  }
  class TageResp extends XSBundle {
    // the valid bits indicate whether a prediction is hit
    val takens = Vec(PredictWidth, Bool())
    val hits = Vec(PredictWidth, Bool())
  }
  class LoopResp extends XSBundle {
    val exit = Vec(PredictWidth, Bool())
  }

  val ubtb = new UbtbResp
  val btb = new BtbResp
  val bim = new BimResp
  val tage = new TageResp
  val loop = new LoopResp
}

trait PredictorUtils {
  // circular shifting
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }

  def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << (len.U - shamt)
    val lower = source >> shamt
    res := higher | lower
    res
  }

  // To be verified
  def satUpdate(old: UInt, len: Int, taken: Bool): UInt = {
    val oldSatTaken = old === ((1 << len)-1).U
    val oldSatNotTaken = old === 0.U
    Mux(oldSatTaken && taken, ((1 << len)-1).U,
      Mux(oldSatNotTaken && !taken, 0.U,
        Mux(taken, old + 1.U, old - 1.U)))
  }

  def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = {
    val oldSatTaken = old === ((1 << (len-1))-1).S
    val oldSatNotTaken = old === (-(1 << (len-1))).S
    Mux(oldSatTaken && taken, ((1 << (len-1))-1).S,
      Mux(oldSatNotTaken && !taken, (-(1 << (len-1))).S,
        Mux(taken, old + 1.S, old - 1.S)))
  }
}
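
// Worked examples for the helpers above (comments only, derived from the code):
//  - circularShiftLeft("b1001".U, 4, 1.U) yields "b0011".U: the bit shifted out on the left
//    re-enters on the right.
//  - satUpdate keeps a counter saturated instead of wrapping; with len = 2 it stays in [0, 3]:
//    satUpdate(3.U, 2, true.B) = 3.U, satUpdate(3.U, 2, false.B) = 2.U,
//    satUpdate(0.U, 2, false.B) = 0.U, satUpdate(1.U, 2, true.B) = 2.U.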

trait HasIFUFire { this: MultiIOModule =>
  val fires = IO(Input(Vec(4, Bool())))
  val s1_fire = fires(0)
  val s2_fire = fires(1)
  val s3_fire = fires(2)
  val out_fire = fires(3)
}

abstract class BasePredictor extends XSModule
  with HasBPUParameter with HasIFUConst with PredictorUtils
  with HasIFUFire {
  val metaLen = 0

  // An implementation MUST extend the IO bundle with a response
  // and the special input from other predictors, as well as
  // the metas to store in BRQ
  abstract class Resp extends XSBundle {}
  abstract class FromOthers extends XSBundle {}
  abstract class Meta extends XSBundle {}

  class DefaultBasePredictorIO extends XSBundle {
    val flush = Input(Bool())
    val pc = Flipped(ValidIO(UInt(VAddrBits.W)))
    val hist = Input(UInt(HistoryLength.W))
    val inMask = Input(UInt(PredictWidth.W))
    val update = Flipped(ValidIO(new CfiUpdateInfo))
  }

  val io = new DefaultBasePredictorIO
  val debug = true
}

class BPUStageIO extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val mask = UInt(PredictWidth.W)
  val resp = new PredictorResponse
  // val target = UInt(VAddrBits.W)
  val brInfo = Vec(PredictWidth, new BpuMeta)
  // val saveHalfRVI = Bool()
}

abstract class BPUStage extends XSModule with HasBPUParameter
  with HasIFUConst with HasIFUFire {
  class DefaultIO extends XSBundle {
    val flush = Input(Bool())
    val in = Input(new BPUStageIO)
    val inFire = Input(Bool())
    val pred = Output(new BranchPrediction) // to ifu
    val out = Output(new BPUStageIO) // to the next stage
    val outFire = Input(Bool())

    val debug_hist = Input(UInt((if (BPUDebug) (HistoryLength) else 0).W))
    // val debug_histPtr = Input(UInt((if (BPUDebug) (ExtHistoryLength) else 0).W))
  }
  val io = IO(new DefaultIO)

  def npc(pc: UInt, instCount: UInt) = pc + (instCount << instOffsetBits.U)

  val inLatch = RegEnable(io.in, io.inFire)

  // Each stage has its own logic to decide
  // takens, notTakens and target

  val takens = Wire(Vec(PredictWidth, Bool()))
  // val notTakens = Wire(Vec(PredictWidth, Bool()))
  val brMask = Wire(Vec(PredictWidth, Bool()))
  val jalMask = Wire(Vec(PredictWidth, Bool()))

  val targets = Wire(Vec(PredictWidth, UInt(VAddrBits.W)))

  val hasHalfRVI = Wire(Bool())

  io.pred <> DontCare
  io.pred.takens := takens.asUInt
  io.pred.brMask := brMask.asUInt
  io.pred.jalMask := jalMask.asUInt
  io.pred.targets := targets
  io.pred.hasHalfRVI := hasHalfRVI

  io.out <> DontCare
  io.out.pc := inLatch.pc
  io.out.mask := inLatch.mask
  io.out.resp <> inLatch.resp
  io.out.brInfo := inLatch.brInfo
  (0 until PredictWidth).map(i => io.out.brInfo(i).sawNotTakenBranch := io.pred.sawNotTakenBr(i))

  if (BPUDebug) {
    val jmpIdx = io.pred.jmpIdx
    val taken = io.pred.taken
    val target = Mux(taken, io.pred.targets(jmpIdx), snpc(inLatch.pc))
    XSDebug("in(%d): pc=%x, mask=%b\n", io.inFire, io.in.pc, io.in.mask)
    XSDebug("inLatch: pc=%x, mask=%b\n", inLatch.pc, inLatch.mask)
    XSDebug("out(%d): pc=%x, mask=%b, taken=%d, jmpIdx=%d, target=%x, hasHalfRVI=%d\n",
      io.outFire, io.out.pc, io.out.mask, taken, jmpIdx, target, hasHalfRVI)
    XSDebug("flush=%d\n", io.flush)
    val p = io.pred
  }
}
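
// A minimal sketch (hypothetical, kept in comments for illustration only) of what a concrete
// stage has to provide: BPUStage leaves takens / brMask / jalMask / targets / hasHalfRVI as
// unconnected wires, so a subclass only needs to drive them from the responses in io.in.resp
// (or inLatch.resp). For example, a stage that never predicts taken could look like
//
//   class BPUStageNever extends BPUStage {
//     takens     := VecInit(Seq.fill(PredictWidth)(false.B))
//     brMask     := VecInit(Seq.fill(PredictWidth)(false.B))
//     jalMask    := DontCare
//     targets    := DontCare
//     hasHalfRVI := false.B
//   }
//
// The real stages below follow this pattern with the uBTB (s1), BTB + BIM (s2), and
// TAGE / loop predictor / RAS plus predecode information (s3).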

@chiselName
class BPUStage1 extends BPUStage {

  // ubtb is accessed with inLatch pc in s1,
  // so we use io.in instead of inLatch
  val ubtbResp = io.in.resp.ubtb
  // the read operation is already masked, so we do not need to mask here
  takens := VecInit((0 until PredictWidth).map(i => ubtbResp.takens(i)))
  // notTakens := VecInit((0 until PredictWidth).map(i => ubtbResp.hits(i) && !ubtbResp.takens(i) && ubtbResp.brMask(i)))
  brMask := ubtbResp.brMask
  jalMask := DontCare
  targets := ubtbResp.targets

  hasHalfRVI := ubtbResp.hits(PredictWidth-1) && !ubtbResp.is_RVC(PredictWidth-1) && HasCExtension.B

  // resp and brInfo come from the components,
  // so they do not need to be latched
  io.out.resp <> io.in.resp
  io.out.brInfo := io.in.brInfo

  if (BPUDebug) {
    XSDebug(io.outFire, "outPred using ubtb resp: hits:%b, takens:%b, notTakens:%b, isRVC:%b\n",
      ubtbResp.hits.asUInt, ubtbResp.takens.asUInt, ~ubtbResp.takens.asUInt & brMask.asUInt, ubtbResp.is_RVC.asUInt)
  }
  if (EnableBPUTimeRecord) {
    io.out.brInfo.map(_.debug_ubtb_cycle := GTimer())
  }
}

@chiselName
class BPUStage2 extends BPUStage {
  // Use the latched btb and bim responses from s1
  val btbResp = inLatch.resp.btb
  val bimResp = inLatch.resp.bim
  takens := VecInit((0 until PredictWidth).map(i =>
    btbResp.hits(i) && (btbResp.types(i) === BTBtype.B && bimResp.ctrs(i)(1) || btbResp.types(i) =/= BTBtype.B)))
  targets := btbResp.targets
  brMask := VecInit((0 until PredictWidth).map(i => btbResp.types(i) === BTBtype.B && btbResp.hits(i)))
  jalMask := DontCare

  hasHalfRVI := btbResp.hits(PredictWidth-1) && !btbResp.isRVC(PredictWidth-1) && HasCExtension.B

  if (BPUDebug) {
    XSDebug(io.outFire, "outPred using btb&bim resp: hits:%b, ctrTakens:%b\n",
      btbResp.hits.asUInt, VecInit(bimResp.ctrs.map(_(1))).asUInt)
  }
  if (EnableBPUTimeRecord) {
    io.out.brInfo.map(_.debug_btb_cycle := GTimer())
  }
}
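
// A note on the s2 taken decision above (illustrative, derived from the code): only the MSB of
// the 2-bit counter bimResp.ctrs(i) is used, so counter values 0b00/0b01 predict not-taken and
// 0b10/0b11 predict taken; this applies to BTB hits of type B (conditional branches), while
// hits of any other type are predicted taken unconditionally.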

@chiselName
class BPUStage3 extends BPUStage {
  class S3IO extends XSBundle {
    val predecode = Input(new Predecode)
    val realMask = Input(UInt(PredictWidth.W))
    val prevHalf = Flipped(ValidIO(new PrevHalfInstr))
    val recover = Flipped(ValidIO(new CfiUpdateInfo))
  }
  val s3IO = IO(new S3IO)

  // TAGE has its own pipeline and its
  // response comes directly to s3,
  // so we do not use the one from inLatch
  val tageResp = io.in.resp.tage
  val tageTakens = tageResp.takens

  val loopResp = io.in.resp.loop.exit

  // realMask is in it
  val pdMask = s3IO.predecode.mask
  val pdLastHalf = s3IO.predecode.lastHalf
  val pds = s3IO.predecode.pd

  val btbResp = WireInit(inLatch.resp.btb)
  val btbHits = WireInit(btbResp.hits.asUInt)
  val bimTakens = VecInit(inLatch.resp.bim.ctrs.map(_(1)))

  val brs = pdMask & Reverse(Cat(pds.map(_.isBr)))
  val jals = pdMask & Reverse(Cat(pds.map(_.isJal)))
  val jalrs = pdMask & Reverse(Cat(pds.map(_.isJalr)))
  val calls = pdMask & Reverse(Cat(pds.map(_.isCall)))
  val rets = pdMask & Reverse(Cat(pds.map(_.isRet)))
  val RVCs = pdMask & Reverse(Cat(pds.map(_.isRVC)))

  val callIdx = PriorityEncoder(calls)
  val retIdx = PriorityEncoder(rets)

  val brPred = (if (EnableBPD) tageTakens else bimTakens).asUInt
  val loopRes = (if (EnableLoop) loopResp else VecInit(Fill(PredictWidth, 0.U(1.W)))).asUInt
  val prevHalfTaken = s3IO.prevHalf.valid && s3IO.prevHalf.bits.taken && HasCExtension.B
  val prevHalfTakenMask = prevHalfTaken.asUInt
  val brTakens = ((brs & brPred | prevHalfTakenMask) & ~loopRes)
  // VecInit((0 until PredictWidth).map(i => brs(i) && (brPred(i) || (if (i == 0) prevHalfTaken else false.B)) && !loopRes(i)))
  // we should provide the btb resp as well
  btbHits := btbResp.hits.asUInt | prevHalfTakenMask

  // predict taken only if btb has a target; jal targets will be provided by IFU
  takens := VecInit((0 until PredictWidth).map(i => (brTakens(i) || jalrs(i)) && btbHits(i) || jals(i)))

  targets := inLatch.resp.btb.targets

  brMask := WireInit(brs.asTypeOf(Vec(PredictWidth, Bool())))
  jalMask := WireInit(jals.asTypeOf(Vec(PredictWidth, Bool())))

  hasHalfRVI := pdLastHalf && HasCExtension.B

  // RAS
  if (EnableRAS) {
    val ras = Module(new RAS)
    ras.io <> DontCare
    ras.io.pc.bits := packetAligned(inLatch.pc)
    ras.io.pc.valid := io.outFire // predValid
    ras.io.is_ret := rets.orR && (retIdx === io.pred.jmpIdx)
    ras.io.callIdx.valid := calls.orR && (callIdx === io.pred.jmpIdx)
    ras.io.callIdx.bits := callIdx
    ras.io.isRVC := (calls & RVCs).orR // TODO: this is ugly
    ras.io.isLastHalfRVI := s3IO.predecode.hasLastHalfRVI
    ras.io.recover := s3IO.recover
    ras.fires <> fires

    for (i <- 0 until PredictWidth) {
      io.out.brInfo(i).rasSp := ras.io.meta.rasSp
      io.out.brInfo(i).rasTopCtr := ras.io.meta.rasTopCtr
      io.out.brInfo(i).rasToqAddr := ras.io.meta.rasToqAddr
    }
    takens := VecInit((0 until PredictWidth).map(i => {
      ((brTakens(i) || jalrs(i)) && btbHits(i)) ||
      jals(i) ||
      (ras.io.out.valid && rets(i)) ||
      (!ras.io.out.valid && rets(i) && btbHits(i))
    }))

    for (i <- 0 until PredictWidth) {
      when (rets(i) && ras.io.out.valid) {
        targets(i) := ras.io.out.bits.target
      }
    }
  }

  // we should provide the prediction for the first half of an RVI at the end of a fetch packet;
  // its branch-taken information would otherwise be lost in the prediction of the next packet,
  // so we preserve it here
  when (hasHalfRVI && btbResp.types(PredictWidth-1) === BTBtype.B && btbHits(PredictWidth-1) && HasCExtension.B) {
    takens(PredictWidth-1) := brPred(PredictWidth-1) && !loopRes(PredictWidth-1)
  }

  // the target would be lost as well, since it comes from the btb,
  // unless it is a ret, whose target comes from the ras
  when (prevHalfTaken && !rets(0) && HasCExtension.B) {
    targets(0) := s3IO.prevHalf.bits.target
  }

  // Wrap the tage/loop resp and meta into the output
  // This is ugly
  io.out.resp.tage <> io.in.resp.tage
  io.out.resp.loop <> io.in.resp.loop
  for (i <- 0 until PredictWidth) {
    io.out.brInfo(i).tageMeta := io.in.brInfo(i).tageMeta
    io.out.brInfo(i).specCnt := io.in.brInfo(i).specCnt
  }

  if (BPUDebug) {
    XSDebug(io.inFire, "predecode: pc:%x, mask:%b\n", inLatch.pc, s3IO.predecode.mask)
    for (i <- 0 until PredictWidth) {
      val p = s3IO.predecode.pd(i)
      XSDebug(io.inFire && s3IO.predecode.mask(i), "predecode(%d): brType:%d, br:%d, jal:%d, jalr:%d, call:%d, ret:%d, RVC:%d, excType:%d\n",
        i.U, p.brType, p.isBr, p.isJal, p.isJalr, p.isCall, p.isRet, p.isRVC, p.excType)
    }
    XSDebug(p"brs:${Binary(brs)} jals:${Binary(jals)} jalrs:${Binary(jalrs)} calls:${Binary(calls)} rets:${Binary(rets)} rvcs:${Binary(RVCs)}\n")
    XSDebug(p"callIdx:${callIdx} retIdx:${retIdx}\n")
    XSDebug(p"brPred:${Binary(brPred)} loopRes:${Binary(loopRes)} prevHalfTaken:${prevHalfTaken} brTakens:${Binary(brTakens)}\n")
  }

  if (EnableCFIPredLog) {
    val out = io.out
    XSDebug(io.outFire, p"cfi_pred: fetchpc(${Hexadecimal(out.pc)}) mask(${out.mask}) brmask(${brMask.asUInt}) hist(${Hexadecimal(io.debug_hist)})\n")
  }

  if (EnableBPUTimeRecord) {
    io.out.brInfo.map(_.debug_tage_cycle := GTimer())
  }
}
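
// Roadmap for the rest of the file (descriptive comment): BranchPredictorComponents below
// instantiates the predictor tables themselves, BaseBPU declares the common IO and chains the
// three stages, and BPU wires the components into the stages: the uBTB response feeds s1,
// BTB + BIM feed s2, and TAGE, the loop predictor and the predecode/RAS information feed s3.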

trait BranchPredictorComponents extends HasXSParameter {
  val ubtb = Module(new MicroBTB)
  val btb = Module(new BTB)
  val bim = Module(new BIM)
  val tage = (if (EnableBPD) { Module(new Tage) } else { Module(new FakeTage) })
  val loop = Module(new LoopPredictor)
  val preds = Seq(ubtb, btb, bim, tage, loop)
  preds.map(_.io := DontCare)
}

class BPUReq extends XSBundle {
  val pc = UInt(VAddrBits.W)
  val hist = UInt(HistoryLength.W)
  val inMask = UInt(PredictWidth.W)
}

abstract class BaseBPU extends XSModule with BranchPredictorComponents with HasBPUParameter {
  val io = IO(new Bundle() {
    // from backend
    val cfiUpdateInfo = Flipped(ValidIO(new CfiUpdateInfo))
    // val cfiUpdateInfo = Flipped(ValidIO(new CfiUpdateInfoWithHist))
    // from ifu, frontend redirect
    val flush = Input(Vec(3, Bool()))
    // from if1
    val in = Input(new BPUReq)
    val inFire = Input(Vec(4, Bool()))
    // to if2/if3/if4
    val out = Vec(3, Output(new BranchPrediction))
    // from if4
    val predecode = Input(new Predecode)
    val realMask = Input(UInt(PredictWidth.W))
    val prevHalf = Flipped(ValidIO(new PrevHalfInstr))
    // to if4, some bpu info used for updating
    val bpuMeta = Output(Vec(PredictWidth, new BpuMeta))
  })

  def npc(pc: UInt, instCount: UInt) = pc + (instCount << 1.U)

  preds.map(p => {
    p.io.update <> io.cfiUpdateInfo
    p.fires <> io.inFire
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  Seq(s1, s2, s3).foreach(s => s.fires <> io.inFire)

  val s1_fire = io.inFire(0)
  val s2_fire = io.inFire(1)
  val s3_fire = io.inFire(2)
  val s4_fire = io.inFire(3)

  s1.io.flush := io.flush(0)
  s2.io.flush := io.flush(1)
  s3.io.flush := io.flush(2)

  s1.io.in <> DontCare
  s2.io.in <> s1.io.out
  s3.io.in <> s2.io.out

  s1.io.inFire := s1_fire
  s2.io.inFire := s2_fire
  s3.io.inFire := s3_fire

  s1.io.outFire := s2_fire
  s2.io.outFire := s3_fire
  s3.io.outFire := s4_fire

  io.out(0) <> s1.io.pred
  io.out(1) <> s2.io.pred
  io.out(2) <> s3.io.pred

  io.bpuMeta := s3.io.out.brInfo

  if (BPUDebug) {
    XSDebug(io.inFire(3), "bpuMeta sent!\n")
    for (i <- 0 until PredictWidth) {
      val b = io.bpuMeta(i)
      XSDebug(io.inFire(3), "brInfo(%d): ubtbWrWay:%d, ubtbHit:%d, btbWrWay:%d, btbHitJal:%d, bimCtr:%d, fetchIdx:%d\n",
        i.U, b.ubtbWriteWay, b.ubtbHits, b.btbWriteWay, b.btbHitJal, b.bimCtr, b.fetchIdx)
      val t = b.tageMeta
      XSDebug(io.inFire(3), " tageMeta: pvder(%d):%d, altDiffers:%d, pvderU:%d, pvderCtr:%d, allocate(%d):%d\n",
        t.provider.valid, t.provider.bits, t.altDiffers, t.providerU, t.providerCtr, t.allocate.valid, t.allocate.bits)
    }
  }
  val debug_verbose = false
}

class FakeBPU extends BaseBPU {
  io.out.foreach(i => {
    // Provide not takens
    i <> DontCare
    i.takens := 0.U
  })
  io.bpuMeta <> DontCare
}

@chiselName
class BPU extends BaseBPU {

  //**********************Stage 1****************************//

  val s1_resp_in = Wire(new PredictorResponse)
  val s1_brInfo_in = Wire(Vec(PredictWidth, new BpuMeta))

  s1_resp_in.tage := DontCare
  s1_resp_in.loop := DontCare
  s1_brInfo_in := DontCare
  (0 until PredictWidth).foreach(i => s1_brInfo_in(i).fetchIdx := i.U)

  val s1_inLatch = RegEnable(io.in, s1_fire)
  ubtb.io.flush := io.flush(0) // TODO: fix this
  ubtb.io.pc.valid := s2_fire
  ubtb.io.pc.bits := s1_inLatch.pc
  ubtb.io.inMask := s1_inLatch.inMask

  // Wrap ubtb response into resp_in and brInfo_in
  s1_resp_in.ubtb <> ubtb.io.out
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).ubtbWriteWay := ubtb.io.uBTBMeta.writeWay(i)
    s1_brInfo_in(i).ubtbHits := ubtb.io.uBTBMeta.hits(i)
  }

  btb.io.flush := io.flush(0) // TODO: fix this
  btb.io.pc.valid := s1_fire
  btb.io.pc.bits := io.in.pc
  btb.io.inMask := io.in.inMask

  // Wrap btb response into resp_in and brInfo_in
  s1_resp_in.btb <> btb.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).btbWriteWay := btb.io.meta.writeWay(i)
    s1_brInfo_in(i).btbHitJal := btb.io.meta.hitJal(i)
  }

  bim.io.flush := io.flush(0) // TODO: fix this
  bim.io.pc.valid := s1_fire
  bim.io.pc.bits := io.in.pc
  bim.io.inMask := io.in.inMask

  // Wrap bim response into resp_in and brInfo_in
  s1_resp_in.bim <> bim.io.resp
  for (i <- 0 until PredictWidth) {
    s1_brInfo_in(i).bimCtr := bim.io.meta.ctrs(i)
  }

  s1.io.inFire := s1_fire
  s1.io.in.pc := io.in.pc
  s1.io.in.mask := io.in.inMask
  s1.io.in.resp <> s1_resp_in
  s1.io.in.brInfo <> s1_brInfo_in

  val s1_hist = RegEnable(io.in.hist, enable=s1_fire)
  val s2_hist = RegEnable(s1_hist, enable=s2_fire)
  val s3_hist = RegEnable(s2_hist, enable=s3_fire)

  s1.io.debug_hist := s1_hist
  s2.io.debug_hist := s2_hist
  s3.io.debug_hist := s3_hist

  //**********************Stage 2****************************//
  tage.io.flush := io.flush(1) // TODO: fix this
  tage.io.pc.valid := s2_fire
  tage.io.pc.bits := s2.io.in.pc // PC from s1
  tage.io.hist := s1_hist // The inst is from s1
  tage.io.inMask := s2.io.in.mask
  // tage.io.s3Fire := s3_fire // Tell tage to march 1 stage
  tage.io.bim <> s1.io.out.resp.bim // Use bim results from s1

  //**********************Stage 3****************************//
  // Wrap tage response and meta into s3.io.in.bits
  // This is ugly

  loop.io.flush := io.flush(2)
  loop.io.pc.valid := s2_fire
  loop.io.if3_fire := s3_fire
  loop.io.pc.bits := s2.io.in.pc
  loop.io.inMask := io.predecode.mask
  // loop.io.outFire := s4_fire
  loop.io.respIn.taken := s3.io.pred.taken
  loop.io.respIn.jmpIdx := s3.io.pred.jmpIdx

  s3.io.in.resp.tage <> tage.io.resp
  s3.io.in.resp.loop <> loop.io.resp
  for (i <- 0 until PredictWidth) {
    s3.io.in.brInfo(i).tageMeta := tage.io.meta(i)
    s3.io.in.brInfo(i).specCnt := loop.io.meta.specCnts(i)
  }

  s3.s3IO.predecode <> io.predecode
  s3.s3IO.realMask := io.realMask
  s3.s3IO.prevHalf := io.prevHalf
  s3.s3IO.recover.valid <> io.cfiUpdateInfo.valid
  s3.s3IO.recover.bits <> io.cfiUpdateInfo.bits

  if (BPUDebug) {
    if (debug_verbose) {
      val uo = ubtb.io.out
      XSDebug("debug: ubtb hits:%b, takens:%b, notTakens:%b\n", uo.hits.asUInt, uo.takens.asUInt, ~uo.takens.asUInt & uo.brMask.asUInt)
      val bio = bim.io.resp
      XSDebug("debug: bim takens:%b\n", VecInit(bio.ctrs.map(_(1))).asUInt)
      val bo = btb.io.resp
      XSDebug("debug: btb hits:%b\n", bo.hits.asUInt)
    }
  }

  if (EnableCFICommitLog) {
    val buValid = io.cfiUpdateInfo.valid && !io.cfiUpdateInfo.bits.isReplay
    val buinfo = io.cfiUpdateInfo.bits
    val pd = buinfo.pd
    val tage_cycle = buinfo.bpuMeta.debug_tage_cycle
    XSDebug(buValid, p"cfi_update: isBr(${pd.isBr}) pc(${Hexadecimal(buinfo.pc)}) taken(${buinfo.taken}) mispred(${buinfo.isMisPred}) cycle($tage_cycle) hist(${Hexadecimal(buinfo.bpuMeta.predHist.asUInt)})\n")
  }
}

object BPU {
  def apply(enableBPU: Boolean = true) = {
    if (enableBPU) {
      val BPU = Module(new BPU)
      BPU
    }
    else {
      val FakeBPU = Module(new FakeBPU)
      FakeBPU
    }
  }
}
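
// Usage sketch (hypothetical call site): the factory returns either a full BPU or a FakeBPU
// behind the common BaseBPU interface, so an instantiating module only needs
//   val bpu = BPU(enableBPU = true)   // or BPU(false) to stub prediction out
// and then drives bpu.io.in / bpu.io.inFire / bpu.io.flush and consumes bpu.io.out and
// bpu.io.bpuMeta.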