1/*************************************************************************************** 2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 3* Copyright (c) 2020-2021 Peng Cheng Laboratory 4* 5* XiangShan is licensed under Mulan PSL v2. 6* You can use this software according to the terms and conditions of the Mulan PSL v2. 7* You may obtain a copy of Mulan PSL v2 at: 8* http://license.coscl.org.cn/MulanPSL2 9* 10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 13* 14* See the Mulan PSL v2 for more details. 15***************************************************************************************/ 16 17package xiangshan.frontend 18 19import chipsalliance.rocketchip.config.Parameters 20import chisel3._ 21import chisel3.experimental.chiselName 22import chisel3.util._ 23import xiangshan._ 24import utils._ 25 26import scala.math.min 27 28trait HasBPUConst extends HasXSParameter { 29 val MaxMetaLength = 1024 // TODO: Reduce meta length 30 val MaxBasicBlockSize = 32 31 val LHistoryLength = 32 32 // val numBr = 2 33 val useBPD = true 34 val useLHist = true 35 val shareTailSlot = true 36 val numBrSlot = if (shareTailSlot) numBr-1 else numBr 37 val totalSlot = numBrSlot + 1 38 39 def BP_STAGES = (0 until 3).map(_.U(2.W)) 40 def BP_S1 = BP_STAGES(0) 41 def BP_S2 = BP_STAGES(1) 42 def BP_S3 = BP_STAGES(2) 43 val numBpStages = BP_STAGES.length 44 45 val debug = true 46 val resetVector = 0x10000000L//TODO: set reset vec 47 // TODO: Replace log2Up by log2Ceil 48} 49 50trait HasBPUParameter extends HasXSParameter with HasBPUConst { 51 val BPUDebug = true && !env.FPGAPlatform && env.EnablePerfDebug 52 val EnableCFICommitLog = true 53 val EnbaleCFIPredLog = true 54 val EnableBPUTimeRecord = (EnableCFICommitLog || EnbaleCFIPredLog) && !env.FPGAPlatform 55 val EnableCommit = false 56} 57 58class BPUCtrl(implicit p: Parameters) extends XSBundle { 59 val ubtb_enable = Bool() 60 val btb_enable = Bool() 61 val bim_enable = Bool() 62 val tage_enable = Bool() 63 val sc_enable = Bool() 64 val ras_enable = Bool() 65 val loop_enable = Bool() 66} 67 68trait BPUUtils extends HasXSParameter { 69 // circular shifting 70 def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = { 71 val res = Wire(UInt(len.W)) 72 val higher = source << shamt 73 val lower = source >> (len.U - shamt) 74 res := higher | lower 75 res 76 } 77 78 def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = { 79 val res = Wire(UInt(len.W)) 80 val higher = source << (len.U - shamt) 81 val lower = source >> shamt 82 res := higher | lower 83 res 84 } 85 86 // To be verified 87 def satUpdate(old: UInt, len: Int, taken: Bool): UInt = { 88 val oldSatTaken = old === ((1 << len)-1).U 89 val oldSatNotTaken = old === 0.U 90 Mux(oldSatTaken && taken, ((1 << len)-1).U, 91 Mux(oldSatNotTaken && !taken, 0.U, 92 Mux(taken, old + 1.U, old - 1.U))) 93 } 94 95 def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = { 96 val oldSatTaken = old === ((1 << (len-1))-1).S 97 val oldSatNotTaken = old === (-(1 << (len-1))).S 98 Mux(oldSatTaken && taken, ((1 << (len-1))-1).S, 99 Mux(oldSatNotTaken && !taken, (-(1 << (len-1))).S, 100 Mux(taken, old + 1.S, old - 1.S))) 101 } 102 103 def getFallThroughAddr(start: UInt, carry: Bool, pft: UInt) = { 104 val higher = start.head(VAddrBits-log2Ceil(PredictWidth)-instOffsetBits-1) 105 Cat(Mux(carry, higher+1.U, higher), pft, 0.U(instOffsetBits.W)) 106 } 107 108 def foldTag(tag: UInt, l: Int): UInt = { 109 val nChunks = (tag.getWidth + l - 1) / l 110 val chunks = (0 until nChunks).map { i => 111 tag(min((i+1)*l, tag.getWidth)-1, i*l) 112 } 113 ParallelXOR(chunks) 114 } 115} 116 117// class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst { 118// val pc = UInt(VAddrBits.W) 119// val br_offset = Vec(num_br, UInt(log2Up(MaxBasicBlockSize).W)) 120// val br_mask = Vec(MaxBasicBlockSize, Bool()) 121// 122// val jmp_valid = Bool() 123// val jmp_type = UInt(3.W) 124// 125// val is_NextMask = Vec(FetchWidth*2, Bool()) 126// 127// val cfi_idx = Valid(UInt(log2Ceil(MaxBasicBlockSize).W)) 128// val cfi_mispredict = Bool() 129// val cfi_is_br = Bool() 130// val cfi_is_jal = Bool() 131// val cfi_is_jalr = Bool() 132// 133// val ghist = new ShiftingGlobalHistory() 134// 135// val target = UInt(VAddrBits.W) 136// 137// val meta = UInt(MaxMetaLength.W) 138// val spec_meta = UInt(MaxMetaLength.W) 139// 140// def taken = cfi_idx.valid 141// } 142 143class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst { 144 val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)}) 145 // println(gen.mkString) 146 require(gen.toSet.toList.equals(gen)) 147 def getHistWithInfo(info: Tuple2[Int, Int]) = { 148 val selected = hist.filter(_.info.equals(info)) 149 require(selected.length == 1) 150 selected(0) 151 } 152 def autoConnectFrom(that: AllFoldedHistories) = { 153 require(this.hist.length <= that.hist.length) 154 for (h <- this.hist) { 155 h := that.getHistWithInfo(h.info) 156 } 157 } 158 def update(ghr: Vec[Bool], ptr: CGHPtr, shift: UInt, taken: Bool): AllFoldedHistories = { 159 val res = WireInit(this) 160 for (i <- 0 until this.hist.length) { 161 res.hist(i) := this.hist(i).update(ghr, ptr, shift, taken) 162 } 163 res 164 } 165 def update(ghr: Vec[Bool], ptr: CGHPtr, br_valids: Vec[Bool], br_takens: Vec[Bool]): AllFoldedHistories = { 166 val last_valid_idx = PriorityMux( 167 br_valids.reverse :+ true.B, 168 (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W)) 169 ) 170 val first_taken_idx = PriorityEncoder(false.B +: br_takens) 171 val smaller = Mux(last_valid_idx < first_taken_idx, 172 last_valid_idx, 173 first_taken_idx 174 ) 175 val shift = smaller 176 val taken = br_takens.reduce(_||_) 177 update(ghr, ptr, shift, taken) 178 } 179 def update(ghr: Vec[Bool], ptr: CGHPtr, resp: BranchPredictionBundle): AllFoldedHistories = { 180 update(ghr, ptr, resp.preds.br_valids, resp.real_br_taken_mask) 181 } 182 def display(cond: Bool) = { 183 for (h <- hist) { 184 XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n") 185 } 186 } 187} 188 189class BasePredictorInput (implicit p: Parameters) extends XSBundle with HasBPUConst { 190 def nInputs = 1 191 192 val s0_pc = UInt(VAddrBits.W) 193 194 val ghist = UInt(HistoryLength.W) 195 val folded_hist = new AllFoldedHistories(foldedGHistInfos) 196 val phist = UInt(PathHistoryLength.W) 197 198 val resp_in = Vec(nInputs, new BranchPredictionResp) 199 200 // val final_preds = Vec(numBpStages, new) 201 // val toFtq_fire = Bool() 202 203 // val s0_all_ready = Bool() 204} 205 206class BasePredictorOutput (implicit p: Parameters) extends XSBundle with HasBPUConst { 207 val s3_meta = UInt(MaxMetaLength.W) // This is use by composer 208 val resp = new BranchPredictionResp 209 210 // These store in meta, extract in composer 211 // val rasSp = UInt(log2Ceil(RasSize).W) 212 // val rasTop = new RASEntry 213 // val specCnt = Vec(PredictWidth, UInt(10.W)) 214} 215 216class BasePredictorIO (implicit p: Parameters) extends XSBundle with HasBPUConst { 217 val in = Flipped(DecoupledIO(new BasePredictorInput)) // TODO: Remove DecoupledIO 218 // val out = DecoupledIO(new BasePredictorOutput) 219 val out = Output(new BasePredictorOutput) 220 // val flush_out = Valid(UInt(VAddrBits.W)) 221 222 // val ctrl = Input(new BPUCtrl()) 223 224 val s0_fire = Input(Bool()) 225 val s1_fire = Input(Bool()) 226 val s2_fire = Input(Bool()) 227 val s3_fire = Input(Bool()) 228 229 val s1_ready = Output(Bool()) 230 val s2_ready = Output(Bool()) 231 val s3_ready = Output(Bool()) 232 233 val update = Flipped(Valid(new BranchPredictionUpdate)) 234 val redirect = Flipped(Valid(new BranchPredictionRedirect)) 235} 236 237abstract class BasePredictor(implicit p: Parameters) extends XSModule with HasBPUConst with BPUUtils { 238 val meta_size = 0 239 val spec_meta_size = 0 240 val io = IO(new BasePredictorIO()) 241 242 io.out.resp := io.in.bits.resp_in(0) 243 244 io.out.s3_meta := 0.U 245 246 io.in.ready := !io.redirect.valid 247 248 io.s1_ready := true.B 249 io.s2_ready := true.B 250 io.s3_ready := true.B 251 252 val s0_pc = WireInit(io.in.bits.s0_pc) // fetchIdx(io.f0_pc) 253 val s1_pc = RegEnable(s0_pc, resetVector.U, io.s0_fire) 254 val s2_pc = RegEnable(s1_pc, io.s1_fire) 255 val s3_pc = RegEnable(s2_pc, io.s2_fire) 256 257 258 def getFoldedHistoryInfo: Option[Set[FoldedHistoryInfo]] = None 259} 260 261class FakePredictor(implicit p: Parameters) extends BasePredictor { 262 io.in.ready := true.B 263 io.out.s3_meta := 0.U 264 io.out.resp := io.in.bits.resp_in(0) 265} 266 267class BpuToFtqIO(implicit p: Parameters) extends XSBundle { 268 val resp = DecoupledIO(new BpuToFtqBundle()) 269} 270 271class PredictorIO(implicit p: Parameters) extends XSBundle { 272 val bpu_to_ftq = new BpuToFtqIO() 273 val ftq_to_bpu = Flipped(new FtqToBpuIO()) 274} 275 276class FakeBPU(implicit p: Parameters) extends XSModule with HasBPUConst { 277 val io = IO(new PredictorIO) 278 279 val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready 280 281 val s0_pc = RegInit(resetVector.U) 282 283 when(toFtq_fire) { 284 s0_pc := s0_pc + (FetchWidth*4).U 285 } 286 287 when (io.ftq_to_bpu.redirect.valid) { 288 s0_pc := io.ftq_to_bpu.redirect.bits.cfiUpdate.target 289 } 290 291 io.bpu_to_ftq.resp.valid := !reset.asBool() && !io.ftq_to_bpu.redirect.valid 292 293 io.bpu_to_ftq.resp.bits := 0.U.asTypeOf(new BranchPredictionBundle) 294 io.bpu_to_ftq.resp.bits.s1.pc := s0_pc 295 io.bpu_to_ftq.resp.bits.s1.ftb_entry.pftAddr := s0_pc + (FetchWidth*4).U 296} 297 298@chiselName 299class Predictor(implicit p: Parameters) extends XSModule with HasBPUConst { 300 val io = IO(new PredictorIO) 301 302 val predictors = Module(if (useBPD) new Composer else new FakePredictor) 303 304 val folded_hist_infos = predictors.getFoldedHistoryInfo.getOrElse(Set()).toList 305 for ((len, compLen) <- folded_hist_infos) { 306 println(f"folded hist info: len $len, compLen $compLen") 307 } 308 309 val s0_fire, s1_fire, s2_fire, s3_fire = Wire(Bool()) 310 val s1_valid, s2_valid, s3_valid = RegInit(false.B) 311 val s1_ready, s2_ready, s3_ready = Wire(Bool()) 312 val s1_components_ready, s2_components_ready, s3_components_ready = Wire(Bool()) 313 314 val s0_pc = WireInit(resetVector.U) 315 val s0_pc_reg = RegNext(s0_pc, init=resetVector.U) 316 val s1_pc = RegEnable(s0_pc, s0_fire) 317 val s2_pc = RegEnable(s1_pc, s1_fire) 318 val s3_pc = RegEnable(s2_pc, s2_fire) 319 320 val s0_ghist = Wire(UInt(HistoryLength.W)) 321 // val s0_ghist_reg = RegNext(s0_ghist, init=0.U.asTypeOf(new ShiftingGlobalHistory)) 322 // val s1_ghist = RegEnable(s0_ghist, 0.U.asTypeOf(new ShiftingGlobalHistory), s0_fire) 323 // val s2_ghist = RegEnable(s1_ghist, 0.U.asTypeOf(new ShiftingGlobalHistory), s1_fire) 324 // val s3_ghist = RegEnable(s2_ghist, 0.U.asTypeOf(new ShiftingGlobalHistory), s2_fire) 325 326 val s0_folded_gh = Wire(new AllFoldedHistories(foldedGHistInfos)) 327 val s0_folded_gh_reg = RegNext(s0_folded_gh, init=0.U.asTypeOf(s0_folded_gh)) 328 val s1_folded_gh = RegEnable(s0_folded_gh, 0.U.asTypeOf(s0_folded_gh), s0_fire) 329 val s2_folded_gh = RegEnable(s1_folded_gh, 0.U.asTypeOf(s0_folded_gh), s1_fire) 330 val s3_folded_gh = RegEnable(s2_folded_gh, 0.U.asTypeOf(s0_folded_gh), s2_fire) 331 332 val ghr = RegInit(0.U.asTypeOf(Vec(HistoryLength, Bool()))) 333 val ghr_wire = WireInit(ghr) 334 335 336 def ghist_update(ptr: CGHPtr, shift: UInt, taken: Bool): Unit = { 337 for (i <- 0 until numBr) { 338 when (shift === (i+1).U) { 339 ghr(ptr.value - i.U) := taken 340 }.elsewhen(shift > (i+1).U) { 341 ghr(ptr.value - i.U) := false.B 342 } 343 } 344 } 345 def ghist_update(ptr: CGHPtr, br_valids: Vec[Bool], br_takens: Vec[Bool]): Unit = { 346 val last_valid_idx = PriorityMux( 347 br_valids.reverse :+ true.B, 348 (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W)) 349 ) 350 val first_taken_idx = PriorityEncoder(false.B +: br_takens) 351 val smaller = Mux(last_valid_idx < first_taken_idx, 352 last_valid_idx, 353 first_taken_idx 354 ) 355 val shift = smaller 356 val taken = br_takens.reduce(_||_) 357 ghist_update(ptr, shift, taken) 358 } 359 def ghist_update(ptr: CGHPtr, resp: BranchPredictionBundle): Unit = { 360 ghist_update(ptr, resp.preds.br_valids, resp.real_br_taken_mask) 361 } 362 def getHist(ptr: CGHPtr) = (Cat(ghr_wire.asUInt, ghr_wire.asUInt) >> (ptr.value+1.U))(HistoryLength-1, 0) 363 val s0_ghist_ptr = Wire(new CGHPtr) 364 s0_ghist := getHist(s0_ghist_ptr) 365 val s0_ghist_ptr_reg = RegNext(s0_ghist_ptr, init=0.U.asTypeOf(new CGHPtr)) 366 val s1_ghist_ptr = RegEnable(s0_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s0_fire) 367 val s2_ghist_ptr = RegEnable(s1_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s1_fire) 368 val s3_ghist_ptr = RegEnable(s2_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s2_fire) 369 370 val s0_last_pred = Wire(new BranchPredictionBundle) 371 val s0_last_pred_reg = RegNext(s0_last_pred, init=0.U.asTypeOf(new BranchPredictionBundle)) 372 val s1_last_pred = RegEnable(s0_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s0_fire) 373 val s2_last_pred = RegEnable(s1_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s1_fire) 374 val s3_last_pred = RegEnable(s2_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s2_fire) 375 376 val s0_phist = WireInit(0.U(PathHistoryLength.W)) 377 val s0_phist_reg = RegNext(s0_phist, init=0.U(PathHistoryLength.W)) 378 val s1_phist = RegEnable(s0_phist, 0.U, s0_fire) 379 val s2_phist = RegEnable(s1_phist, 0.U, s1_fire) 380 val s3_phist = RegEnable(s2_phist, 0.U, s2_fire) 381 382 val resp = predictors.io.out.resp 383 384 385 val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready 386 387 when(RegNext(reset.asBool) && !reset.asBool) { 388 // s0_ghist := 0.U.asTypeOf(new ShiftingGlobalHistory) 389 s0_folded_gh := 0.U.asTypeOf(s0_folded_gh) 390 s0_ghist_ptr := 0.U.asTypeOf(new CGHPtr) 391 s0_phist := 0.U 392 s0_pc := resetVector.U 393 s0_last_pred := 0.U.asTypeOf(new BranchPredictionBundle) 394 } 395 396 // when(toFtq_fire) { 397 // final_gh := s3_gh.update(io.bpu_to_ftq.resp.bits.ftb_entry.brValids.reduce(_||_) && !io.bpu_to_ftq.resp.bits.preds.taken, 398 // io.bpu_to_ftq.resp.bits.preds.taken) 399 // } 400 401 val s1_flush, s2_flush, s3_flush = Wire(Bool()) 402 val s2_redirect, s3_redirect = Wire(Bool()) 403 404 // val s1_bp_resp = predictors.io.out.resp.s1 405 // val s2_bp_resp = predictors.io.out.resp.s2 406 // val s3_bp_resp = predictors.io.out.resp.s3 407 408 // predictors.io := DontCare 409 predictors.io.in.valid := s0_fire 410 predictors.io.in.bits.s0_pc := s0_pc 411 predictors.io.in.bits.ghist := s0_ghist 412 predictors.io.in.bits.folded_hist := s0_folded_gh 413 predictors.io.in.bits.phist := s0_phist 414 predictors.io.in.bits.resp_in(0) := (0.U).asTypeOf(new BranchPredictionResp) 415 // predictors.io.in.bits.resp_in(0).s1.pc := s0_pc 416 // predictors.io.in.bits.toFtq_fire := toFtq_fire 417 418 // predictors.io.out.ready := io.bpu_to_ftq.resp.ready 419 420 // Pipeline logic 421 s2_redirect := false.B 422 s3_redirect := false.B 423 424 s3_flush := io.ftq_to_bpu.redirect.valid 425 s2_flush := s3_flush || s3_redirect 426 s1_flush := s2_flush || s2_redirect 427 428 s1_components_ready := predictors.io.s1_ready 429 s1_ready := s1_fire || !s1_valid 430 s0_fire := !reset.asBool && s1_components_ready && s1_ready 431 predictors.io.s0_fire := s0_fire 432 433 s2_components_ready := predictors.io.s2_ready 434 s2_ready := s2_fire || !s2_valid 435 s1_fire := s1_valid && s2_components_ready && s2_ready && io.bpu_to_ftq.resp.ready 436 437 when(s0_fire) { s1_valid := true.B } 438 .elsewhen(s1_flush) { s1_valid := false.B } 439 .elsewhen(s1_fire) { s1_valid := false.B } 440 441 predictors.io.s1_fire := s1_fire 442 443 s3_components_ready := predictors.io.s3_ready 444 s3_ready := s3_fire || !s3_valid 445 s2_fire := s2_valid && s3_components_ready && s3_ready 446 447 when(s2_flush) { s2_valid := false.B } 448 .elsewhen(s1_fire && !s1_flush) { s2_valid := true.B } 449 .elsewhen(s2_fire) { s2_valid := false.B } 450 451 predictors.io.s2_fire := s2_fire 452 453 // s3_fire := s3_valid && io.bpu_to_ftq.resp.ready 454 s3_fire := s3_valid 455 456 when(s3_flush) { s3_valid := false.B } 457 .elsewhen(s2_fire && !s2_flush) { s3_valid := true.B } 458 .elsewhen(s3_fire) { s3_valid := false.B } 459 460 predictors.io.s3_fire := s3_fire 461 462 io.bpu_to_ftq.resp.valid := 463 s1_valid && s2_components_ready && s2_ready || 464 s2_fire && s2_redirect || 465 s3_fire && s3_redirect 466 io.bpu_to_ftq.resp.bits := BpuToFtqBundle(predictors.io.out.resp) 467 io.bpu_to_ftq.resp.bits.meta := predictors.io.out.s3_meta 468 // io.bpu_to_ftq.resp.bits.s3.ghist := s3_ghist 469 io.bpu_to_ftq.resp.bits.s3.folded_hist := s3_folded_gh 470 io.bpu_to_ftq.resp.bits.s3.histPtr := s3_ghist_ptr 471 io.bpu_to_ftq.resp.bits.s3.phist := s3_phist 472 473 s0_pc := s0_pc_reg 474 // s0_ghist := s0_ghist_reg 475 s0_folded_gh := s0_folded_gh_reg 476 s0_ghist_ptr := s0_ghist_ptr_reg 477 s0_phist := s0_phist_reg 478 s0_last_pred := s0_last_pred_reg 479 480 // History manage 481 // s1 482 val s1_predicted_ghist_ptr = s1_ghist_ptr - resp.s1.br_count 483 val s1_predicted_ghist = WireInit(getHist(s1_predicted_ghist_ptr).asTypeOf(Vec(HistoryLength, Bool()))) 484 for (i <- 0 until numBr) { 485 when ((i+1).U <= resp.s1.br_count) { 486 s1_predicted_ghist(i) := resp.s1.real_br_taken_mask.reduce(_||_) && (i==0).B 487 } 488 } 489 val s1_predicted_fh = s1_folded_gh.update(ghr, s1_ghist_ptr, resp.s1) 490 491 492 // XSDebug(p"[hit] ${resp.s1.preds.hit} [s1_real_br_taken_mask] ${Binary(resp.s1.real_br_taken_mask.asUInt)}\n") 493 // XSDebug(p"s1_predicted_ghist=${Binary(s1_predicted_ghist.predHist)}\n") 494 495 when(s1_valid) { 496 s0_pc := resp.s1.target 497 s0_ghist := s1_predicted_ghist.asUInt 498 s0_folded_gh := s1_predicted_fh 499 ghist_update(s1_ghist_ptr, resp.s1) 500 s0_ghist_ptr := s1_predicted_ghist_ptr 501 s0_phist := (s1_phist << 1) | s1_pc(instOffsetBits) 502 s0_last_pred := resp.s1 503 } 504 505 def preds_needs_redirect(x: BranchPredictionBundle, y: BranchPredictionBundle) = { 506 x.real_slot_taken_mask().asUInt.orR =/= y.real_slot_taken_mask().asUInt().orR || 507 x.preds.br_valids.asUInt =/= y.preds.br_valids.asUInt || 508 PriorityEncoder(x.real_br_taken_mask()) =/= PriorityEncoder(y.real_br_taken_mask) 509 } 510 // s2 511 val s2_predicted_ghist_ptr = s2_ghist_ptr - resp.s2.br_count 512 val s2_predicted_ghist = WireInit(getHist(s2_predicted_ghist_ptr).asTypeOf(Vec(HistoryLength, Bool()))) 513 for (i <- 0 until numBr) { 514 when ((i+1).U <= resp.s2.br_count) { 515 s2_predicted_ghist(i) := resp.s2.real_br_taken_mask.reduce(_||_) && (i==0).B 516 } 517 } 518 val s2_predicted_fh = s2_folded_gh.update(ghr, s2_ghist_ptr, resp.s2) 519 val previous_s1_pred = RegEnable(resp.s1, init=0.U.asTypeOf(resp.s1), s1_fire) 520 521 val s2_redirect_s1_last_pred = preds_needs_redirect(s1_last_pred, resp.s2) 522 val s2_redirect_s0_last_pred = preds_needs_redirect(s0_last_pred_reg, resp.s2) 523 524 when(s2_fire) { 525 when((s1_valid && (s1_pc =/= resp.s2.target || s2_redirect_s1_last_pred)) || 526 !s1_valid && (s0_pc_reg =/= resp.s2.target || s2_redirect_s0_last_pred)) { 527 s0_ghist := s2_predicted_ghist.asUInt 528 s0_folded_gh := s2_predicted_fh 529 s0_ghist_ptr := s2_predicted_ghist_ptr 530 ghist_update(s2_ghist_ptr, resp.s2) 531 s2_redirect := true.B 532 s0_pc := resp.s2.target 533 s0_phist := (s2_phist << 1) | s2_pc(instOffsetBits) 534 s0_last_pred := resp.s2 535 // XSDebug(p"s1_valid=$s1_valid, s1_pc=${Hexadecimal(s1_pc)}, s2_resp_target=${Hexadecimal(resp.s2.target)}\n") 536 // XSDebug(p"s2_correct_s1_ghist=$s2_correct_s1_ghist\n") 537 // XSDebug(p"s1_ghist=${Binary(s1_ghist.predHist)}\n") 538 // XSDebug(p"s2_predicted_ghist=${Binary(s2_predicted_ghist.predHist)}\n") 539 } 540 } 541 542 val s2_redirect_target = s2_fire && s1_valid && s1_pc =/= resp.s2.target 543 val s2_saw_s1_hit = RegEnable(resp.s1.preds.hit, s1_fire) 544 val s2_redirect_target_both_hit = s2_redirect_target && s2_saw_s1_hit && resp.s2.preds.hit 545 546 XSPerfAccumulate("s2_redirect_because_s1_not_valid", s2_fire && !s1_valid) 547 XSPerfAccumulate("s2_redirect_because_target_diff", s2_fire && s1_valid && s1_pc =/= resp.s2.target) 548 XSPerfAccumulate("s2_redirect_target_diff_s1_nhit_s2_hit", s2_redirect_target && !s2_saw_s1_hit && resp.s2.preds.hit) 549 XSPerfAccumulate("s2_redirect_target_diff_s1_hit_s2_nhit", s2_redirect_target && s2_saw_s1_hit && !resp.s2.preds.hit) 550 XSPerfAccumulate("s2_redirect_target_diff_both_hit", s2_redirect_target && s2_saw_s1_hit && resp.s2.preds.hit) 551 XSPerfAccumulate("s2_redirect_br_direction_diff", 552 s2_redirect_target_both_hit && 553 RegEnable(PriorityEncoder(resp.s1.preds.br_taken_mask), s1_fire) =/= PriorityEncoder(resp.s2.preds.br_taken_mask)) 554 // XSPerfAccumulate("s2_redirect_because_ghist_diff", s2_fire && s1_valid && s2_correct_s1_ghist) 555 556 // s3 557 val s3_predicted_ghist_ptr = s3_ghist_ptr - resp.s3.br_count 558 val s3_predicted_ghist = WireInit(getHist(s3_predicted_ghist_ptr).asTypeOf(Vec(HistoryLength, Bool()))) 559 for (i <- 0 until numBr) { 560 when ((i+1).U <= resp.s3.br_count) { 561 s3_predicted_ghist(i) := resp.s3.real_br_taken_mask.reduce(_||_) && (i==0).B 562 } 563 } 564 val s3_predicted_fh = s3_folded_gh.update(ghr, s3_ghist_ptr, resp.s3) 565 val s3_redirect_s2_last_pred = preds_needs_redirect(s2_last_pred, resp.s3) 566 val s3_redirect_s1_last_pred = preds_needs_redirect(s1_last_pred, resp.s3) 567 val s3_redirect_s0_last_pred = preds_needs_redirect(s0_last_pred_reg, resp.s3) 568 569 when(s3_fire) { 570 when((s2_valid && (s2_pc =/= resp.s3.target || s3_redirect_s2_last_pred)) || 571 (!s2_valid && s1_valid && (s1_pc =/= resp.s3.target || s3_redirect_s1_last_pred)) || 572 (!s2_valid && !s1_valid && (s0_pc_reg =/= resp.s3.target || s3_redirect_s0_last_pred))) { 573 574 s0_ghist := s3_predicted_ghist.asUInt 575 s0_folded_gh := s3_predicted_fh 576 s0_ghist_ptr := s3_predicted_ghist_ptr 577 ghist_update(s3_ghist_ptr, resp.s3) 578 s3_redirect := true.B 579 s0_pc := resp.s3.target 580 s0_phist := (s3_phist << 1) | s3_pc(instOffsetBits) 581 s0_last_pred := resp.s3 582 } 583 } 584 585 // Send signal tell Ftq override 586 val s2_ftq_idx = RegEnable(io.ftq_to_bpu.enq_ptr, s1_fire) 587 val s3_ftq_idx = RegEnable(s2_ftq_idx, s2_fire) 588 589 io.bpu_to_ftq.resp.bits.s1.valid := s1_fire && !s1_flush 590 io.bpu_to_ftq.resp.bits.s1.hasRedirect := false.B 591 io.bpu_to_ftq.resp.bits.s1.ftq_idx := DontCare 592 io.bpu_to_ftq.resp.bits.s2.valid := s2_fire && !s2_flush 593 io.bpu_to_ftq.resp.bits.s2.hasRedirect := s2_redirect 594 io.bpu_to_ftq.resp.bits.s2.ftq_idx := s2_ftq_idx 595 io.bpu_to_ftq.resp.bits.s3.valid := s3_fire && !s3_flush 596 io.bpu_to_ftq.resp.bits.s3.hasRedirect := s3_redirect 597 io.bpu_to_ftq.resp.bits.s3.ftq_idx := s3_ftq_idx 598 599 val redirect = io.ftq_to_bpu.redirect.bits 600 601 predictors.io.update := io.ftq_to_bpu.update 602 predictors.io.update.bits.ghist.predHist := getHist(io.ftq_to_bpu.update.bits.histPtr) // TODO: remove this 603 predictors.io.redirect := io.ftq_to_bpu.redirect 604 605 when(io.ftq_to_bpu.redirect.valid) { 606 607 val shift = redirect.cfiUpdate.shift 608 val addIntoHist = redirect.cfiUpdate.addIntoHist 609 610 val isBr = redirect.cfiUpdate.pd.isBr 611 val taken = redirect.cfiUpdate.taken 612 613 val oldPtr = redirect.cfiUpdate.histPtr 614 val oldFh = redirect.cfiUpdate.folded_hist 615 val updated_ptr = oldPtr - shift 616 val updated_ghist = WireInit(getHist(updated_ptr).asTypeOf(Vec(HistoryLength, Bool()))) 617 val updated_fh = oldFh.update(ghr, oldPtr, shift, taken && addIntoHist) 618 for (i <- 0 until numBr) { 619 when (shift >= (i+1).U) { 620 updated_ghist(i) := taken && addIntoHist && (i==0).B 621 } 622 } 623 // val updatedGh = oldGh.update(shift, taken && addIntoHist) 624 s0_ghist := updated_ghist.asUInt // TODO: History fix logic 625 s0_folded_gh := updated_fh 626 ghist_update(oldPtr, shift, taken && addIntoHist) 627 s0_ghist_ptr := updated_ptr 628 s0_pc := redirect.cfiUpdate.target 629 val oldPh = redirect.cfiUpdate.phist 630 val phNewBit = redirect.cfiUpdate.phNewBit 631 s0_phist := (oldPh << 1) | phNewBit 632 633 // no need to assign s0_last_pred 634 635 XSDebug(io.ftq_to_bpu.redirect.valid, p"-------------redirect Repair------------\n") 636 // XSDebug(io.ftq_to_bpu.redirect.valid, p"taken_mask=${Binary(taken_mask.asUInt)}, brValids=${Binary(brValids.asUInt)}\n") 637 XSDebug(io.ftq_to_bpu.redirect.valid, p"isBr: ${isBr}, taken: ${taken}, addIntoHist: ${addIntoHist}, shift: ${shift}\n") 638 // XSDebug(io.ftq_to_bpu.redirect.valid, p"oldGh =${Binary(oldGh.predHist)}\n") 639 // XSDebug(io.ftq_to_bpu.redirect.valid, p"updateGh=${Binary(updatedGh.predHist)}\n") 640 641 } 642 643 XSDebug(RegNext(reset.asBool) && !reset.asBool, "Reseting...\n") 644 XSDebug(io.ftq_to_bpu.update.valid, p"Update from ftq\n") 645 XSDebug(io.ftq_to_bpu.redirect.valid, p"Redirect from ftq\n") 646 647 XSDebug("[BP0] fire=%d pc=%x\n", s0_fire, s0_pc) 648 XSDebug("[BP1] v=%d r=%d cr=%d fire=%d flush=%d pc=%x\n", 649 s1_valid, s1_ready, s1_components_ready, s1_fire, s1_flush, s1_pc) 650 XSDebug("[BP2] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n", 651 s2_valid, s2_ready, s2_components_ready, s2_fire, s2_redirect, s2_flush, s2_pc) 652 XSDebug("[BP3] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n", 653 s3_valid, s3_ready, s3_components_ready, s3_fire, s3_redirect, s3_flush, s3_pc) 654 XSDebug("[FTQ] ready=%d\n", io.bpu_to_ftq.resp.ready) 655 XSDebug("resp.s1.target=%x\n", resp.s1.target) 656 XSDebug("resp.s2.target=%x\n", resp.s2.target) 657 // XSDebug("s0_ghist: %b\n", s0_ghist.predHist) 658 // XSDebug("s1_ghist: %b\n", s1_ghist.predHist) 659 // XSDebug("s2_ghist: %b\n", s2_ghist.predHist) 660 // XSDebug("s3_ghist: %b\n", s3_ghist.predHist) 661 // XSDebug("s2_predicted_ghist: %b\n", s2_predicted_ghist.predHist) 662 // XSDebug("s3_predicted_ghist: %b\n", s3_predicted_ghist.predHist) 663 // XSDebug("s3_correct_s2_ghist: %b, s3_correct_s1_ghist: %b, s2_correct_s1_ghist: %b\n", 664 // s3_correct_s2_ghist, s3_correct_s1_ghist, s2_correct_s1_ghist) 665 XSDebug(p"s0_ghist_ptr: $s0_ghist_ptr\n") 666 XSDebug(p"s1_ghist_ptr: $s1_ghist_ptr\n") 667 XSDebug(p"s2_ghist_ptr: $s2_ghist_ptr\n") 668 XSDebug(p"s3_ghist_ptr: $s3_ghist_ptr\n") 669 670 io.ftq_to_bpu.update.bits.display(io.ftq_to_bpu.update.valid) 671 io.ftq_to_bpu.redirect.bits.display(io.ftq_to_bpu.redirect.valid) 672 673 674 XSPerfAccumulate("s2_redirect", s2_redirect) 675 XSPerfAccumulate("s3_redirect", s3_redirect) 676 677 val perfEvents = predictors.asInstanceOf[Composer].perfEvents.map(_._1).zip(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events) 678 val perfinfo = IO(new Bundle(){ 679 val perfEvents = Output(new PerfEventsBundle(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events.length)) 680 }) 681 perfinfo.perfEvents := predictors.asInstanceOf[Composer].perfinfo.perfEvents 682 683} 684