/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import xiangshan.frontend.icache.HasICacheParameters
import utils._
import scala.math._

@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val startAddr     = UInt(VAddrBits.W)
  val nextlineStart = UInt(VAddrBits.W)
  val ftqIdx        = new FtqPtr
  val ftqOffset     = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val nextStartAddr = UInt(VAddrBits.W)

  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr     := b.startAddr
    this.nextlineStart := b.nextLineAddr
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }
  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}

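// Predecode information written back from the IFU to the FTQ. Judging from the
// fields below, misOffset marks the position of a prediction that predecode found
// to be wrong, cfiOffset marks the actual control-flow instruction, and target /
// jalTarget / instrRange let the FTQ repair its entry and redirect the pipeline.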
class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd           = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx       = new FtqPtr
  val ftqOffset    = UInt(log2Ceil(PredictWidth).W)
  val misOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target       = UInt(VAddrBits.W)
  val jalTarget    = UInt(VAddrBits.W)
  val instrRange   = Vec(PredictWidth, Bool())
}

// FTQ sends prefetch requests to the prefetcher
class PrefetchRequest(implicit p:Parameters) extends XSBundle {
  val target = UInt(VAddrBits.W)
}

class FtqPrefechBundle(implicit p:Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}

class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))
  val valid     = UInt(PredictWidth.W)
  val enqEnable = UInt(PredictWidth.W)
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr    = new FtqPtr
  val ftqOffset = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf       = Vec(PredictWidth, Bool())
  val acf       = Vec(PredictWidth, Bool())
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered = Vec(PredictWidth, new TriggerCf)
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }

// Moved from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}

class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def need_oldest_bits = len > compLen
  def info = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

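  // A folded history compresses the most recent `len` bits of global history into
  // `compLen` bits by XOR-ing the history in compLen-bit chunks, roughly (illustrative,
  // len = 8, compLen = 3):
  //   folded_hist = h(2,0) ^ h(5,3) ^ (h(7,6) zero-extended to 3 bits)
  // The update methods below maintain this value incrementally instead of re-folding
  // the whole history: the bits that fall out of the len-bit window (the "oldest bits")
  // are XOR-ed out at their folded positions, the newly shifted-in bit is XOR-ed in,
  // and the result is rotated, which is what bitsets_xor and circular_shift_left implement.
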
  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }


  // fast path, use pre-read oldest bits
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of folded history has two or more levels of XOR gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bit $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")
      //
      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1,0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1))
    }
    ob
  }
}

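// To keep the folded-history update off the critical path, the "oldest" history bits
// needed by each folded history are pre-read from the global history vector ahead of
// time. 2*numBr bits are read per history length, presumably because the history
// pointer may have advanced by up to numBr bits since the pre-read; getRealOb then
// selects, via brNumOH (the one-hot count of branches shifted by the last prediction),
// the bits that are currently the oldest. AllAheadFoldedHistoryOldestBits below keeps
// one such bundle per distinct history length that actually needs oldest bits
// (i.e. len > compLen).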
class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1}
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens = afhob.map(_.len)
    val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr*2) {
        val pos = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH: Vec[Bool]
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError: Bool
}
class MinimalBranchPrediction(implicit p: Parameters) extends NewMicroBTBEntry with BasicPrediction {
  val valid = Bool()
  def cfiIndex = {
    val res = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    res.valid := taken && valid
    res.bits := cfiOffset | Fill(res.bits.getWidth, !valid)
    res
  }

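  // Note on cfiIndex above: ORing with Fill(..., !valid) drives the offset to all
  // ones when the uBTB entry is invalid, i.e. to PredictWidth-1, which appears to be
  // the same "no taken CFI" convention used by FullBranchPrediction.cfiIndex below.
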
  def target(pc: UInt) = nextAddr
  def lastBrPosOH: Vec[Bool] = VecInit(brNumOH.asBools())
  def brTaken = takenOnBr
  def shouldShiftVec: Vec[Bool] = VecInit((0 until numBr).map(i => lastBrPosOH.drop(i+1).reduce(_||_)))
  def fallThruError: Bool = false.B // this check is done in the following stages

  def fromMicroBTBEntry(valid: Bool, entry: NewMicroBTBEntry, pc: UInt) = {
    this.valid := valid
    this.nextAddr := Mux(valid, entry.nextAddr, pc + (FetchWidth*4).U)
    this.cfiOffset := entry.cfiOffset | Fill(cfiOffset.getWidth, !valid)
    this.taken := entry.taken && valid
    this.takenOnBr := entry.takenOnBr && valid
    this.brNumOH := Mux(valid, entry.brNumOH, 1.U(3.W))
  }
}
@chiselName
class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front of it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

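  // fallThroughErr is set in fromFtbEntry below when the fall-through address recorded
  // in the FTB entry (pftAddr with its carry bit) does not lie strictly after the current
  // start address, which can happen with a stale or aliased entry; in that case the
  // prediction falls back to a sequential fetch of pc + FetchWidth*4, and fallThruError
  // exposes the condition to later stages (cf. the fallThruError handling in
  // FetchRequestBundle.fromFtqPcBundle above).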
  def fallThruError: Bool = hit && fallThroughErr

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no CFI is taken, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

    val startLower = Cat(0.U(1.W), pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}

@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  // def full_pred_info[T <: Data](x: T) = if (is_minimal) None else Some(x)
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val is_minimal = Bool()
  val minimal_pred = new MinimalBranchPrediction
  val full_pred = new FullBranchPrediction


  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
  // val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry()

  def target(pc: UInt) = Mux(is_minimal, minimal_pred.target(pc), full_pred.target(pc))
  def cfiIndex        = Mux(is_minimal, minimal_pred.cfiIndex,    full_pred.cfiIndex)
  def lastBrPosOH     = Mux(is_minimal, minimal_pred.lastBrPosOH, full_pred.lastBrPosOH)
  def brTaken         = Mux(is_minimal, minimal_pred.brTaken,     full_pred.brTaken)
  def shouldShiftVec  = Mux(is_minimal, minimal_pred.shouldShiftVec, full_pred.shouldShiftVec)
  def fallThruError   = Mux(is_minimal, minimal_pred.fallThruError,  full_pred.fallThruError)

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    folded_hist.display(cond)
    full_pred.display(cond)
    ftb_entry.display(cond)
  }
}

@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

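  // s1/s2/s3 are the per-stage predictions for the same fetch block; a later stage
  // asserts hasRedirect when it disagrees with the earlier one. The PriorityMux below
  // therefore returns the most up-to-date prediction: s3 if it redirects, else s2 if
  // it redirects, else s1.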
  def selectedResp = {
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    // println("is minimal: ", res.is_minimal)
    res
  }
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

object BpuToFtqBundle {
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val e = Wire(new BpuToFtqBundle())
    e.s1 := resp.s1
    e.s2 := resp.s2
    e.s3 := resp.s3

    e.meta := DontCare
    e
  }
}

class BranchPredictionUpdate(implicit p: Parameters) extends BranchPredictionBundle with HasBPUConst {
  val mispred_mask = Vec(numBr+1, Bool())
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    folded_hist := entry.folded_hist
    afhob := entry.afhob
    lastBrNumOH := entry.lastBrNumOH
    histPtr := entry.histPtr
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    this
  }

  override def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    super.display(cond)
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

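// Redirect generated when a prediction is found to be wrong. On top of the generic
// Redirect fields (robIdx, ftqIdx, ftqOffset, stFtqIdx, ...), the cfiUpdate payload
// printed in display() carries the resolved pc, taken flag, target and shift amount
// of the offending CFI, presumably used for retraining the predictors and restoring
// their speculative state.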
class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}