/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import xiangshan.frontend.icache.HasICacheParameters
import utils._
import scala.math._

@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val startAddr     = UInt(VAddrBits.W)
  val nextlineStart = UInt(VAddrBits.W)
  val ftqIdx        = new FtqPtr
  val ftqOffset     = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val nextStartAddr = UInt(VAddrBits.W)

  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }

  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}

class PredecodeWritebackBundle(implicit p: Parameters) extends XSBundle {
  val pc         = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd         = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx     = new FtqPtr
  val ftqOffset  = UInt(log2Ceil(PredictWidth).W)
  val misOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target     = UInt(VAddrBits.W)
  val jalTarget  = UInt(VAddrBits.W)
  val instrRange = Vec(PredictWidth, Bool())
}

// Ftq send req to Prefetch
class PrefetchRequest(implicit p: Parameters) extends XSBundle {
  val target = UInt(VAddrBits.W)
}

class FtqPrefechBundle(implicit p: Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}
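
// Fetch payload delivered from the IFU to the instruction buffer: up to PredictWidth
// instructions of a fetch block, their predecode info and PCs, the folded PCs used by
// memory dependence prediction, the owning FTQ entry (ftqPtr/ftqOffset), and
// per-instruction exception and trigger flags (ipf: instruction page fault, acf: access
// fault, crossPageIPFFix: page-fault fix-up for instructions crossing a page boundary).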
class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))
  val valid     = UInt(PredictWidth.W)
  val enqEnable = UInt(PredictWidth.W)
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr    = new FtqPtr
  val ftqOffset = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf       = Vec(PredictWidth, Bool())
  val acf       = Vec(PredictWidth, Bool())
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered = Vec(PredictWidth, new TriggerCf)
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }

// Move from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
  override def cloneType = (new CGHPtr).asInstanceOf[this.type]
}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}
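
// Folded (compressed) global history as used by the TAGE-like predictors: a history of
// `len` bits is compressed into `compLen` bits by xoring the history together in
// compLen-bit segments. For example, with len = 12 and compLen = 4, folded bit i is the
// xor of history bits i, i+4 and i+8. The update methods below avoid re-folding the
// whole history on every prediction: they xor out the bits that fall off the len-bit
// window (the "oldest" bits), xor in the newly shifted-in taken bit, and then rotate
// the folded value by the number of inserted bits.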
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def need_oldest_bits = len > compLen
  def info = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }

  // fast path, use pre-read oldest bits
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of folded history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bit $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i =>
        (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")

      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1, 0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}
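
// Pre-read ("ahead") copy of the oldest global-history bits needed by the fast-path
// folded-history update. For each history length, 2*numBr candidate oldest bits are
// read from the global history vector in advance; getRealOb then uses brNumOH (the
// one-hot number of bits shifted in by the previous prediction) to pick the numBr bits
// that are actually the oldest for the current update, so the update logic does not
// need a late read of the history vector.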
class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1))
    }
    ob
  }
}

class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1}
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens = afhob.map(_.len)
    val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr*2) {
        val pos = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH: Vec[Bool]
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError: Bool
}

class MinimalBranchPrediction(implicit p: Parameters) extends NewMicroBTBEntry with BasicPrediction {
  val valid = Bool()

  def cfiIndex = {
    val res = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    res.valid := taken && valid
    res.bits := cfiOffset | Fill(res.bits.getWidth, !valid)
    res
  }
  def target(pc: UInt) = nextAddr
  def lastBrPosOH: Vec[Bool] = VecInit(brNumOH.asBools())
  def brTaken = takenOnBr
  def shouldShiftVec: Vec[Bool] = VecInit((0 until numBr).map(i => lastBrPosOH.drop(i+1).reduce(_||_)))
  def fallThruError: Bool = false.B // we do this check on the following stages

  def fromMicroBTBEntry(valid: Bool, entry: NewMicroBTBEntry, pc: UInt) = {
    this.valid := valid
    this.nextAddr := Mux(valid, entry.nextAddr, pc + (FetchWidth*4).U)
    this.cfiOffset := entry.cfiOffset | Fill(cfiOffset.getWidth, !valid)
    this.taken := entry.taken && valid
    this.takenOnBr := entry.takenOnBr && valid
    this.brNumOH := Mux(valid, entry.brNumOH, 1.U(3.W))
  }
}
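
// Prediction recovered from a full FTB entry. An entry provides totalSlot slots: the
// leading slots hold conditional branches, while the tail slot holds an unconditional
// jump or, when is_br_sharing is set, one more conditional branch sharing that slot.
// The real_* helpers additionally qualify the per-slot taken bits with the FTB hit.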
@chiselName
class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front of it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)
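
  // Predicted target of this fetch block: the target of the first slot predicted
  // taken; otherwise the fall-through address on an FTB hit; otherwise (FTB miss)
  // pc + FetchWidth * 4.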
  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

  def fallThruError: Bool = hit && fallThroughErr

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

    val startLower        = Cat(0.U(1.W), pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}

@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  // def full_pred_info[T <: Data](x: T) = if (is_minimal) None else Some(x)
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val is_minimal = Bool()
  val minimal_pred = new MinimalBranchPrediction
  val full_pred = new FullBranchPrediction

  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
  // val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry()

  def target(pc: UInt) = Mux(is_minimal, minimal_pred.target(pc), full_pred.target(pc))
  def cfiIndex = Mux(is_minimal, minimal_pred.cfiIndex, full_pred.cfiIndex)
  def lastBrPosOH = Mux(is_minimal, minimal_pred.lastBrPosOH, full_pred.lastBrPosOH)
  def brTaken = Mux(is_minimal, minimal_pred.brTaken, full_pred.brTaken)
  def shouldShiftVec = Mux(is_minimal, minimal_pred.shouldShiftVec, full_pred.shouldShiftVec)
  def fallThruError = Mux(is_minimal, minimal_pred.fallThruError, full_pred.fallThruError)

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    folded_hist.display(cond)
    full_pred.display(cond)
    ftb_entry.display(cond)
  }
}
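
// Per-cycle output of the BPU pipeline: one BranchPredictionBundle per predictor stage
// (s1/s2/s3). selectedResp returns the most mature stage that is valid and requests a
// redirect, falling back to s1; s3 is the final stage (see lastStage).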
@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

  def selectedResp = {
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    // println("is minimal: ", res.is_minimal)
    res
  }
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

object BpuToFtqBundle {
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val e = Wire(new BpuToFtqBundle())
    e.s1 := resp.s1
    e.s2 := resp.s2
    e.s3 := resp.s3

    e.meta := DontCare
    e
  }
}
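
// Training information sent from the FTQ back to the BPU: which slots were
// mispredicted, whether the hit was a false hit, where a newly found branch should be
// inserted into the FTB entry, the stage the prediction came from, and the full global
// history. fromFtqRedirectSram restores the speculative state (folded histories,
// ahead-read oldest bits, history pointer and RAS top) that was saved at prediction
// time.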
class BranchPredictionUpdate(implicit p: Parameters) extends BranchPredictionBundle with HasBPUConst {
  val mispred_mask = Vec(numBr+1, Bool())
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    folded_hist := entry.folded_hist
    afhob := entry.afhob
    lastBrNumOH := entry.lastBrNumOH
    histPtr := entry.histPtr
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    this
  }

  override def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    super.display(cond)
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"
  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}