/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.frontend.icache._
import utils._
import utility._
import scala.math._

class FrontendTopDownBundle(implicit p: Parameters) extends XSBundle {
  val reasons    = Vec(TopDownCounters.NumStallReasons.id, Bool())
  val stallWidth = UInt(log2Ceil(PredictWidth).W)
}

class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {

  // fast path: timing critical
  val startAddr     = UInt(VAddrBits.W)
  val nextlineStart = UInt(VAddrBits.W)
  val nextStartAddr = UInt(VAddrBits.W)
  // slow path
  val ftqIdx        = new FtqPtr
  val ftqOffset     = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))

  val topdown_info  = new FrontendTopDownBundle

  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.nextLineAddr, b.startAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }

  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}
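
// Editor's illustrative sketch (assumption, not part of the design): crossCacheline above marks
// a fetch request whose block may spill into the next cache line. With the usual configuration
// the fetch block is half a cache line, so the block can cross exactly when the start address
// falls in the upper half of its line, i.e. when bit (blockOffBits - 1) is set. The object name
// and the default blockOffBits value below are hypothetical, for illustration only.
object CrossCachelineSketch {
  def crossCacheline(startAddr: BigInt, blockOffBits: Int = 6): Boolean =
    startAddr.testBit(blockOffBits - 1)
}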

class FtqICacheInfo(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val startAddr     = UInt(VAddrBits.W)
  val nextlineStart = UInt(VAddrBits.W)
  val ftqIdx        = new FtqPtr
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this
  }
}

class IFUICacheIO(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val icacheReady       = Output(Bool())
  val resp              = Vec(PortNumber, ValidIO(new ICacheMainPipeResp))
  val topdownIcacheMiss = Output(Bool())
  val topdownItlbMiss   = Output(Bool())
}

class FtqToICacheRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val pcMemRead = Vec(5, new FtqICacheInfo)
  val readValid = Vec(5, Bool())
}

class PredecodeWritebackBundle(implicit p: Parameters) extends XSBundle {
  val pc         = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd         = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx     = new FtqPtr
  val ftqOffset  = UInt(log2Ceil(PredictWidth).W)
  val misOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target     = UInt(VAddrBits.W)
  val jalTarget  = UInt(VAddrBits.W)
  val instrRange = Vec(PredictWidth, Bool())
}

class mmioCommitRead(implicit p: Parameters) extends XSBundle {
  val mmioFtqPtr     = Output(new FtqPtr)
  val mmioLastCommit = Input(Bool())
}

object ExceptionType {
  def none  = "b00".U
  def ipf   = "b01".U
  def igpf  = "b10".U
  def acf   = "b11".U
  def width = 2
}

class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs          = Vec(PredictWidth, UInt(32.W))
  val valid           = UInt(PredictWidth.W)
  val enqEnable       = UInt(PredictWidth.W)
  val pd              = Vec(PredictWidth, new PreDecodeInfo)
  val pc              = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc          = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr          = new FtqPtr
  val ftqOffset       = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val exceptionType   = Vec(PredictWidth, UInt(ExceptionType.width.W))
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered       = Vec(PredictWidth, new TriggerCf)
  val topdown_info    = new FrontendTopDownBundle
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }

// Moved from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    // number of branch slots up to and including the last valid branch (0 if none is valid)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    // 1-based position of the first taken branch
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    // shift by however many slots are consumed: up to and including the first taken branch,
    // otherwise all valid slots
    val shift = Mux(last_valid_idx < first_taken_idx, last_valid_idx, first_taken_idx)
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}
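
// Editor's illustrative sketch (assumption, pure-Scala model of the update above): the global
// history shifts by the number of branch slots actually consumed, i.e. up to and including the
// first taken branch, or all valid branch slots when none is taken. Names are hypothetical.
object GhrShiftSketch {
  def shiftAmount(brValids: Seq[Boolean], takenMask: Seq[Boolean]): Int = {
    require(brValids.length == takenMask.length)
    val lastValid  = brValids.lastIndexWhere(identity) + 1 // 0 when no branch is valid
    val firstTaken = takenMask.indexWhere(identity) match {
      case -1 => takenMask.length                          // no branch taken
      case i  => i + 1
    }
    math.min(lastValid, firstTaken)
  }
}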

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}
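
// Editor's illustrative sketch (assumption, pure-Scala model): a folded history compresses a
// len-bit branch history into compLen bits by XOR-ing together all history bits that share the
// same position modulo compLen. The FoldedHistory bundle below keeps such a register up to date
// incrementally instead of recomputing the fold every cycle. Names are hypothetical.
object XorFoldSketch {
  // hist is LSB-first: hist(i) is history bit i, with bit 0 the youngest
  def fold(hist: Seq[Boolean], compLen: Int): Seq[Boolean] = {
    require(compLen >= 1)
    (0 until compLen).map { j =>
      hist.indices.filter(_ % compLen == j).map(hist).fold(false)(_ ^ _)
    }
  }
}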

class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def need_oldest_bits           = len > compLen
  def info                       = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded   = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around     = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start           = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }

  // fast path, use pre-read oldest bits
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).isEmpty) {
          println(f"[error] bit $i is not assigned in folded hist update logic! histLen: ${this.len}, compLen: $compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(
        i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i))
      )

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")

      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1, 0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1))
    }
    ob
  }
}
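
// Editor's illustrative sketch (assumption, pure-Scala model of the single-bit, len > compLen
// case of FoldedHistory.update above): shifting one bit into the history is equivalent to
// XOR-ing the exiting oldest bit out of its folded position, XOR-ing the new bit into position
// compLen - 1, and then rotating the folded register left by one. Names are hypothetical.
object FoldedUpdateSketch {
  // rotate an LSB-first bit vector left by one: bit i moves to bit i + 1, the MSB wraps to bit 0
  private def rotl1(v: Seq[Boolean]): Seq[Boolean] = v.last +: v.init

  def update(folded: Seq[Boolean], oldestBit: Boolean, newBit: Boolean, histLen: Int): Seq[Boolean] = {
    val compLen   = folded.length
    val oldestPos = (histLen - 1) % compLen
    val cleared   = folded.updated(oldestPos, folded(oldestPos) ^ oldestBit)    // oldest bit leaves the fold
    val injected  = cleared.updated(compLen - 1, cleared(compLen - 1) ^ newBit) // lands at bit 0 after the rotate
    rotl1(injected)
  }
}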

class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1}
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens = afhob.map(_.len)
    val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr*2) {
        val pos = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag    = UInt(tagBits.W)
  val idx    = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt)   = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt)     = fromUInt(x).tag
  def getIdx(x: UInt)     = fromUInt(x).idx
  def getBank(x: UInt)    = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH: Vec[Bool]
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError: Bool
}

// selectByTaken selects some data according to takenMask
// allTargets should be in a Vec, like [taken0, taken1, ..., not taken, not hit]
object selectByTaken {
  def apply[T <: Data](takenMask: Vec[Bool], hit: Bool, allTargets: Vec[T]): T = {
    val selVecOH =
      takenMask.zipWithIndex.map { case (t, i) => !takenMask.take(i).fold(false.B)(_ || _) && t && hit } :+
      (!takenMask.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, allTargets)
  }
}
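
// Editor's illustrative sketch (assumption, pure-Scala model of selectByTaken above): pick the
// entry of the first taken slot, fall back to the "not taken" entry when the prediction hits but
// nothing is taken, and to the "not hit" entry otherwise. Names are hypothetical.
object SelectByTakenSketch {
  // allTargets layout: [taken0, taken1, ..., fallThrough (not taken), notHit]
  def select[T](takenMask: Seq[Boolean], hit: Boolean, allTargets: Seq[T]): T =
    if (!hit) allTargets.last
    else takenMask.indexWhere(identity) match {
      case -1 => allTargets(takenMask.length) // hit, nothing taken: fall-through
      case i  => allTargets(i)                // first taken slot
    }
}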

class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets         = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target     = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets         = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr  = Bool()
  val multiHit        = Bool()

  val is_jal               = Bool()
  val is_jalr              = Bool()
  val is_call              = Bool()
  val is_ret               = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing        = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  val predCycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None

  def br_slot_valids  = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front of it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  def target(pc: UInt): UInt = {
    selectByTaken(taken_mask_on_slot, hit, allTarget(pc))
  }

  // allTarget returns a Vec of all possible targets of a BP stage
  // in the following order: [taken_target0, taken_target1, ..., fallThroughAddr, not hit (pc plus fetch width)]
  //
  // This exposes internal targets for timing optimization,
  // since targets are usually generated quicker than the taken signals
  def allTarget(pc: UInt): Vec[UInt] = {
    VecInit(targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U))
  }

  def fallThruError: Bool = hit && fallThroughErr
  def ftbMultiHit: Bool = hit && multiHit

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when nothing is taken, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(
    entry: FTBEntry,
    pc: UInt,
    last_stage_pc: Option[Tuple2[UInt, Bool]] = None,
    last_stage_entry: Option[Tuple2[FTBEntry, Bool]] = None
  ) = {
    slot_valids          := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets              := entry.getTargetVec(pc, last_stage_pc) // use previous stage pc for better timing
    jalr_target          := targets.last
    offsets              := entry.getOffsetVec
    is_jal               := entry.tailSlot.valid && entry.isJal
    is_jalr              := entry.tailSlot.valid && entry.isJalr
    is_call              := entry.tailSlot.valid && entry.isCall
    is_ret               := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing        := entry.tailSlot.valid && entry.tailSlot.sharing
    predCycle.map(_ := GTimer())

    val startLower        = Cat(0.U(1.W), pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr  := startLower >= endLowerwithCarry || endLowerwithCarry > (startLower + PredictWidth.U)
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc, last_stage_entry))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}
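
// Editor's illustrative sketch (assumption, software model of the check in fromFtbEntry above):
// the predicted fall-through is considered sane only if it lies strictly after the fetch start
// and at most PredictWidth instruction slots beyond it; otherwise the prediction falls back to
// pc + FetchWidth * 4. Names are hypothetical.
object FallThroughCheckSketch {
  def fallThroughErr(startLower: Int, endLowerWithCarry: Int, predictWidth: Int): Boolean =
    startLower >= endLowerWithCarry || endLowerWithCarry > startLower + predictWidth
}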

class SpeculativeInfo(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val histPtr = new CGHPtr
  val ssp     = UInt(log2Up(RasSize).W)
  val sctr    = UInt(RasCtrSize.W)
  val TOSW    = new RASPtr
  val TOSR    = new RASPtr
  val NOS     = new RASPtr
  val topAddr = UInt(VAddrBits.W)
}

class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val pc          = Vec(numDup, UInt(VAddrBits.W))
  val valid       = Vec(numDup, Bool())
  val hasRedirect = Vec(numDup, Bool())
  val ftq_idx     = new FtqPtr
  val full_pred   = Vec(numDup, new FullBranchPrediction)

  def target(pc: UInt)          = VecInit(full_pred.map(_.target(pc)))
  def targets(pc: Vec[UInt])    = VecInit(pc.zipWithIndex.map{case (pc, idx) => full_pred(idx).target(pc)})
  def allTargets(pc: Vec[UInt]) = VecInit(pc.zipWithIndex.map{case (pc, idx) => full_pred(idx).allTarget(pc)})
  def cfiIndex       = VecInit(full_pred.map(_.cfiIndex))
  def lastBrPosOH    = VecInit(full_pred.map(_.lastBrPosOH))
  def brTaken        = VecInit(full_pred.map(_.brTaken))
  def shouldShiftVec = VecInit(full_pred.map(_.shouldShiftVec))
  def fallThruError  = VecInit(full_pred.map(_.fallThruError))
  def ftbMultiHit    = VecInit(full_pred.map(_.ftbMultiHit))

  def taken = VecInit(cfiIndex.map(_.valid))

  def getTarget     = targets(pc)
  def getAllTargets = allTargets(pc)

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc(0))}\n")
    full_pred(0).display(cond)
  }
}

class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

  val s1_uftbHit         = Bool()
  val s1_uftbHasIndirect = Bool()
  val s1_ftbCloseReq     = Bool()

  val last_stage_meta      = UInt(MaxMetaLength.W)
  val last_stage_spec_info = new Ftq_Redirect_SRAMEntry
  val last_stage_ftb_entry = new FTBEntry

  val topdown_info = new FrontendTopDownBundle

  def selectedResp = {
    val res =
      PriorityMux(Seq(
        ((s3.valid(3) && s3.hasRedirect(3)) -> s3),
        ((s2.valid(3) && s2.hasRedirect(3)) -> s2),
        (s1.valid(3) -> s1)
      ))
    res
  }
  def selectedRespIdxForFtq =
    PriorityMux(Seq(
      ((s3.valid(3) && s3.hasRedirect(3)) -> BP_S3),
      ((s2.valid(3) && s2.hasRedirect(3)) -> BP_S2),
      (s1.valid(3) -> BP_S1)
    ))
  def lastStage = s3
}

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp {}
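
// Editor's illustrative sketch (assumption, software model of selectedResp above): the FTQ
// consumes the newest pipeline stage that is both valid and redirecting, otherwise it falls back
// to the s1 prediction. Names are hypothetical.
object SelectedRespSketch {
  // each argument: (valid, hasRedirect, payload)
  def select[T](s1: (Boolean, Boolean, T), s2: (Boolean, Boolean, T), s3: (Boolean, Boolean, T)): T =
    if (s3._1 && s3._2) s3._3
    else if (s2._1 && s2._2) s2._3
    else s1._3
}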

class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val pc        = UInt(VAddrBits.W)
  val spec_info = new SpeculativeInfo
  val ftb_entry = new FTBEntry()

  val cfi_idx           = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val br_taken_mask     = Vec(numBr, Bool())
  val br_committed      = Vec(numBr, Bool()) // high only when br valid && br committed
  val jmp_taken         = Bool()
  val mispred_mask      = Vec(numBr+1, Bool())
  val pred_hit          = Bool()
  val false_hit         = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry         = Bool()
  val meta              = UInt(MaxMetaLength.W)
  val full_target       = UInt(VAddrBits.W)
  val from_stage        = UInt(2.W)
  val ghist             = UInt(HistoryLength.W)

  def is_jal  = ftb_entry.tailSlot.valid && ftb_entry.isJal
  def is_jalr = ftb_entry.tailSlot.valid && ftb_entry.isJalr
  def is_call = ftb_entry.tailSlot.valid && ftb_entry.isCall
  def is_ret  = ftb_entry.tailSlot.valid && ftb_entry.isRet

  def is_call_taken = is_call && jmp_taken && cfi_idx.valid && cfi_idx.bits === ftb_entry.tailSlot.offset
  def is_ret_taken  = is_ret && jmp_taken && cfi_idx.valid && cfi_idx.bits === ftb_entry.tailSlot.offset

  def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    XSDebug(cond, p"--------------------------------------------\n")
  }
}
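
// Editor's illustrative sketch (assumption, software model of the bubble classification in
// BranchPredictionRedirect below, assuming br_hit and jr_hit never hold at the same time): a
// control redirect is attributed to the FTB when the mispredicted instruction missed in it, to
// TAGE or SC for conditional-branch hits depending on whether SC overrode the prediction, and to
// ITTAGE or the RAS for indirect-jump hits depending on whether the jump is a return.
// Names are hypothetical.
object RedirectBubbleSketch {
  sealed trait Cause
  case object BtbMiss    extends Cause
  case object TageMiss   extends Cause
  case object ScMiss     extends Cause
  case object IttageMiss extends Cause
  case object RasMiss    extends Cause

  def classify(brHit: Boolean, jrHit: Boolean, scHit: Boolean, isRet: Boolean): Cause =
    if (!brHit && !jrHit)     BtbMiss
    else if (brHit && !scHit) TageMiss
    else if (brHit)           ScMiss
    else if (!isRet)          IttageMiss
    else                      RasMiss
}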
p"------------------------------- \n") 715 XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n") 716 XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n") 717 XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n") 718 XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n") 719 XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n") 720 XSDebug(cond, p"---------------------------------------------- \n") 721 } 722} 723