/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import utils._
import scala.math._

@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle {
  val startAddr = UInt(VAddrBits.W)
  val fallThruAddr = UInt(VAddrBits.W)
  val fallThruError = Bool()
  val ftqIdx = new FtqPtr
  val ftqOffset = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target = UInt(VAddrBits.W)
  val oversize = Bool()

  def fallThroughError() = {
    def carryPos = instOffsetBits+log2Ceil(PredictWidth)+1
    def getLower(pc: UInt) = pc(instOffsetBits+log2Ceil(PredictWidth), instOffsetBits)
    val carry = (startAddr(carryPos) =/= fallThruAddr(carryPos)).asUInt
    val startLower = Cat(0.U(1.W), getLower(startAddr))
    val endLowerwithCarry = Cat(carry, getLower(fallThruAddr))
    require(startLower.getWidth == log2Ceil(PredictWidth)+2)
    require(endLowerwithCarry.getWidth == log2Ceil(PredictWidth)+2)
    startLower >= endLowerwithCarry || (endLowerwithCarry - startLower) > (PredictWidth+1).U
  }
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.fallThruAddr := b.getFallThrough()
    this.oversize := b.oversize
    this
  }
  def fromBpuResp(resp: BranchPredictionBundle) = {
    // only used to bypass, so some fields remain unchanged
    this.startAddr := resp.pc
    this.target := resp.target
    this.ftqOffset := resp.genCfiIndex
    this.fallThruAddr := resp.fallThroughAddr
    this.oversize := resp.ftb_entry.oversize
    this
  }
  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [pft] ${Hexadecimal(fallThruAddr)}" +
      p"[tgt] ${Hexadecimal(target)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}
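
// A worked example of the fallThroughError check above, assuming the common
// configuration PredictWidth = 16 and instOffsetBits = 1 (so getLower takes pc(5,1)
// and carryPos = 6):
//   startAddr    = 0x80000000  ->  startLower        = 0b0_00000 = 0
//   fallThruAddr = 0x80000020  ->  endLowerwithCarry = 0b0_10000 = 16
// Here 16 > 0 and 16 - 0 <= PredictWidth + 1, so the fall-through address is accepted.
// If fallThruAddr were 0x80000000 instead, endLowerwithCarry would be 0, which is not
// greater than startLower, and fallThroughError would be asserted.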

class PredecodeWritebackBundle(implicit p: Parameters) extends XSBundle {
  val pc = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx = new FtqPtr
  val ftqOffset = UInt(log2Ceil(PredictWidth).W)
  val misOffset = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target = UInt(VAddrBits.W)
  val jalTarget = UInt(VAddrBits.W)
  val instrRange = Vec(PredictWidth, Bool())
}

class Exception(implicit p: Parameters) extends XSBundle {

}

class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs = Vec(PredictWidth, UInt(32.W))
  val valid = UInt(PredictWidth.W)
  val pd = Vec(PredictWidth, new PreDecodeInfo)
  val pc = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  //val exception = new Exception
  val ftqPtr = new FtqPtr
  val ftqOffset = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf = Vec(PredictWidth, Bool())
  val acf = Vec(PredictWidth, Bool())
  val crossPageIPFFix = Vec(PredictWidth, Bool())
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }
// Move from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
  override def cloneType = (new CGHPtr).asInstanceOf[this.type]
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}

class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def info = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  def circular_shift_left(max_shift_value: Int)(src: UInt, shamt: UInt) = {
    val srcLen = src.getWidth
    require(max_shift_value <= srcLen)
    val src_doubled = Cat(src, src)
    val shifted_vec = (0 to max_shift_value).map(i => src_doubled(srcLen*2-1-i, srcLen-i))
    val sel_vec = (0 to max_shift_value).map(_.U === shamt)
    Mux1H(sel_vec, shifted_vec)
  }
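
  // circular_shift_left above is a rotate-left: it forms {src, src} and selects a
  // srcLen-wide window with Mux1H. For example, with src = 0b1011 (srcLen = 4):
  //   shamt = 0 -> src_doubled(7,4) = 0b1011
  //   shamt = 1 -> src_doubled(6,3) = 0b0111
  //   shamt = 2 -> src_doubled(5,2) = 0b1110
  // Bits shifted out at the top re-enter at the bottom, which is what the folded
  // history register needs when new history bits are inserted below it.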

  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: UInt, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of folded history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bit $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val oldest_bits = get_oldest_bits_from_ghr(ghr, histPtr)

    // mask off bits that do not update
    val oldest_bits_masked = oldest_bits.zipWithIndex.map{
      case (ob, i) => ob && (i.U < num)
    }
    // if a bit does not wrap around, it should not be xored when it exits
    val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

    // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

    // only the last bit could be 1, as we have at most one taken branch at a time
    val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && (i+1).U === num)).asUInt
    // if a bit does not wrap around, newest bits should not be xored onto it either
    val newest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (compLen-1-i, newest_bits_masked(i)))

    // println(f"new bits set ${newest_bits_set.map(_._1)}")

    val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
      case (fb, i) => fb && !(num >= (len-i).U)
    })
    val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

    // histLen too short to wrap around
    val new_folded_hist =
      if (len <= compLen) {
        ((folded_hist << num) | taken)(compLen-1,0)
        // circular_shift_left(max_update_num)(Cat(Reverse(newest_bits_masked), folded_hist(compLen-max_update_num-1,0)), num)
      } else {
        // do xor then shift
        val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
        circular_shift_left(max_update_num)(xored, num)
      }
    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
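
  // Note on the update above: folded_hist can be viewed as the XOR of consecutive
  // compLen-bit slices of the len-bit global history. When num new bits enter the
  // history, the num oldest bits leave it, so for len > compLen the update first
  // XORs the leaving bits out at their folded positions, XORs the newly inserted
  // taken bit in, and then rotates the register left by num so that every bit lands
  // at its new folded position (the taken bit wraps around to bit 0).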

  // def update(ghr: Vec[Bool], histPtr: CGHPtr, valids: Vec[Bool], takens: Vec[Bool]): FoldedHistory = {
  //   val fh = WireInit(this)
  //   require(valids.length == max_update_num)
  //   require(takens.length == max_update_num)
  //   val last_valid_idx = PriorityMux(
  //     valids.reverse :+ true.B,
  //     (max_update_num to 0 by -1).map(_.U(log2Ceil(max_update_num+1).W))
  //   )
  //   val first_taken_idx = PriorityEncoder(false.B +: takens)
  //   val smaller = Mux(last_valid_idx < first_taken_idx,
  //     last_valid_idx,
  //     first_taken_idx
  //   )
  //   // update folded_hist
  //   fh.update(ghr, histPtr, smaller, takens.reduce(_||_))
  // }
  // println(f"folded hist original length: ${len}, folded len: ${folded_len} " +
  //   f"oldest bits' pos in folded: ${oldest_bit_pos_in_folded}")

}

class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

@chiselName
class BranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val is_br_sharing = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(
      if (shareTailSlot)
        br_slot_valids :+ (tail_slot_valid && is_br_sharing)
      else
        br_slot_valids
    )
  }

  def taken_mask_on_slot = {
    VecInit(
      if (shareTailSlot)
        (br_slot_valids zip br_taken_mask.init).map{ case (v, t) => v && t } :+ (
          (br_taken_mask.last && tail_slot_valid && is_br_sharing) ||
          tail_slot_valid && !is_br_sharing
        )
      else
        (br_slot_valids zip br_taken_mask).map{ case (v, t) => v && t } :+
          tail_slot_valid
    )
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(entry: FTBEntry, pc: UInt) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing
  }
  // override def toPrintable: Printable = {
  //   p"-----------BranchPrediction----------- " +
  //     p"[taken_mask] ${Binary(taken_mask.asUInt)} " +
  //     p"[is_br] ${Binary(is_br.asUInt)}, [is_jal] ${Binary(is_jal.asUInt)} " +
  //     p"[is_jalr] ${Binary(is_jalr.asUInt)}, [is_call] ${Binary(is_call.asUInt)}, [is_ret] ${Binary(is_ret.asUInt)} " +
  //     p"[target] ${Hexadecimal(target)}}, [hit] $hit "
  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}
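
// Note on the slot scheme above: when shareTailSlot is set, the tail slot of an FTB
// entry may hold either an unconditional jump or a second conditional branch
// (indicated by is_br_sharing). taken_mask_on_slot therefore marks the tail slot as
// taken when it holds a jump, or when it holds a shared branch whose br_taken_mask
// bit is set; the leading slots are taken only when valid and predicted taken.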

@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle with HasBPUConst with BPUUtils {
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val preds = new BranchPrediction

  // val ghist = new ShiftingGlobalHistory()
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val histPtr = new CGHPtr
  val phist = UInt(PathHistoryLength.W)
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
  val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry() // TODO: Send this entry to ftq

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(preds.taken_mask_on_slot.map(_ && preds.hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    if (shareTailSlot)
      VecInit(
        preds.taken_mask_on_slot.map(_ && preds.hit).init :+
        (preds.br_taken_mask.last && preds.tail_slot_valid && preds.is_br_sharing && preds.hit)
      )
    else
      VecInit(real_slot_taken_mask().init)
  }

  def br_count(): UInt = {
    val last_valid_idx = PriorityMux(
      preds.br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_br_taken_mask)
    Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
  }

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !preds.is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && preds.is_call
  def hit_taken_on_ret = hit_taken_on_jmp && preds.is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && preds.is_jalr

  def fallThroughAddr = getFallThroughAddr(pc, ftb_entry.carry, ftb_entry.pftAddr)

  def target(): UInt = {
    val targetVec = preds.targets :+ fallThroughAddr :+ (pc + (FetchWidth*4).U)
    val selVec = real_slot_taken_mask() :+ (preds.hit && !real_slot_taken_mask().asUInt.orR) :+ true.B
    PriorityMux(selVec zip targetVec)
  }

  def genCfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), ftb_entry.getOffsetVec) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionBundle----------- " +
  //     p"[pc] ${Hexadecimal(pc)} " +
  //     p"[ghist] ${Binary(ghist.predHist)} " +
  //     preds.toPrintable +
  //     ftb_entry.toPrintable
  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    // XSDebug(cond, p"[ghist] ${Binary(ghist.predHist)}\n")
    folded_hist.display(cond)
    preds.display(cond)
    ftb_entry.display(cond)
  }
}

@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle()
  val s2 = new BranchPredictionBundle()
  val s3 = new BranchPredictionBundle()

  def selectedResp =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> s3),
      ((s2.valid && s2.hasRedirect) -> s2),
      (s1.valid -> s1)
    ))
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}
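
// Note on selectedResp above: priority is s3 > s2 > s1, so a later stage only overrides
// an earlier one when it is valid and actually redirects (hasRedirect); when no later
// stage redirects, the s1 bundle flows through.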

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

object BpuToFtqBundle {
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val e = Wire(new BpuToFtqBundle())
    e.s1 := resp.s1
    e.s2 := resp.s2
    e.s3 := resp.s3

    e.meta := DontCare
    e
  }
}

class BranchPredictionUpdate(implicit p: Parameters) extends BranchPredictionBundle with HasBPUConst {
  val mispred_mask = Vec(numBr+1, Bool())
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val ghist = new ShiftingGlobalHistory() // TODO: remove this

  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    // ghist := entry.ghist
    folded_hist := entry.folded_hist
    histPtr := entry.histPtr
    phist := entry.phist
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    specCnt := entry.specCnt
    this
  }
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionUpdate----------- " +
  //     p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] ${Binary(false_hit)} " +
  //     p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)} " +
  //     super.toPrintable +
  //     p"\n"
  // }

  override def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    super.display(cond)
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"
  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}