/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.stage.{ChiselGeneratorAnnotation, ChiselStage}
import chisel3.util._
import xiangshan._
import utils._
import chisel3.experimental.chiselName

import scala.math.min
import os.copy // NOTE(review): appears unused in this file — confirm before removing


/** Compile-time parameters for the Fetch Target Buffer (FTB). */
trait FTBParams extends HasXSParameter with HasBPUConst {
  val numEntries = FtbSize
  val numWays = FtbWays
  val numSets = numEntries/numWays // 512
  val tagSize = 20

  // Target status encoding: does the target share the pc's high bits (FIT),
  // or do its high bits differ by +1 (OVF, overflow) or -1 (UDF, underflow)?
  val TAR_STAT_SZ = 2
  def TAR_FIT = 0.U(TAR_STAT_SZ.W)
  def TAR_OVF = 1.U(TAR_STAT_SZ.W)
  def TAR_UDF = 2.U(TAR_STAT_SZ.W)

  // Number of low-order target bits stored per slot:
  // 12 bits for conditional branches, 20 bits for unconditional jumps.
  def BR_OFFSET_LEN = 12
  def JMP_OFFSET_LEN = 20
}

/**
 * One slot of an FTB entry: a compressed record of a branch or jump inside a
 * fetch block. Rather than the full target address, it stores only the low
 * `offsetLen` bits of the target (`lower`) plus a 2-bit status (`tarStat`)
 * saying whether the target's remaining high bits equal the pc's high bits
 * or differ from them by exactly +1/-1.
 *
 * @param offsetLen    number of low target bits this slot can store
 * @param subOffsetLen optional shorter width; when defined, the slot may be
 *                     "shared", i.e. hold a conditional branch encoded with
 *                     only `subOffsetLen` bits (used by the tail slot, which
 *                     holds either a jump or an extra conditional branch)
 */
class FtbSlot(val offsetLen: Int, val subOffsetLen: Option[Int] = None)(implicit p: Parameters) extends XSBundle with FTBParams {
  if (subOffsetLen.isDefined) {
    require(subOffsetLen.get <= offsetLen)
  }
  val offset  = UInt(log2Ceil(PredictWidth).W) // instruction position within the fetch block
  val lower   = UInt(offsetLen.W)              // low target bits; target bit 0 is dropped (2-byte alignment)
  val tarStat = UInt(TAR_STAT_SZ.W)            // TAR_FIT / TAR_OVF / TAR_UDF
  val sharing = Bool()                         // true: tail slot holds a conditional branch (subOffsetLen encoding)
  val valid   = Bool()

  /**
   * Compress `target` into (lower, tarStat) relative to `pc`.
   * When `isShare` is true the shorter `subOffsetLen` encoding is used
   * (requires subOffsetLen to be defined).
   */
  def setLowerStatByTarget(pc: UInt, target: UInt, isShare: Boolean) = {
    def getTargetStatByHigher(pc_higher: UInt, target_higher: UInt) =
      Mux(target_higher > pc_higher, TAR_OVF,
        Mux(target_higher < pc_higher, TAR_UDF, TAR_FIT))
    // Bit 0 of the target is always zero for 2-byte aligned instructions, so store bits [offsetLen:1].
    def getLowerByTarget(target: UInt, offsetLen: Int) = target(offsetLen, 1)
    val offLen = if (isShare) this.subOffsetLen.get else this.offsetLen
    val pc_higher = pc(VAddrBits-1, offLen+1)
    val target_higher = target(VAddrBits-1, offLen+1)
    val stat = getTargetStatByHigher(pc_higher, target_higher)
    // Zero-extend so the stored width is always offsetLen, regardless of the encoding used.
    val lower = ZeroExt(getLowerByTarget(target, offLen), this.offsetLen)
    this.lower := lower
    this.tarStat := stat
    this.sharing := isShare.B
  }

  /**
   * Reconstruct the full target address from (lower, tarStat) and `pc`.
   *
   * If `last_stage` = Some((pc_of_previous_stage, stage_fire)) is supplied,
   * the pc high bits and their +1/-1 neighbours are computed a stage early
   * and registered, shortening the critical path of the final Mux1H/Cat.
   */
  def getTarget(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    def getTarget(offLen: Int)(pc: UInt, lower: UInt, stat: UInt,
      last_stage: Option[Tuple2[UInt, Bool]] = None) = {
      val h = pc(VAddrBits-1, offLen+1)
      val higher = Wire(UInt((VAddrBits-offLen-1).W))
      val higher_plus_one = Wire(UInt((VAddrBits-offLen-1).W))
      val higher_minus_one = Wire(UInt((VAddrBits-offLen-1).W))
      if (last_stage.isDefined) {
        val last_stage_pc = last_stage.get._1
        val last_stage_pc_h = last_stage_pc(VAddrBits-1, offLen+1)
        val stage_en = last_stage.get._2
        higher := RegEnable(last_stage_pc_h, stage_en)
        higher_plus_one := RegEnable(last_stage_pc_h+1.U, stage_en)
        higher_minus_one := RegEnable(last_stage_pc_h-1.U, stage_en)
      } else {
        higher := h
        higher_plus_one := h + 1.U
        higher_minus_one := h - 1.U
      }
      // target = {selected high bits, stored low bits, 1'b0}
      val target =
        Cat(
          Mux1H(Seq(
            (stat === TAR_OVF, higher_plus_one),
            (stat === TAR_UDF, higher_minus_one),
            (stat === TAR_FIT, higher),
          )),
          lower(offLen-1, 0), 0.U(1.W)
        )
      require(target.getWidth == VAddrBits)
      require(offLen != 0)
      target
    }
    // A shared-capable slot decodes with the shorter width when `sharing` is set.
    if (subOffsetLen.isDefined)
      Mux(sharing,
        getTarget(subOffsetLen.get)(pc, lower, tarStat, last_stage),
        getTarget(offsetLen)(pc, lower, tarStat, last_stage)
      )
    else
      getTarget(offsetLen)(pc, lower, tarStat, last_stage)
  }

  /**
   * Copy `that` slot's fields into this one, zero-extending `lower` if this
   * slot is wider. Only legal when widths match, or when this slot's
   * subOffsetLen equals `that`'s offsetLen (the `require` enforces this).
   */
  def fromAnotherSlot(that: FtbSlot) = {
    require(
      this.offsetLen > that.offsetLen && this.subOffsetLen.map(_ == that.offsetLen).getOrElse(true) ||
      this.offsetLen == that.offsetLen
    )
    this.offset := that.offset
    this.tarStat := that.tarStat
    // Note: the `.get` here is safe because Scala && short-circuits —
    // it is only evaluated when offsetLen > that.offsetLen, in which case
    // the require above guarantees subOffsetLen is defined.
    this.sharing := (this.offsetLen > that.offsetLen && that.offsetLen == this.subOffsetLen.get).B
    this.valid := that.valid
    this.lower := ZeroExt(that.lower, this.offsetLen)
  }

}

/**
 * One FTB entry, describing the branches/jumps of a fetch block:
 * `numBrSlot` slots for conditional branches plus a tail slot that holds
 * either an unconditional jump (20-bit lower) or, when `sharing` is set,
 * one more conditional branch (12-bit lower).
 */
class FTBEntry(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {

  val valid = Bool()

  val brSlots = Vec(numBrSlot, new FtbSlot(BR_OFFSET_LEN))

  // Tail slot: jump (JMP_OFFSET_LEN) or shared conditional branch (BR_OFFSET_LEN).
  val tailSlot = new FtbSlot(JMP_OFFSET_LEN, Some(BR_OFFSET_LEN))

  // Partial Fall-Through Address: offset of the fall-through within the
  // fetch region, with `carry` marking overflow into the next region.
  val pftAddr = UInt(log2Up(PredictWidth).W)
  val carry = Bool()

  // Attributes of the tail (jump) instruction.
  val isCall = Bool()
  val isRet = Bool()
  val isJalr = Bool()

  val last_may_be_rvi_call = Bool()

  // Per-branch "always taken" hint; forces the taken prediction while set.
  val always_taken = Vec(numBr, Bool())

  /** Slot used for the idx-th conditional branch; the last one lives in the tail slot. */
  def getSlotForBr(idx: Int): FtbSlot = {
    require(idx <= numBr-1)
    (idx, numBr) match {
      case (i, n) if i == n-1 => this.tailSlot
      case _ => this.brSlots(idx)
    }
  }
  def allSlotsForBr = {
    (0 until numBr).map(getSlotForBr(_))
  }
  /** Write the target of the brIdx-th conditional branch; the last branch shares the tail slot. */
  def setByBrTarget(brIdx: Int, pc: UInt, target: UInt) = {
    val slot = getSlotForBr(brIdx)
    slot.setLowerStatByTarget(pc, target, brIdx == numBr-1)
  }
  /** Write the jump target into the tail slot (non-sharing encoding). */
  def setByJmpTarget(pc: UInt, target: UInt) = {
    this.tailSlot.setLowerStatByTarget(pc, target, false)
  }

  /** Decoded targets of every slot (brSlots then tailSlot). */
  def getTargetVec(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    VecInit((brSlots :+ tailSlot).map(_.getTarget(pc, last_stage)))
  }

  def getOffsetVec = VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  def isJal = !isJalr
  def getFallThrough(pc: UInt) = getFallThroughAddr(pc, carry, pftAddr)
  /** Is there a valid conditional branch at or before `offset`? */
  def hasBr(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset}.reduce(_||_) ||
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  /** Per-slot mask of valid conditional branches at or before `offset`. */
  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } :+
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  /** Per-slot mask of a conditional branch recorded exactly at `offset`. */
  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) :+
      (tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing)
    )
  }

  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  def brValids = {
    VecInit(
      brSlots.map(_.valid) :+ (tailSlot.valid && tailSlot.sharing)
    )
  }

  // NOTE(review): returns true when ALL slots are valid, i.e. "no empty slot
  // remains for a new branch" — read the name as a statement, not a question.
  def noEmptySlotForNewBr = {
    VecInit(brSlots.map(_.valid) :+ tailSlot.valid).reduce(_&&_)
  }

  // A new branch must be inserted in program order; it cannot go in if the
  // last (tail) slot already holds something earlier than it.
  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = tailSlot
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }

  /** Tail slot holds a real jump (not a shared conditional branch). */
  def jmpValid = {
    tailSlot.valid && !tailSlot.sharing
  }

  def brOffset = {
    VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  }

  /** Dump the entry via XSDebug when `cond` holds (debug builds only). */
  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------FTB entry----------- \n")
    XSDebug(cond, p"v=${valid}\n")
    for(i <- 0 until numBr) {
      XSDebug(cond, p"[br$i]: v=${allSlotsForBr(i).valid}, offset=${allSlotsForBr(i).offset}," +
        p"lower=${Hexadecimal(allSlotsForBr(i).lower)}\n")
    }
    // NOTE(review): stray '}' at the end of the sharing field below — harmless
    // debug-output typo, left untouched here since it is a runtime string.
    XSDebug(cond, p"[tailSlot]: v=${tailSlot.valid}, offset=${tailSlot.offset}," +
      p"lower=${Hexadecimal(tailSlot.lower)}, sharing=${tailSlot.sharing}}\n")
    XSDebug(cond, p"pftAddr=${Hexadecimal(pftAddr)}, carry=$carry\n")
    XSDebug(cond, p"isCall=$isCall, isRet=$isRet, isjalr=$isJalr\n")
    XSDebug(cond, p"last_may_be_rvi_call=$last_may_be_rvi_call\n")
    XSDebug(cond, p"------------------------------- \n")
  }

}

/** FTB entry paired with its tag, as stored in the SRAM. */
class FTBEntryWithTag(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  val entry = new FTBEntry
  val tag = UInt(tagSize.W)
  def display(cond: Bool): Unit = {
    entry.display(cond)
    XSDebug(cond, p"tag is ${Hexadecimal(tag)}\n------------------------------- \n")
  }
}

/** Per-prediction metadata carried to the update pipeline. */
class FTBMeta(implicit p: Parameters) extends XSBundle with FTBParams {
  val writeWay = UInt(log2Ceil(numWays).W) // way the prediction hit (or would be written)
  val hit = Bool()
  // Cycle stamp for debugging; elided on FPGA to save area.
  val pred_cycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None
}

object FTBMeta {
  def apply(writeWay: UInt, hit: Bool, pred_cycle: UInt)(implicit p: Parameters): FTBMeta = {
    val e = Wire(new FTBMeta)
    e.writeWay := writeWay
    e.hit := hit
    e.pred_cycle.map(_ := pred_cycle)
    e
  }
}

// class UpdateQueueEntry(implicit p: Parameters) extends XSBundle with FTBParams {
//   val pc = UInt(VAddrBits.W)
//   val ftb_entry = new FTBEntry
//   val hit = Bool()
//   val hit_way = UInt(log2Ceil(numWays).W)
// }
//
// object UpdateQueueEntry {
//   def apply(pc: UInt, fe: FTBEntry, hit: Bool, hit_way: UInt)(implicit p: Parameters): UpdateQueueEntry = {
//     val e = Wire(new UpdateQueueEntry)
//     e.pc := pc
//     e.ftb_entry := fe
//     e.hit := hit
//     e.hit_way := hit_way
//     e
//   }
// }

/**
 * Fetch Target Buffer predictor.
 *
 * A set-associative SRAM of FTBEntryWithTag, read at s0 and hit-checked at
 * s1; predictions are driven on s2/s3 of the BPU pipeline. The single SRAM
 * read port is time-shared between prediction reads and update reads
 * (update reads have priority and stall prediction via s1_ready).
 */
class FTB(implicit p: Parameters) extends BasePredictor with FTBParams with BPUUtils
  with HasCircularQueuePtrHelper with HasPerfEvents {
  override val meta_size = WireInit(0.U.asTypeOf(new FTBMeta)).getWidth

  val ftbAddr = new TableAddr(log2Up(numSets), 1)

  /** The SRAM bank plus hit logic and PLRU replacement bookkeeping. */
  class FTBBank(val numSets: Int, val nWays: Int) extends XSModule with BPUUtils {
    val io = IO(new Bundle {
      val s1_fire = Input(Bool())

      // when ftb hit, read_hits.valid is true, and read_hits.bits is OH of hit way
      // when ftb not hit, read_hits.valid is false, and read_hits is OH of allocWay
      // val read_hits = Valid(Vec(numWays, Bool()))
      val req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))   // prediction read request
      val read_resp = Output(new FTBEntry)
      val read_hits = Valid(UInt(log2Ceil(numWays).W))

      val u_req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W))) // update read request (way lookup)
      val update_hits = Valid(UInt(log2Ceil(numWays).W))
      val update_access = Input(Bool())

      val update_pc = Input(UInt(VAddrBits.W))
      val update_write_data = Flipped(Valid(new FTBEntryWithTag))
      val update_write_way = Input(UInt(log2Ceil(numWays).W))
      val update_write_alloc = Input(Bool())                  // true: allocate a way instead of using update_write_way

      // Replacer-only touch for updates that hit and need no SRAM write.
      val try_to_write_way = Flipped(Valid(UInt(log2Ceil(numWays).W)))
      val try_to_write_pc = Input(UInt(VAddrBits.W))
    })

    // holdRead is done manually (HoldUnless below) to fix a bug where an
    // update read would override the held prediction read result.
    val ftb = Module(new SRAMTemplate(new FTBEntryWithTag, set = numSets, way = numWays, shouldReset = true, holdRead = false, singlePort = true))
    val ftb_r_entries = ftb.io.r.resp.data.map(_.entry)

    // Hold the last PREDICTION read; update reads must not clobber it.
    val pred_rdata = HoldUnless(ftb.io.r.resp.data, RegNext(io.req_pc.valid && !io.update_access))
    // Single read port shared by prediction and update; update wins.
    ftb.io.r.req.valid := io.req_pc.valid || io.u_req_pc.valid // io.s0_fire
    ftb.io.r.req.bits.setIdx := Mux(io.u_req_pc.valid, ftbAddr.getIdx(io.u_req_pc.bits), ftbAddr.getIdx(io.req_pc.bits)) // s0_idx

    assert(!(io.req_pc.valid && io.u_req_pc.valid))

    io.req_pc.ready := ftb.io.r.req.ready
    io.u_req_pc.ready := ftb.io.r.req.ready

    val req_tag = RegEnable(ftbAddr.getTag(io.req_pc.bits)(tagSize-1, 0), io.req_pc.valid)
    val req_idx = RegEnable(ftbAddr.getIdx(io.req_pc.bits), io.req_pc.valid)

    val u_req_tag = RegEnable(ftbAddr.getTag(io.u_req_pc.bits)(tagSize-1, 0), io.u_req_pc.valid)

    val read_entries = pred_rdata.map(_.entry)
    val read_tags = pred_rdata.map(_.tag)

    // Prediction hit check (one cycle after the read request).
    val total_hits = VecInit((0 until numWays).map(b => read_tags(b) === req_tag && read_entries(b).valid && io.s1_fire))
    val hit = total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val hit_way = OHToUInt(total_hits)

    // Update-side hit check uses the raw SRAM response (not the held copy).
    val u_total_hits = VecInit((0 until numWays).map(b =>
      ftb.io.r.resp.data(b).tag === u_req_tag && ftb.io.r.resp.data(b).entry.valid && RegNext(io.update_access)))
    val u_hit = u_total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val u_hit_way = OHToUInt(u_total_hits)

    // assert(PopCount(total_hits) === 1.U || PopCount(total_hits) === 0.U)
    // assert(PopCount(u_total_hits) === 1.U || PopCount(u_total_hits) === 0.U)
    for (n <- 1 to numWays) {
      XSPerfAccumulate(f"ftb_pred_${n}_way_hit", PopCount(total_hits) === n.U)
      XSPerfAccumulate(f"ftb_update_${n}_way_hit", PopCount(u_total_hits) === n.U)
    }

    val replacer = ReplacementPolicy.fromString(Some("setplru"), numWays, numSets)
    // val allocWriteWay = replacer.way(req_idx)

    // PLRU is touched by both read hits and writes; writes take priority.
    val touch_set = Seq.fill(1)(Wire(UInt(log2Ceil(numSets).W)))
    val touch_way = Seq.fill(1)(Wire(Valid(UInt(log2Ceil(numWays).W))))

    val write_set = Wire(UInt(log2Ceil(numSets).W))
    val write_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    val read_set = Wire(UInt(log2Ceil(numSets).W))
    val read_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    read_set := req_idx
    read_way.valid := hit
    read_way.bits := hit_way

    touch_set(0) := Mux(write_way.valid, write_set, read_set)

    touch_way(0).valid := write_way.valid || read_way.valid
    touch_way(0).bits := Mux(write_way.valid, write_way.bits, read_way.bits)

    replacer.access(touch_set, touch_way)

    // def allocWay(valids: UInt, meta_tags: UInt, req_tag: UInt) = {
    //   val randomAlloc = false
    //   if (numWays > 1) {
    //     val w = Wire(UInt(log2Up(numWays).W))
    //     val valid = WireInit(valids.andR)
    //     val tags = Cat(meta_tags, req_tag)
    //     val l = log2Up(numWays)
    //     val nChunks = (tags.getWidth + l - 1) / l
    //     val chunks = (0 until nChunks).map( i =>
    //       tags(min((i+1)*l, tags.getWidth)-1, i*l)
    //     )
    //     w := Mux(valid, if (randomAlloc) {LFSR64()(log2Up(numWays)-1,0)} else {chunks.reduce(_^_)}, PriorityEncoder(~valids))
    //     w
    //   } else {
    //     val w = WireInit(0.U)
    //     w
    //   }
    // }

    // val allocWriteWay = allocWay(
    //   VecInit(read_entries.map(_.valid)).asUInt,
    //   VecInit(read_tags).asUInt,
    //   req_tag
    // )

    /** Pick a victim way: first invalid way if any, otherwise the PLRU choice. */
    def allocWay(valids: UInt, idx: UInt) = {
      if (numWays > 1) {
        val w = Wire(UInt(log2Up(numWays).W))
        val valid = WireInit(valids.andR)
        w := Mux(valid, replacer.way(idx), PriorityEncoder(~valids))
        w
      }else {
        val w = WireInit(0.U)
        w
      }
    }

    io.read_resp := Mux1H(total_hits, read_entries) // Mux1H
    io.read_hits.valid := hit
    // io.read_hits.bits := Mux(hit, hit_way_1h, VecInit(UIntToOH(allocWriteWay).asBools()))
    io.read_hits.bits := hit_way

    io.update_hits.valid := u_hit
    io.update_hits.bits := u_hit_way

    // XSDebug(!hit, "FTB not hit, alloc a way: %d\n", allocWriteWay)

    // Update (write) logic
    val u_valid = io.update_write_data.valid
    val u_data = io.update_write_data.bits
    val u_idx = ftbAddr.getIdx(io.update_pc)
    // NOTE(review): victim selection is based on the latest SRAM read response
    // (ftb_r_entries), which is assumed to be the update read of u_idx — confirm
    // the update pipeline guarantees this ordering.
    val allocWriteWay = allocWay(VecInit(ftb_r_entries.map(_.valid)).asUInt, u_idx)
    val u_mask = UIntToOH(Mux(io.update_write_alloc, allocWriteWay, io.update_write_way))

    for (i <- 0 until numWays) {
      XSPerfAccumulate(f"ftb_replace_way$i", u_valid && io.update_write_alloc && OHToUInt(u_mask) === i.U)
      XSPerfAccumulate(f"ftb_replace_way${i}_has_empty", u_valid && io.update_write_alloc && !ftb_r_entries.map(_.valid).reduce(_&&_) && OHToUInt(u_mask) === i.U)
      XSPerfAccumulate(f"ftb_hit_way$i", hit && !io.update_access && hit_way === i.U)
    }

    ftb.io.w.apply(u_valid, u_data, u_idx, u_mask)

    // for replacer: real writes and hit-only "try" touches both update PLRU state
    write_set := Mux(u_valid, u_idx, ftbAddr.getIdx(io.try_to_write_pc))
    write_way.valid := u_valid || io.try_to_write_way.valid
    write_way.bits := Mux(u_valid,
      Mux(io.update_write_alloc, allocWriteWay, io.update_write_way),
      io.try_to_write_way.bits
    )

    // print hit entry info
    Mux1H(total_hits, ftb.io.r.resp.data).display(true.B)
  } // FTBBank

  val ftbBank = Module(new FTBBank(numSets, numWays))

  // ---- prediction path ----
  ftbBank.io.req_pc.valid := io.s0_fire
  ftbBank.io.req_pc.bits := s0_pc

  val ftb_entry = RegEnable(ftbBank.io.read_resp, io.s1_fire)
  val s3_ftb_entry = RegEnable(ftb_entry, io.s2_fire)
  val s1_hit = ftbBank.io.read_hits.valid && io.ctrl.btb_enable
  val s2_hit = RegEnable(s1_hit, io.s1_fire)
  val s3_hit = RegEnable(s2_hit, io.s2_fire)
  val writeWay = ftbBank.io.read_hits.bits

  val fallThruAddr = getFallThroughAddr(s2_pc, ftb_entry.carry, ftb_entry.pftAddr)

  // io.out.bits.resp := RegEnable(io.in.bits.resp_in(0), 0.U.asTypeOf(new BranchPredictionResp), io.s1_fire)
  io.out.resp := io.in.bits.resp_in(0)

  val s1_latch_call_is_rvc = DontCare // TODO: modify when add RAS

  // s2 prediction from the entry read at s1.
  io.out.resp.s2.full_pred.hit := s2_hit
  io.out.resp.s2.pc := s2_pc
  io.out.resp.s2.ftb_entry := ftb_entry
  io.out.resp.s2.full_pred.fromFtbEntry(ftb_entry, s2_pc, Some((s1_pc, io.s1_fire)))
  io.out.resp.s2.is_minimal := false.B

  // s3 prediction: the same entry, delayed one stage.
  io.out.resp.s3.full_pred.hit := s3_hit
  io.out.resp.s3.pc := s3_pc
  io.out.resp.s3.ftb_entry := s3_ftb_entry
  io.out.resp.s3.full_pred.fromFtbEntry(s3_ftb_entry, s3_pc, Some((s2_pc, io.s2_fire)))
  io.out.resp.s3.is_minimal := false.B

  io.out.last_stage_meta := RegEnable(RegEnable(FTBMeta(writeWay.asUInt(), s1_hit, GTimer()).asUInt(), io.s1_fire), io.s2_fire)

  // always taken logic: force the taken mask for branches flagged always_taken.
  for (i <- 0 until numBr) {
    io.out.resp.s2.full_pred.br_taken_mask(i) := io.in.bits.resp_in(0).s2.full_pred.br_taken_mask(i) || s2_hit && ftb_entry.always_taken(i)
    io.out.resp.s3.full_pred.br_taken_mask(i) := io.in.bits.resp_in(0).s3.full_pred.br_taken_mask(i) || s3_hit && s3_ftb_entry.always_taken(i)
  }

  // ---- update path ----
  // Updates whose prediction hit can write immediately (the way is known from
  // the meta); misses first re-read the set to find a hit/victim way, then
  // write one cycle later.
  val update = RegNext(io.update.bits)

  // val update_queue = Mem(64, new UpdateQueueEntry)
  // val head, tail = RegInit(UpdateQueuePtr(false.B, 0.U))
  // val u_queue = Module(new Queue(new UpdateQueueEntry, entries = 64, flow = true))
  // assert(u_queue.io.count < 64.U)

  val u_meta = update.meta.asTypeOf(new FTBMeta)
  // old_entry means nothing changed; skip the write entirely.
  val u_valid = RegNext(io.update.valid && !io.update.bits.old_entry)

  // io.s1_ready := ftbBank.io.req_pc.ready && u_queue.io.count === 0.U && !u_valid
  // A missed update needs the SRAM read port, so prediction must stall.
  io.s1_ready := ftbBank.io.req_pc.ready && !(u_valid && !u_meta.hit)

  // val update_now = u_queue.io.deq.fire && u_queue.io.deq.bits.hit
  val update_now = u_valid && u_meta.hit

  ftbBank.io.u_req_pc.valid := u_valid && !u_meta.hit
  ftbBank.io.u_req_pc.bits := update.pc

  // assert(!(u_valid && RegNext(u_valid) && update.pc === RegNext(update.pc)))
  // assert(!(u_valid && RegNext(u_valid)))

  // val u_way = u_queue.io.deq.bits.hit_way

  val ftb_write = Wire(new FTBEntryWithTag)
  // ftb_write.entry := Mux(update_now, u_queue.io.deq.bits.ftb_entry, RegNext(u_queue.io.deq.bits.ftb_entry))
  // ftb_write.tag := ftbAddr.getTag(Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc)))(tagSize-1, 0)
  // For the miss path everything is delayed one cycle (RegNext) to wait for
  // the update read's hit/way result.
  ftb_write.entry := Mux(update_now, update.ftb_entry, RegNext(update.ftb_entry))
  ftb_write.tag := ftbAddr.getTag(Mux(update_now, update.pc, RegNext(update.pc)))(tagSize-1, 0)

  // val write_valid = update_now || RegNext(u_queue.io.deq.fire && !u_queue.io.deq.bits.hit)
  val write_valid = update_now || RegNext(u_valid && !u_meta.hit)

  // u_queue.io.enq.valid := u_valid
  // u_queue.io.enq.bits := UpdateQueueEntry(update.pc, update.ftb_entry, u_meta.hit, u_meta.writeWay)
  // u_queue.io.deq.ready := RegNext(!u_queue.io.deq.fire || update_now)

  ftbBank.io.update_write_data.valid := write_valid
  ftbBank.io.update_write_data.bits := ftb_write
  // ftbBank.io.update_pc := Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc))
  ftbBank.io.update_pc := Mux(update_now, update.pc, RegNext(update.pc))
  // Hit path: reuse the way recorded at prediction time; miss path: way found by the update read.
  ftbBank.io.update_write_way := Mux(update_now, u_meta.writeWay, ftbBank.io.update_hits.bits)
  // ftbBank.io.update_write_alloc := Mux(update_now, !u_queue.io.deq.bits.hit, !ftbBank.io.update_hits.valid)
  ftbBank.io.update_write_alloc := Mux(update_now, false.B, !ftbBank.io.update_hits.valid)
  ftbBank.io.update_access := u_valid && !u_meta.hit
  ftbBank.io.s1_fire := io.s1_fire

  // for replacer: touch PLRU even for updates that hit but write nothing new
  ftbBank.io.try_to_write_way.valid := RegNext(io.update.valid) && u_meta.hit
  ftbBank.io.try_to_write_way.bits := u_meta.writeWay
  ftbBank.io.try_to_write_pc := update.pc

  XSDebug("req_v=%b, req_pc=%x, ready=%b (resp at next cycle)\n", io.s0_fire, s0_pc, ftbBank.io.req_pc.ready)
  XSDebug("s2_hit=%b, hit_way=%b\n", s2_hit, writeWay.asUInt)
  XSDebug("s2_br_taken_mask=%b, s2_real_taken_mask=%b\n",
    io.in.bits.resp_in(0).s2.full_pred.br_taken_mask.asUInt, io.out.resp.s2.full_pred.real_slot_taken_mask().asUInt)
  XSDebug("s2_target=%x\n", io.out.resp.s2.getTarget)

  ftb_entry.display(true.B)

  XSPerfAccumulate("ftb_read_hits", RegNext(io.s0_fire) && s1_hit)
  XSPerfAccumulate("ftb_read_misses", RegNext(io.s0_fire) && !s1_hit)

  XSPerfAccumulate("ftb_commit_hits", RegNext(io.update.valid) && u_meta.hit)
  XSPerfAccumulate("ftb_commit_misses", RegNext(io.update.valid) && !u_meta.hit)

  XSPerfAccumulate("ftb_update_req", io.update.valid)
  XSPerfAccumulate("ftb_update_ignored", io.update.valid && io.update.bits.old_entry)
  XSPerfAccumulate("ftb_updated", u_valid)

  override val perfEvents = Seq(
    ("ftb_commit_hits      ", RegNext(io.update.valid) && u_meta.hit),
    ("ftb_commit_misses    ", RegNext(io.update.valid) && !u_meta.hit),
  )
  generatePerfEvent()
}