xref: /XiangShan/src/main/scala/xiangshan/frontend/FTB.scala (revision b30c10d68f6c89b2a5fe6a41bcfed69865117e9e)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.stage.{ChiselGeneratorAnnotation, ChiselStage}
import chisel3.util._
import xiangshan._
import utils._
import chisel3.experimental.chiselName

import scala.math.min
import os.copy


trait FTBParams extends HasXSParameter with HasBPUConst {
  val numEntries = 2048
  val numWays    = 4
  val numSets    = numEntries/numWays // 512
  val tagSize    = 20


  val TAR_STAT_SZ = 2
  def TAR_FIT = 0.U(TAR_STAT_SZ.W)
  def TAR_OVF = 1.U(TAR_STAT_SZ.W)
  def TAR_UDF = 2.U(TAR_STAT_SZ.W)

  def BR_OFFSET_LEN = 12
  def JMP_OFFSET_LEN = 20
}
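
// Explanatory note (not part of the original source): an FtbSlot does not store the full branch
// target. It keeps only target(offsetLen, 1) in `lower`, plus a 2-bit `tarStat` that records
// whether the target's upper bits equal pc's upper bits (TAR_FIT), pc's upper bits + 1 (TAR_OVF,
// the low part carried out), or pc's upper bits - 1 (TAR_UDF, the low part borrowed).
// Worked example (hypothetical addresses), with BR_OFFSET_LEN = 12: a branch at
// pc = 0x8000_1FFC targeting 0x8000_2010 crosses the boundary of bits (12, 1), so the slot would
// hold lower = 0x008 and tarStat = TAR_OVF; getTarget later re-attaches pc(VAddrBits-1, 13) + 1
// in front of `lower` to recover 0x8000_2010.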

class FtbSlot(val offsetLen: Int, val subOffsetLen: Option[Int] = None)(implicit p: Parameters) extends XSBundle with FTBParams {
  if (subOffsetLen.isDefined) {
    require(subOffsetLen.get <= offsetLen)
  }
  val offset  = UInt(log2Ceil(PredictWidth).W)
  val lower   = UInt(offsetLen.W)
  val tarStat = UInt(TAR_STAT_SZ.W)
  val sharing = Bool()
  val valid   = Bool()

  def setLowerStatByTarget(pc: UInt, target: UInt, isShare: Boolean) = {
    def getTargetStatByHigher(pc_higher: UInt, target_higher: UInt) =
      Mux(target_higher > pc_higher, TAR_OVF,
        Mux(target_higher < pc_higher, TAR_UDF, TAR_FIT))
    def getLowerByTarget(target: UInt, offsetLen: Int) = target(offsetLen, 1)
    val offLen = if (isShare) this.subOffsetLen.get else this.offsetLen
    val pc_higher = pc(VAddrBits-1, offLen+1)
    val target_higher = target(VAddrBits-1, offLen+1)
    val stat = getTargetStatByHigher(pc_higher, target_higher)
    val lower = ZeroExt(getLowerByTarget(target, offLen), this.offsetLen)
    this.lower := lower
    this.tarStat := stat
    this.sharing := isShare.B
  }

  def getTarget(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    def getTarget(offLen: Int)(pc: UInt, lower: UInt, stat: UInt,
      last_stage: Option[Tuple2[UInt, Bool]] = None) = {
      val h = pc(VAddrBits-1, offLen+1)
      val higher = Wire(UInt((VAddrBits-offLen-1).W))
      val higher_plus_one = Wire(UInt((VAddrBits-offLen-1).W))
      val higher_minus_one = Wire(UInt((VAddrBits-offLen-1).W))
      if (last_stage.isDefined) {
        val last_stage_pc = last_stage.get._1
        val last_stage_pc_h = last_stage_pc(VAddrBits-1, offLen+1)
        val stage_en = last_stage.get._2
        higher := RegEnable(last_stage_pc_h, stage_en)
        higher_plus_one := RegEnable(last_stage_pc_h+1.U, stage_en)
        higher_minus_one := RegEnable(last_stage_pc_h-1.U, stage_en)
      } else {
        higher := h
        higher_plus_one := h + 1.U
        higher_minus_one := h - 1.U
      }
      val target =
        Cat(
          Mux1H(Seq(
            (stat === TAR_OVF, higher_plus_one),
            (stat === TAR_UDF, higher_minus_one),
            (stat === TAR_FIT, higher),
          )),
          lower(offLen-1, 0), 0.U(1.W)
        )
      require(target.getWidth == VAddrBits)
      require(offLen != 0)
      target
    }
    if (subOffsetLen.isDefined)
      Mux(sharing,
        getTarget(subOffsetLen.get)(pc, lower, tarStat, last_stage),
        getTarget(offsetLen)(pc, lower, tarStat, last_stage)
      )
    else
      getTarget(offsetLen)(pc, lower, tarStat, last_stage)
  }
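
  // Illustrative round-trip (explanatory sketch, not part of the original source; `slot`, `pc`
  // and `target` are hypothetical). Compression followed by reconstruction is lossless as long as
  // the target's upper bits differ from the pc's upper bits by at most one:
  //
  //   val slot = Wire(new FtbSlot(BR_OFFSET_LEN))            // 12-bit lower field
  //   slot.setLowerStatByTarget(pc, target, isShare = false) // fills lower/tarStat/sharing
  //   val rebuilt = slot.getTarget(pc)                       // equals target under the condition above
  //
  // The optional `last_stage` pair lets the caller register the pc's upper bits (and their +1/-1
  // neighbours) one cycle earlier, enabled by that stage's fire signal, so reconstruction only
  // muxes pre-computed values.
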
  def fromAnotherSlot(that: FtbSlot) = {
    require(
      this.offsetLen > that.offsetLen && this.subOffsetLen.map(_ == that.offsetLen).getOrElse(true) ||
      this.offsetLen == that.offsetLen
    )
    this.offset := that.offset
    this.tarStat := that.tarStat
    this.sharing := (this.offsetLen > that.offsetLen && that.offsetLen == this.subOffsetLen.get).B
    this.valid := that.valid
    this.lower := ZeroExt(that.lower, this.offsetLen)
  }
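
  // Explanatory note (not part of the original source): fromAnotherSlot copies a narrower slot
  // into this one, zero-extending `lower` and setting `sharing` when a BR_OFFSET_LEN branch lands
  // in a JMP_OFFSET_LEN slot. This is presumably how a conditional branch gets moved into the
  // shared tail slot of an FTBEntry by the entry-generation logic outside this file.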

}

class FTBEntry(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {


  val valid       = Bool()

  val brSlots = Vec(numBrSlot, new FtbSlot(BR_OFFSET_LEN))

  // If shareTailSlot is set, this slot can hold either a branch or a jal/jalr;
  // otherwise it holds only a jal/jalr.
  val tailSlot = new FtbSlot(JMP_OFFSET_LEN, Some(BR_OFFSET_LEN))

  // Partial Fall-Through Address
  val pftAddr     = UInt((log2Up(PredictWidth)+1).W)
  val carry       = Bool()

  val isCall      = Bool()
  val isRet       = Bool()
  val isJalr      = Bool()

  //
  val oversize    = Bool()

  val last_is_rvc = Bool()

  val always_taken = Vec(numBr, Bool())

  def getSlotForBr(idx: Int): FtbSlot = {
    require(
      idx < numBr-1 || idx == numBr-1 && !shareTailSlot ||
      idx == numBr-1 && shareTailSlot
    )
    (idx, numBr, shareTailSlot) match {
      case (i, n, true) if i == n-1 => this.tailSlot
      case _ => this.brSlots(idx)
    }
  }
  def allSlotsForBr = {
    (0 until numBr).map(getSlotForBr(_))
  }
  def setByBrTarget(brIdx: Int, pc: UInt, target: UInt) = {
    val slot = getSlotForBr(brIdx)
    slot.setLowerStatByTarget(pc, target, shareTailSlot && brIdx == numBr-1)
  }
  def setByJmpTarget(pc: UInt, target: UInt) = {
    this.tailSlot.setLowerStatByTarget(pc, target, false)
  }

  def getTargetVec(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    VecInit((brSlots :+ tailSlot).map(_.getTarget(pc, last_stage)))
  }

  def getOffsetVec = VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  def isJal = !isJalr
  def getFallThrough(pc: UInt) = getFallThroughAddr(pc, carry, pftAddr)
  def hasBr(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset}.reduce(_||_) ||
    (shareTailSlot.B && tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } ++
    (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing) else Nil)

  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) ++
      (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing) else Nil)
    )
  }

  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  def brValids = {
    VecInit(
      brSlots.map(_.valid) ++
      (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.sharing) else Nil)
    )
  }

  def noEmptySlotForNewBr = {
    VecInit(
      brSlots.map(_.valid) ++
      (if (shareTailSlot) Seq(tailSlot.valid) else Nil)
    ).reduce(_&&_)
  }

  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = if (shareTailSlot) tailSlot else brSlots.last
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }
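
  // Explanatory note (not part of the original source): branch slots are assumed to be ordered by
  // instruction offset within the fetch block. noEmptySlotForNewBr reports that every
  // branch-capable slot (including the shared tail slot) is already valid, and newBrCanNotInsert
  // reports that a newly seen branch lies after the last recorded branch, so with all slots full
  // it cannot be inserted without displacing an earlier branch; the entry-generation logic outside
  // this file uses these predicates when deciding how to rebuild the entry.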

  def jmpValid = {
    tailSlot.valid && (!shareTailSlot.B || !tailSlot.sharing)
  }

  def brOffset = {
    VecInit(
      brSlots.map(_.offset) ++
      (if (shareTailSlot) Seq(tailSlot.offset) else Nil)
    )
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------FTB entry----------- \n")
    XSDebug(cond, p"v=${valid}\n")
    for(i <- 0 until numBr) {
      XSDebug(cond, p"[br$i]: v=${allSlotsForBr(i).valid}, offset=${allSlotsForBr(i).offset}," +
        p"lower=${Hexadecimal(allSlotsForBr(i).lower)}\n")
    }
    XSDebug(cond, p"[tailSlot]: v=${tailSlot.valid}, offset=${tailSlot.offset}," +
      p"lower=${Hexadecimal(tailSlot.lower)}, sharing=${tailSlot.sharing}\n")
    XSDebug(cond, p"pftAddr=${Hexadecimal(pftAddr)}, carry=$carry\n")
    XSDebug(cond, p"isCall=$isCall, isRet=$isRet, isJalr=$isJalr\n")
    XSDebug(cond, p"oversize=$oversize, last_is_rvc=$last_is_rvc\n")
    XSDebug(cond, p"------------------------------- \n")
  }

}
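
// Usage sketch (explanatory, not part of the original source; `pc`, `brTarget`, `jmpTarget`,
// `pft` and `pftCarry` are hypothetical, and numBr > 1 is assumed so the branch lands in a
// dedicated slot). An entry for a fetch block with one conditional branch and a trailing jal
// could be filled roughly like this by the surrounding entry-generation logic:
//
//   val entry = WireInit(0.U.asTypeOf(new FTBEntry))
//   entry.valid := true.B
//   entry.getSlotForBr(0).valid  := true.B
//   entry.getSlotForBr(0).offset := 1.U              // 2nd slot of the fetch block
//   entry.setByBrTarget(0, pc, brTarget)             // compressed 12-bit form
//   entry.tailSlot.valid  := true.B
//   entry.tailSlot.offset := 7.U
//   entry.setByJmpTarget(pc, jmpTarget)              // full 20-bit form
//   entry.pftAddr := pft
//   entry.carry   := pftCarry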

class FTBEntryWithTag(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  val entry = new FTBEntry
  val tag = UInt(tagSize.W)
  def display(cond: Bool): Unit = {
    entry.display(cond)
    XSDebug(cond, p"tag is ${Hexadecimal(tag)}\n------------------------------- \n")
  }
}

class FTBMeta(implicit p: Parameters) extends XSBundle with FTBParams {
  val writeWay = UInt(log2Ceil(numWays).W)
  val hit = Bool()
  val pred_cycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None
}

object FTBMeta {
  def apply(writeWay: UInt, hit: Bool, pred_cycle: UInt)(implicit p: Parameters): FTBMeta = {
    val e = Wire(new FTBMeta)
    e.writeWay := writeWay
    e.hit := hit
    e.pred_cycle.map(_ := pred_cycle)
    e
  }
}

// class UpdateQueueEntry(implicit p: Parameters) extends XSBundle with FTBParams {
//   val pc = UInt(VAddrBits.W)
//   val ftb_entry = new FTBEntry
//   val hit = Bool()
//   val hit_way = UInt(log2Ceil(numWays).W)
// }
//
// object UpdateQueueEntry {
//   def apply(pc: UInt, fe: FTBEntry, hit: Bool, hit_way: UInt)(implicit p: Parameters): UpdateQueueEntry = {
//     val e = Wire(new UpdateQueueEntry)
//     e.pc := pc
//     e.ftb_entry := fe
//     e.hit := hit
//     e.hit_way := hit_way
//     e
//   }
// }

class FTB(implicit p: Parameters) extends BasePredictor with FTBParams with BPUUtils with HasCircularQueuePtrHelper {
  override val meta_size = WireInit(0.U.asTypeOf(new FTBMeta)).getWidth

  val ftbAddr = new TableAddr(log2Up(numSets), 1)

  class FTBBank(val numSets: Int, val nWays: Int) extends XSModule with BPUUtils {
    val io = IO(new Bundle {
      val s1_fire = Input(Bool())

      // When the FTB hits, read_hits.valid is true and read_hits.bits is the index of the hit way;
      // on a miss, read_hits.valid is false (way allocation is deferred to update time).
      // val read_hits = Valid(Vec(numWays, Bool()))
      val req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val read_resp = Output(new FTBEntry)
      val read_hits = Valid(UInt(log2Ceil(numWays).W))

      val u_req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val update_hits = Valid(UInt(log2Ceil(numWays).W))
      val update_access = Input(Bool())

      val update_pc = Input(UInt(VAddrBits.W))
      val update_write_data = Flipped(Valid(new FTBEntryWithTag))
      val update_write_way = Input(UInt(log2Ceil(numWays).W))
      val update_write_alloc = Input(Bool())
    })
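
    // Explanatory note (not part of the original source): the bank has a single SRAM read port
    // shared by two clients. req_pc carries the s0 prediction lookup; u_req_pc carries a lookup
    // on behalf of an update whose prediction-time metadata says the entry missed, so a way must
    // be found (or allocated) before writing. The mux and assert below rely on the two never
    // firing in the same cycle; the outer FTB module holds io.s1_ready low while such an update
    // lookup is in flight.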

    // The holdRead logic is extracted here (see the HoldUnless below) to fix a bug where an
    // update read would override the prediction read result.
    val ftb = Module(new SRAMTemplate(new FTBEntryWithTag, set = numSets, way = numWays, shouldReset = true, holdRead = false, singlePort = true))

    val pred_rdata   = HoldUnless(ftb.io.r.resp.data, RegNext(io.req_pc.valid && !io.update_access))
    ftb.io.r.req.valid := io.req_pc.valid || io.u_req_pc.valid // io.s0_fire
    ftb.io.r.req.bits.setIdx := Mux(io.u_req_pc.valid, ftbAddr.getIdx(io.u_req_pc.bits), ftbAddr.getIdx(io.req_pc.bits)) // s0_idx

    assert(!(io.req_pc.valid && io.u_req_pc.valid))

    io.req_pc.ready := ftb.io.r.req.ready
    io.u_req_pc.ready := ftb.io.r.req.ready

    val req_tag = RegEnable(ftbAddr.getTag(io.req_pc.bits)(tagSize-1, 0), io.req_pc.valid)
    val req_idx = RegEnable(ftbAddr.getIdx(io.req_pc.bits), io.req_pc.valid)

    val u_req_tag = RegEnable(ftbAddr.getTag(io.u_req_pc.bits)(tagSize-1, 0), io.u_req_pc.valid)

    val read_entries = pred_rdata.map(_.entry)
    val read_tags    = pred_rdata.map(_.tag)

    val total_hits = VecInit((0 until numWays).map(b => read_tags(b) === req_tag && read_entries(b).valid && io.s1_fire))
    val hit = total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val hit_way = OHToUInt(total_hits)

    val u_total_hits = VecInit((0 until numWays).map(b =>
        ftb.io.r.resp.data(b).tag === u_req_tag && ftb.io.r.resp.data(b).entry.valid && RegNext(io.update_access)))
    val u_hit = u_total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val u_hit_way = OHToUInt(u_total_hits)

    assert(PopCount(total_hits) === 1.U || PopCount(total_hits) === 0.U)
    assert(PopCount(u_total_hits) === 1.U || PopCount(u_total_hits) === 0.U)

    val replacer = ReplacementPolicy.fromString(Some("setplru"), numWays, numSets)
    // val allocWriteWay = replacer.way(req_idx)

    val touch_set = Seq.fill(1)(Wire(UInt(log2Ceil(numSets).W)))
    val touch_way = Seq.fill(1)(Wire(Valid(UInt(log2Ceil(numWays).W))))

    touch_set(0) := req_idx

    touch_way(0).valid := hit
    touch_way(0).bits := hit_way

    replacer.access(touch_set, touch_way)
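
    // Explanatory note (not part of the original source): the set-PLRU replacer is only touched on
    // prediction hits, so recently used entries become the least likely victims when an update
    // later needs to allocate a way in the same set.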

    // def allocWay(valids: UInt, meta_tags: UInt, req_tag: UInt) = {
    //   val randomAlloc = false
    //   if (numWays > 1) {
    //     val w = Wire(UInt(log2Up(numWays).W))
    //     val valid = WireInit(valids.andR)
    //     val tags = Cat(meta_tags, req_tag)
    //     val l = log2Up(numWays)
    //     val nChunks = (tags.getWidth + l - 1) / l
    //     val chunks = (0 until nChunks).map( i =>
    //       tags(min((i+1)*l, tags.getWidth)-1, i*l)
    //     )
    //     w := Mux(valid, if (randomAlloc) {LFSR64()(log2Up(numWays)-1,0)} else {chunks.reduce(_^_)}, PriorityEncoder(~valids))
    //     w
    //   } else {
    //     val w = WireInit(0.U)
    //     w
    //   }
    // }

    // val allocWriteWay = allocWay(
    //   VecInit(read_entries.map(_.valid)).asUInt,
    //   VecInit(read_tags).asUInt,
    //   req_tag
    // )

    def allocWay(valids: UInt, idx: UInt) = {
      if (numWays > 1) {
        val w = Wire(UInt(log2Up(numWays).W))
        val valid = WireInit(valids.andR)
        w := Mux(valid, replacer.way(idx), PriorityEncoder(~valids))
        w
      } else {
        val w = WireInit(0.U)
        w
      }
    }
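
    // Explanatory note (not part of the original source): allocation prefers an empty way and only
    // falls back to the set-PLRU victim when the whole set is valid. For example, with
    // valids = "b1011".U, PriorityEncoder(~valids) picks way 2; with valids = "b1111".U the
    // replacer decides.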

    io.read_resp := Mux1H(total_hits, read_entries) // Mux1H
    io.read_hits.valid := hit
    // io.read_hits.bits := Mux(hit, hit_way_1h, VecInit(UIntToOH(allocWriteWay).asBools()))
    io.read_hits.bits := hit_way

    io.update_hits.valid := u_hit
    io.update_hits.bits := u_hit_way

    // XSDebug(!hit, "FTB not hit, alloc a way: %d\n", allocWriteWay)

    // Update logic
    val u_valid = io.update_write_data.valid
    val u_data = io.update_write_data.bits
    val u_idx = ftbAddr.getIdx(io.update_pc)
    val allocWriteWay = allocWay(VecInit(read_entries.map(_.valid)).asUInt, u_idx)
    val u_mask = UIntToOH(Mux(io.update_write_alloc, allocWriteWay, io.update_write_way))

    for (i <- 0 until numWays) {
      XSPerfAccumulate(f"ftb_replace_way$i", u_valid && io.update_write_alloc && OHToUInt(u_mask) === i.U)
      XSPerfAccumulate(f"ftb_replace_way${i}_has_empty", u_valid && io.update_write_alloc && !read_entries.map(_.valid).reduce(_&&_) && OHToUInt(u_mask) === i.U)
      XSPerfAccumulate(f"ftb_hit_way$i", hit && !io.update_access && hit_way === i.U)
    }

    ftb.io.w.apply(u_valid, u_data, u_idx, u_mask)

    // print hit entry info
    Mux1H(total_hits, ftb.io.r.resp.data).display(true.B)
  } // FTBBank

  val ftbBank = Module(new FTBBank(numSets, numWays))

  ftbBank.io.req_pc.valid := io.s0_fire
  ftbBank.io.req_pc.bits := s0_pc

  val ftb_entry = RegEnable(ftbBank.io.read_resp, io.s1_fire)
  val s1_hit = ftbBank.io.read_hits.valid
  val s2_hit = RegEnable(s1_hit, io.s1_fire)
  val writeWay = ftbBank.io.read_hits.bits

  val fallThruAddr = getFallThroughAddr(s2_pc, ftb_entry.carry, ftb_entry.pftAddr)

  // io.out.bits.resp := RegEnable(io.in.bits.resp_in(0), 0.U.asTypeOf(new BranchPredictionResp), io.s1_fire)
  io.out.resp := io.in.bits.resp_in(0)

  val s1_latch_call_is_rvc   = DontCare // TODO: modify when the RAS is added

  io.out.resp.s2.preds.hit           := s2_hit
  io.out.resp.s2.pc                  := s2_pc
  io.out.resp.s2.ftb_entry           := ftb_entry
  io.out.resp.s2.preds.fromFtbEntry(ftb_entry, s2_pc, Some((s1_pc, io.s1_fire)))

  io.out.last_stage_meta := RegEnable(FTBMeta(writeWay.asUInt(), s1_hit, GTimer()).asUInt(), io.s1_fire)

  // always taken logic
  for (i <- 0 until numBr) {
    io.out.resp.s2.preds.br_taken_mask(i) := io.in.bits.resp_in(0).s2.preds.br_taken_mask(i) || s2_hit && ftb_entry.always_taken(i)
  }
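
  // Explanatory note (not part of the original source): the always_taken bits override the
  // incoming direction prediction, forcing a branch taken whenever the FTB hits; they are
  // presumably cleared by the entry-generation logic outside this file once the branch is ever
  // resolved not-taken.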

  // Update logic
  val update = RegNext(io.update.bits)

  // val update_queue = Mem(64, new UpdateQueueEntry)
  // val head, tail = RegInit(UpdateQueuePtr(false.B, 0.U))
  // val u_queue = Module(new Queue(new UpdateQueueEntry, entries = 64, flow = true))
  // assert(u_queue.io.count < 64.U)

  val u_meta = update.meta.asTypeOf(new FTBMeta)
  val u_valid = RegNext(io.update.valid && !io.update.bits.old_entry)

  // io.s1_ready := ftbBank.io.req_pc.ready && u_queue.io.count === 0.U && !u_valid
  io.s1_ready := ftbBank.io.req_pc.ready && !(u_valid && !u_meta.hit)

  // val update_now = u_queue.io.deq.fire && u_queue.io.deq.bits.hit
  val update_now = u_valid && u_meta.hit

  ftbBank.io.u_req_pc.valid := u_valid && !u_meta.hit
  ftbBank.io.u_req_pc.bits := update.pc

  // assert(!(u_valid && RegNext(u_valid) && update.pc === RegNext(update.pc)))
  // assert(!(u_valid && RegNext(u_valid)))

  // val u_way = u_queue.io.deq.bits.hit_way

  val ftb_write = Wire(new FTBEntryWithTag)
  // ftb_write.entry := Mux(update_now, u_queue.io.deq.bits.ftb_entry, RegNext(u_queue.io.deq.bits.ftb_entry))
  // ftb_write.tag   := ftbAddr.getTag(Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc)))(tagSize-1, 0)
  ftb_write.entry := Mux(update_now, update.ftb_entry, RegNext(update.ftb_entry))
  ftb_write.tag   := ftbAddr.getTag(Mux(update_now, update.pc, RegNext(update.pc)))(tagSize-1, 0)

  // val write_valid = update_now || RegNext(u_queue.io.deq.fire && !u_queue.io.deq.bits.hit)
  val write_valid = update_now || RegNext(u_valid && !u_meta.hit)

  // u_queue.io.enq.valid := u_valid
  // u_queue.io.enq.bits := UpdateQueueEntry(update.pc, update.ftb_entry, u_meta.hit, u_meta.writeWay)
  // u_queue.io.deq.ready := RegNext(!u_queue.io.deq.fire || update_now)

  ftbBank.io.update_write_data.valid := write_valid
  ftbBank.io.update_write_data.bits := ftb_write
  // ftbBank.io.update_pc := Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc))
  ftbBank.io.update_pc := Mux(update_now, update.pc, RegNext(update.pc))
  ftbBank.io.update_write_way := Mux(update_now, u_meta.writeWay, ftbBank.io.update_hits.bits)
  // ftbBank.io.update_write_alloc := Mux(update_now, !u_queue.io.deq.bits.hit, !ftbBank.io.update_hits.valid)
  ftbBank.io.update_write_alloc := Mux(update_now, false.B, !ftbBank.io.update_hits.valid)
  ftbBank.io.update_access := u_valid && !u_meta.hit
  ftbBank.io.s1_fire := io.s1_fire
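
  // Explanatory note (not part of the original source): updates take one of two paths. If the
  // prediction-time metadata says the entry hit (u_meta.hit), the write happens immediately
  // (update_now) into the way recorded in the metadata. Otherwise the bank is first probed via
  // u_req_pc; one cycle later the write goes to the way that probe hit, or to a freshly allocated
  // way (allocWay, backed by the set-PLRU replacer) if it missed, with io.s1_ready held low so
  // the probe does not collide with a prediction read.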

  XSDebug("req_v=%b, req_pc=%x, ready=%b (resp at next cycle)\n", io.s0_fire, s0_pc, ftbBank.io.req_pc.ready)
  XSDebug("s2_hit=%b, hit_way=%b\n", s2_hit, writeWay.asUInt)
  XSDebug("s2_br_taken_mask=%b, s2_real_taken_mask=%b\n",
    io.in.bits.resp_in(0).s2.preds.br_taken_mask.asUInt, io.out.resp.s2.real_slot_taken_mask().asUInt)
  XSDebug("s2_target=%x\n", io.out.resp.s2.target)

  ftb_entry.display(true.B)

  XSPerfAccumulate("ftb_read_hits", RegNext(io.s0_fire) && s1_hit)
  XSPerfAccumulate("ftb_read_misses", RegNext(io.s0_fire) && !s1_hit)

  XSPerfAccumulate("ftb_commit_hits", io.update.valid && io.update.bits.preds.hit)
  XSPerfAccumulate("ftb_commit_misses", io.update.valid && !io.update.bits.preds.hit)

  XSPerfAccumulate("ftb_update_req", io.update.valid)
  XSPerfAccumulate("ftb_update_ignored", io.update.valid && io.update.bits.old_entry)
  XSPerfAccumulate("ftb_updated", u_valid)

  val perfinfo = IO(new Bundle(){
    val perfEvents = Output(new PerfEventsBundle(2))
  })
  val perfEvents = Seq(
    ("ftb_commit_hits            ", u_valid  &&  update.preds.hit),
    ("ftb_commit_misses          ", u_valid  && !update.preds.hit),
  )

  for (((perf_out,(perf_name,perf)),i) <- perfinfo.perfEvents.perf_events.zip(perfEvents).zipWithIndex) {
    perf_out.incr_step := RegNext(perf)
  }
}
534