xref: /XiangShan/src/main/scala/xiangshan/frontend/FTB.scala (revision 82674533125d3d049f50148b1d9e215e1463f136)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.frontend
18
19import org.chipsalliance.cde.config.Parameters
20import chisel3._
21import chisel3.util._
22import xiangshan._
23import utils._
24import utility._
25
26import scala.math.min
27import scala.{Tuple2 => &}
28import os.copy
29
30
trait FTBParams extends HasXSParameter with HasBPUConst {
  // FTB geometry: numEntries total entries organized as numSets sets of numWays ways.
  val numEntries = FtbSize
  val numWays    = FtbWays
  val numSets    = numEntries/numWays // 512
  // Number of PC tag bits stored alongside each entry.
  val tagSize    = 20



  // Target-state encoding: relation of the target's upper PC bits to the entry PC's.
  val TAR_STAT_SZ = 2
  def TAR_FIT = 0.U(TAR_STAT_SZ.W) // target shares the PC's upper bits
  def TAR_OVF = 1.U(TAR_STAT_SZ.W) // target's upper bits are PC's upper bits + 1 (carry)
  def TAR_UDF = 2.U(TAR_STAT_SZ.W) // target's upper bits are PC's upper bits - 1 (borrow)

  // Stored low-order target bits: 12 for conditional branches, 20 for jumps.
  def BR_OFFSET_LEN = 12
  def JMP_OFFSET_LEN = 20
}
47
class FtbSlot_FtqMem(implicit p: Parameters) extends XSBundle with FTBParams {
  // Slimmed-down slot view kept in FTQ memory: only what the FTQ needs.
  // NOTE: field order defines the bundle's bit layout — do not reorder.
  val offset  = UInt(log2Ceil(PredictWidth).W) // instruction offset within the fetch block
  val sharing = Bool()                         // tail slot holds a conditional branch instead of a jump
  val valid   = Bool()
}
53
/**
  * One branch slot of an FTB entry. Stores the low `offsetLen` bits of the
  * target (`lower`) plus a 2-bit state (`tarStat`) describing how the target's
  * upper bits relate to the PC's upper bits, so the full target can be
  * reconstructed from the PC.
  *
  * @param offsetLen    number of low target bits stored in this slot
  * @param subOffsetLen when defined, this slot can be "shared" by a shorter
  *                     branch that only uses `subOffsetLen` low bits
  */
class FtbSlot(val offsetLen: Int, val subOffsetLen: Option[Int] = None)(implicit p: Parameters) extends FtbSlot_FtqMem with FTBParams {
  if (subOffsetLen.isDefined) {
    require(subOffsetLen.get <= offsetLen)
  }
  val lower   = UInt(offsetLen.W)
  val tarStat = UInt(TAR_STAT_SZ.W)

  /** Encode `target` relative to `pc` into (lower, tarStat); `isShare` selects
    * the shorter sub-offset encoding (sharing the tail slot with a branch). */
  def setLowerStatByTarget(pc: UInt, target: UInt, isShare: Boolean) = {
    def getTargetStatByHigher(pc_higher: UInt, target_higher: UInt) =
      Mux(target_higher > pc_higher, TAR_OVF,
        Mux(target_higher < pc_higher, TAR_UDF, TAR_FIT))
    // bit 0 is always zero (2-byte alignment), so store bits [offsetLen, 1]
    def getLowerByTarget(target: UInt, offsetLen: Int) = target(offsetLen, 1)
    val offLen = if (isShare) this.subOffsetLen.get else this.offsetLen
    val pc_higher = pc(VAddrBits-1, offLen+1)
    val target_higher = target(VAddrBits-1, offLen+1)
    val stat = getTargetStatByHigher(pc_higher, target_higher)
    val lower = ZeroExt(getLowerByTarget(target, offLen), this.offsetLen)
    this.lower := lower
    this.tarStat := stat
    this.sharing := isShare.B
  }

  /** Reconstruct the full target from `pc` and the stored (lower, tarStat).
    * `last_stage` optionally supplies the previous stage's (pc, fire) so the
    * upper-bit adders can be registered for timing. */
  def getTarget(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    def getTarget(offLen: Int)(pc: UInt, lower: UInt, stat: UInt,
      last_stage: Option[Tuple2[UInt, Bool]] = None) = {
      val h                = pc(VAddrBits - 1, offLen + 1)
      val higher           = Wire(UInt((VAddrBits - offLen - 1).W))
      val higher_plus_one  = Wire(UInt((VAddrBits - offLen - 1).W))
      val higher_minus_one = Wire(UInt((VAddrBits-offLen-1).W))

      // Switch between previous stage pc and current stage pc
      // Give flexibility for timing
      if (last_stage.isDefined) {
        val last_stage_pc = last_stage.get._1
        val last_stage_pc_h = last_stage_pc(VAddrBits-1, offLen+1)
        val stage_en = last_stage.get._2
        higher := RegEnable(last_stage_pc_h, stage_en)
        higher_plus_one := RegEnable(last_stage_pc_h+1.U, stage_en)
        higher_minus_one := RegEnable(last_stage_pc_h-1.U, stage_en)
      } else {
        higher := h
        higher_plus_one := h + 1.U
        higher_minus_one := h - 1.U
      }
      // Select upper bits by tarStat, append stored lower bits, bit 0 is zero.
      val target =
        Cat(
          Mux1H(Seq(
            (stat === TAR_OVF, higher_plus_one),
            (stat === TAR_UDF, higher_minus_one),
            (stat === TAR_FIT, higher),
          )),
          lower(offLen-1, 0), 0.U(1.W)
        )
      require(target.getWidth == VAddrBits)
      require(offLen != 0)
      target
    }
    if (subOffsetLen.isDefined)
      Mux(sharing,
        getTarget(subOffsetLen.get)(pc, lower, tarStat, last_stage),
        getTarget(offsetLen)(pc, lower, tarStat, last_stage)
      )
    else
      getTarget(offsetLen)(pc, lower, tarStat, last_stage)
  }

  /** Copy `that` slot into this one. Legal only when widths are equal, or this
    * slot is wider and declares `that`'s width as its sub-offset length. */
  def fromAnotherSlot(that: FtbSlot) = {
    // Original used getOrElse(true) here and subOffsetLen.get below, so a wider
    // slot with subOffsetLen = None passed the require and then crashed on .get
    // at elaboration; Option.contains both tightens the check and avoids .get.
    require(
      this.offsetLen > that.offsetLen && this.subOffsetLen.contains(that.offsetLen) ||
      this.offsetLen == that.offsetLen
    )
    this.offset := that.offset
    this.tarStat := that.tarStat
    this.sharing := (this.offsetLen > that.offsetLen && this.subOffsetLen.contains(that.offsetLen)).B
    this.valid := that.valid
    this.lower := ZeroExt(that.lower, this.offsetLen)
  }

}
132
133
class FTBEntry_part(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  // Jump-kind flags for the entry's (unconditional) jump instruction.
  // NOTE: field order defines the bundle's bit layout — do not reorder.
  val isCall      = Bool()
  val isRet       = Bool()
  val isJalr      = Bool()

  // A direct jump (jal) is any jump that is not indirect (jalr).
  def isJal = !isJalr
}
141
class FTBEntry_FtqMem(implicit p: Parameters) extends FTBEntry_part with FTBParams with BPUUtils {
  // Reduced FTB entry stored in FTQ memory: slot offsets/valids only, no targets.
  // NOTE: field order defines the bundle's bit layout — do not reorder.

  val brSlots = Vec(numBrSlot, new FtbSlot_FtqMem)
  val tailSlot = new FtbSlot_FtqMem

  // The tail slot holds a real jump only when it is not shared by a branch.
  def jmpValid = {
    tailSlot.valid && !tailSlot.sharing
  }

  // One bit per slot: slot is valid and records a branch exactly at `offset`.
  // The tail slot counts only when it is shared by a conditional branch.
  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) :+
      (tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing)
    )
  }

  // True iff some slot already records a branch at `offset`.
  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  // One bit per slot: slot holds a branch at or before `offset`.
  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } :+
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  // A new branch at `offset` cannot be inserted if the last (tail) slot is
  // occupied by something earlier than `offset` — slots are kept in order.
  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = tailSlot
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }

}
170
/**
  * A full FTB entry: up to `numBr` conditional-branch slots plus a tail slot
  * that holds either a jump or (when `sharing`) the last conditional branch.
  * Also records the fall-through address in compressed (pftAddr, carry) form.
  * NOTE: field order defines the bundle's bit layout — do not reorder.
  */
class FTBEntry(implicit p: Parameters) extends FTBEntry_part with FTBParams with BPUUtils {


  val valid       = Bool()

  val brSlots = Vec(numBrSlot, new FtbSlot(BR_OFFSET_LEN))

  // Tail slot stores jump targets (20 bits) but may be shared by a branch (12 bits).
  val tailSlot = new FtbSlot(JMP_OFFSET_LEN, Some(BR_OFFSET_LEN))

  // Partial Fall-Through Address: low bits of the fall-through, plus a carry
  // into the upper bits; the full address is rebuilt by getFallThroughAddr.
  val pftAddr     = UInt(log2Up(PredictWidth).W)
  val carry       = Bool()

  val last_may_be_rvi_call = Bool()

  val always_taken = Vec(numBr, Bool())

  /** Slot used for the idx-th conditional branch; the last branch lives in the
    * (shared) tail slot, earlier ones in brSlots. */
  def getSlotForBr(idx: Int): FtbSlot = {
    require(idx <= numBr-1)
    (idx, numBr) match {
      case (i, n) if i == n-1 => this.tailSlot
      case _ => this.brSlots(idx)
    }
  }
  /** All branch-capable slots, in program order. */
  def allSlotsForBr = {
    (0 until numBr).map(getSlotForBr(_))
  }
  /** Encode a branch target into slot brIdx; the last branch shares the tail
    * slot and therefore uses the short (sub-offset) encoding. */
  def setByBrTarget(brIdx: Int, pc: UInt, target: UInt) = {
    val slot = getSlotForBr(brIdx)
    slot.setLowerStatByTarget(pc, target, brIdx == numBr-1)
  }
  /** Encode a jump target into the tail slot (full-width, non-shared). */
  def setByJmpTarget(pc: UInt, target: UInt) = {
    this.tailSlot.setLowerStatByTarget(pc, target, false)
  }

  /** Reconstructed targets of all slots (branches then tail). */
  def getTargetVec(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    VecInit((brSlots :+ tailSlot).map(_.getTarget(pc, last_stage)))
  }

  def getOffsetVec = VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  /** Fall-through address; optionally uses the previous stage's registered
    * carry bit for timing. */
  def getFallThrough(pc: UInt, last_stage_entry: Option[Tuple2[FTBEntry, Bool]] = None) = {
    if (last_stage_entry.isDefined) {
      // val, not var: the stashed carry register is never reassigned.
      val stashed_carry = RegEnable(last_stage_entry.get._1.carry, last_stage_entry.get._2)
      getFallThroughAddr(pc, stashed_carry, pftAddr)
    } else {
      getFallThroughAddr(pc, carry, pftAddr)
    }
  }

  /** True iff some slot holds a branch at or before `offset`. */
  def hasBr(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset}.reduce(_||_) ||
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  /** One bit per slot: slot holds a branch at or before `offset`. */
  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } :+
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  /** One bit per slot: slot records a branch exactly at `offset`. */
  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) :+
      (tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing)
    )
  }

  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  /** Valid bits of branch-capable slots (tail counts only when shared). */
  def brValids = {
    VecInit(
      brSlots.map(_.valid) :+ (tailSlot.valid && tailSlot.sharing)
    )
  }

  def noEmptySlotForNewBr = {
    VecInit(brSlots.map(_.valid) :+ tailSlot.valid).reduce(_&&_)
  }

  /** Slots are kept in program order, so a new branch after the tail slot's
    * occupant cannot be inserted. */
  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = tailSlot
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }

  def jmpValid = {
    tailSlot.valid && !tailSlot.sharing
  }

  def brOffset = {
    VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  }

  /** Debug dump of the entry when `cond` holds. */
  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------FTB entry----------- \n")
    XSDebug(cond, p"v=${valid}\n")
    for(i <- 0 until numBr) {
      XSDebug(cond, p"[br$i]: v=${allSlotsForBr(i).valid}, offset=${allSlotsForBr(i).offset}," +
        p"lower=${Hexadecimal(allSlotsForBr(i).lower)}\n")
    }
    XSDebug(cond, p"[tailSlot]: v=${tailSlot.valid}, offset=${tailSlot.offset}," +
      p"lower=${Hexadecimal(tailSlot.lower)}, sharing=${tailSlot.sharing}}\n")
    XSDebug(cond, p"pftAddr=${Hexadecimal(pftAddr)}, carry=$carry\n")
    XSDebug(cond, p"isCall=$isCall, isRet=$isRet, isjalr=$isJalr\n")
    XSDebug(cond, p"last_may_be_rvi_call=$last_may_be_rvi_call\n")
    XSDebug(cond, p"------------------------------- \n")
  }

}
276
class FTBEntryWithTag(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  // FTB entry plus its PC tag, as stored in the SRAM.
  val entry = new FTBEntry
  val tag = UInt(tagSize.W)
  // Debug dump of the entry and its tag when `cond` holds.
  def display(cond: Bool): Unit = {
    entry.display(cond)
    XSDebug(cond, p"tag is ${Hexadecimal(tag)}\n------------------------------- \n")
  }
}
285
class FTBMeta(implicit p: Parameters) extends XSBundle with FTBParams {
  // Per-prediction metadata carried to the update pipeline.
  val writeWay = UInt(log2Ceil(numWays).W) // way that hit (or the way to write on update)
  val hit = Bool()                         // whether the prediction hit in the FTB
  // Cycle of the prediction; only instantiated for simulation (not FPGA) builds.
  val pred_cycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None
}
291
object FTBMeta {
  /** Construct an FTBMeta wire from its fields. `pred_cycle` is connected only
    * when the optional field exists (i.e. non-FPGA builds). */
  def apply(writeWay: UInt, hit: Bool, pred_cycle: UInt)(implicit p: Parameters): FTBMeta = {
    val e = Wire(new FTBMeta)
    e.writeWay := writeWay
    e.hit := hit
    // foreach, not map: the connection is a side effect and map's result was discarded.
    e.pred_cycle.foreach(_ := pred_cycle)
    e
  }
}
301
302// class UpdateQueueEntry(implicit p: Parameters) extends XSBundle with FTBParams {
303//   val pc = UInt(VAddrBits.W)
304//   val ftb_entry = new FTBEntry
305//   val hit = Bool()
306//   val hit_way = UInt(log2Ceil(numWays).W)
307// }
308//
309// object UpdateQueueEntry {
310//   def apply(pc: UInt, fe: FTBEntry, hit: Bool, hit_way: UInt)(implicit p: Parameters): UpdateQueueEntry = {
311//     val e = Wire(new UpdateQueueEntry)
312//     e.pc := pc
313//     e.ftb_entry := fe
314//     e.hit := hit
315//     e.hit_way := hit_way
316//     e
317//   }
318// }
319
// Top-level Fetch Target Buffer predictor. Wraps a set-associative SRAM bank
// (FTBBank), drives the s1/s2/s3 prediction pipeline, and handles commit-time
// updates (read-for-hit-check followed by a delayed write on miss).
class FTB(implicit p: Parameters) extends BasePredictor with FTBParams with BPUUtils
  with HasCircularQueuePtrHelper with HasPerfEvents {
  override val meta_size = WireInit(0.U.asTypeOf(new FTBMeta)).getWidth

  val ftbAddr = new TableAddr(log2Up(numSets), 1)

  // Set-associative storage bank: single-port SRAM shared between the predict
  // read path and the update read/write path (update has priority on the port).
  class FTBBank(val numSets: Int, val nWays: Int) extends XSModule with BPUUtils {
    val io = IO(new Bundle {
      val s1_fire = Input(Bool())

      // when ftb hit, read_hits.valid is true, and read_hits.bits is OH of hit way
      // when ftb not hit, read_hits.valid is false, and read_hits is OH of allocWay
      // val read_hits = Valid(Vec(numWays, Bool()))
      val req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val read_resp = Output(new FTBEntry)
      val read_hits = Valid(UInt(log2Ceil(numWays).W))

      // Update-side read request (tag check before a miss-allocate write).
      val u_req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val update_hits = Valid(UInt(log2Ceil(numWays).W))
      val update_access = Input(Bool())

      val update_pc = Input(UInt(VAddrBits.W))
      val update_write_data = Flipped(Valid(new FTBEntryWithTag))
      val update_write_way = Input(UInt(log2Ceil(numWays).W))
      val update_write_alloc = Input(Bool())
    })

    // Extract holdRead logic to fix bug that update read override predict read result
    val ftb = Module(new SRAMTemplate(new FTBEntryWithTag, set = numSets, way = numWays, shouldReset = true, holdRead = false, singlePort = true))
    val ftb_r_entries = ftb.io.r.resp.data.map(_.entry)

    // Hold the last predict-read data while the port is used by an update read.
    val pred_rdata   = HoldUnless(ftb.io.r.resp.data, RegNext(io.req_pc.valid && !io.update_access))
    ftb.io.r.req.valid := io.req_pc.valid || io.u_req_pc.valid // io.s0_fire
    ftb.io.r.req.bits.setIdx := Mux(io.u_req_pc.valid, ftbAddr.getIdx(io.u_req_pc.bits), ftbAddr.getIdx(io.req_pc.bits)) // s0_idx

    // Predict read and update read must never contend in the same cycle
    // (io.s1_ready stalls the front end while an update read is pending).
    assert(!(io.req_pc.valid && io.u_req_pc.valid))

    io.req_pc.ready := ftb.io.r.req.ready
    io.u_req_pc.ready := ftb.io.r.req.ready

    // Tags/indices are registered so they line up with the SRAM's 1-cycle read.
    val req_tag = RegEnable(ftbAddr.getTag(io.req_pc.bits)(tagSize-1, 0), io.req_pc.valid)
    val req_idx = RegEnable(ftbAddr.getIdx(io.req_pc.bits), io.req_pc.valid)

    val u_req_tag = RegEnable(ftbAddr.getTag(io.u_req_pc.bits)(tagSize-1, 0), io.u_req_pc.valid)

    val read_entries = pred_rdata.map(_.entry)
    val read_tags    = pred_rdata.map(_.tag)

    // Per-way hit vector for the predict read (qualified by s1_fire).
    val total_hits = VecInit((0 until numWays).map(b => read_tags(b) === req_tag && read_entries(b).valid && io.s1_fire))
    val hit = total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val hit_way = OHToUInt(total_hits)

    // Per-way hit vector for the update read (raw SRAM data, one cycle after access).
    val u_total_hits = VecInit((0 until numWays).map(b =>
        ftb.io.r.resp.data(b).tag === u_req_tag && ftb.io.r.resp.data(b).entry.valid && RegNext(io.update_access)))
    val u_hit = u_total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val u_hit_way = OHToUInt(u_total_hits)

    // assert(PopCount(total_hits) === 1.U || PopCount(total_hits) === 0.U)
    // assert(PopCount(u_total_hits) === 1.U || PopCount(u_total_hits) === 0.U)
    for (n <- 1 to numWays) {
      XSPerfAccumulate(f"ftb_pred_${n}_way_hit", PopCount(total_hits) === n.U)
      XSPerfAccumulate(f"ftb_update_${n}_way_hit", PopCount(u_total_hits) === n.U)
    }

    val replacer = ReplacementPolicy.fromString(Some("setplru"), numWays, numSets)
    // val allocWriteWay = replacer.way(req_idx)

    val touch_set = Seq.fill(1)(Wire(UInt(log2Ceil(numSets).W)))
    val touch_way = Seq.fill(1)(Wire(Valid(UInt(log2Ceil(numWays).W))))

    val write_set = Wire(UInt(log2Ceil(numSets).W))
    val write_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    val read_set = Wire(UInt(log2Ceil(numSets).W))
    val read_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    read_set := req_idx
    read_way.valid := hit
    read_way.bits  := hit_way

    // Read replacer access is postponed for 1 cycle
    // this helps timing
    touch_set(0) := Mux(write_way.valid, write_set, RegNext(read_set))
    touch_way(0).valid := write_way.valid || RegNext(read_way.valid)
    touch_way(0).bits := Mux(write_way.valid, write_way.bits, RegNext(read_way.bits))

    replacer.access(touch_set, touch_way)

    // Select the update allocate way
    // Selection logic:
    //    1. if any entries within the same index is not valid, select it
    //    2. if all entries is valid, use replacer
    def allocWay(valids: UInt, idx: UInt): UInt = {
      if (numWays > 1) {
        val w = Wire(UInt(log2Up(numWays).W))
        val valid = WireInit(valids.andR)
        w := Mux(valid, replacer.way(idx), PriorityEncoder(~valids))
        w
      } else {
        val w = WireInit(0.U(log2Up(numWays).W))
        w
      }
    }

    io.read_resp := Mux1H(total_hits, read_entries) // Mux1H
    io.read_hits.valid := hit
    io.read_hits.bits := hit_way

    io.update_hits.valid := u_hit
    io.update_hits.bits := u_hit_way

    // Update logic
    val u_valid = io.update_write_data.valid
    val u_data = io.update_write_data.bits
    val u_idx = ftbAddr.getIdx(io.update_pc)
    // Valids are registered to line up with the update read's response cycle.
    val allocWriteWay = allocWay(RegNext(VecInit(ftb_r_entries.map(_.valid))).asUInt, u_idx)
    val u_way = Mux(io.update_write_alloc, allocWriteWay, io.update_write_way)
    val u_mask = UIntToOH(u_way)

    for (i <- 0 until numWays) {
      XSPerfAccumulate(f"ftb_replace_way$i", u_valid && io.update_write_alloc && u_way === i.U)
      XSPerfAccumulate(f"ftb_replace_way${i}_has_empty", u_valid && io.update_write_alloc && !ftb_r_entries.map(_.valid).reduce(_&&_) && u_way === i.U)
      XSPerfAccumulate(f"ftb_hit_way$i", hit && !io.update_access && hit_way === i.U)
    }

    ftb.io.w.apply(u_valid, u_data, u_idx, u_mask)

    // for replacer
    write_set := u_idx
    write_way.valid := u_valid
    write_way.bits := Mux(io.update_write_alloc, allocWriteWay, io.update_write_way)

    // print hit entry info
    Mux1H(total_hits, ftb.io.r.resp.data).display(true.B)
  } // FTBBank

  val ftbBank = Module(new FTBBank(numSets, numWays))

  ftbBank.io.req_pc.valid := io.s0_fire(0)
  ftbBank.io.req_pc.bits := s0_pc_dup(0)

  // Duplicated (per-consumer) pipeline registers of the read entry and hit flag.
  val btb_enable_dup = dup(RegNext(io.ctrl.btb_enable))
  val s2_ftb_entry_dup = io.s1_fire.map(f => RegEnable(ftbBank.io.read_resp, f))
  val s3_ftb_entry_dup = io.s2_fire.zip(s2_ftb_entry_dup).map {case (f, e) => RegEnable(e, f)}

  val s1_hit = ftbBank.io.read_hits.valid && io.ctrl.btb_enable
  val s2_hit_dup = io.s1_fire.map(f => RegEnable(s1_hit, 0.B, f))
  val s3_hit_dup = io.s2_fire.zip(s2_hit_dup).map {case (f, h) => RegEnable(h, 0.B, f)}
  val writeWay = ftbBank.io.read_hits.bits

  // io.out.bits.resp := RegEnable(io.in.bits.resp_in(0), 0.U.asTypeOf(new BranchPredictionResp), io.s1_fire)
  io.out := io.in.bits.resp_in(0)

  // Fill s2 full prediction from the duplicated FTB entries; `&` is the Tuple2
  // alias imported at the top of the file, used to destructure the zips.
  io.out.s2.full_pred.zip(s2_hit_dup).map {case (fp, h) => fp.hit := h}
  io.out.s2.pc                  := s2_pc_dup
  for (full_pred & s2_ftb_entry & s2_pc & s1_pc & s1_fire <-
    io.out.s2.full_pred zip s2_ftb_entry_dup zip s2_pc_dup zip s1_pc_dup zip io.s1_fire) {
      full_pred.fromFtbEntry(s2_ftb_entry,
        s2_pc,
        // Previous stage meta for better timing
        Some(s1_pc, s1_fire),
        Some(ftbBank.io.read_resp, s1_fire)
      )
  }

  io.out.s3.full_pred.zip(s3_hit_dup).map {case (fp, h) => fp.hit := h}
  io.out.s3.pc                  := s3_pc_dup
  for (full_pred & s3_ftb_entry & s3_pc & s2_pc & s2_fire <-
    io.out.s3.full_pred zip s3_ftb_entry_dup zip s3_pc_dup zip s2_pc_dup zip io.s2_fire)
      full_pred.fromFtbEntry(s3_ftb_entry, s3_pc, Some((s2_pc, s2_fire)))

  io.out.last_stage_ftb_entry := s3_ftb_entry_dup(0)
  io.out.last_stage_meta := RegEnable(RegEnable(FTBMeta(writeWay.asUInt, s1_hit, GTimer()).asUInt, io.s1_fire(0)), io.s2_fire(0))

  // always taken logic
  for (i <- 0 until numBr) {
    for (out_fp & in_fp & s2_hit & s2_ftb_entry <-
      io.out.s2.full_pred zip io.in.bits.resp_in(0).s2.full_pred zip s2_hit_dup zip s2_ftb_entry_dup)
      out_fp.br_taken_mask(i) := in_fp.br_taken_mask(i) || s2_hit && s2_ftb_entry.always_taken(i)
    for (out_fp & in_fp & s3_hit & s3_ftb_entry <-
      io.out.s3.full_pred zip io.in.bits.resp_in(0).s3.full_pred zip s3_hit_dup zip s3_ftb_entry_dup)
      out_fp.br_taken_mask(i) := in_fp.br_taken_mask(i) || s3_hit && s3_ftb_entry.always_taken(i)
  }

  // Update logic
  val update = io.update.bits

  val u_meta = update.meta.asTypeOf(new FTBMeta)
  // old_entry updates carry no new FTB content, so they are not written.
  val u_valid = io.update.valid && !io.update.bits.old_entry

  // Miss updates write 2 cycles later (after the update-read tag check), so
  // delay the pc/entry to line up with that write.
  val (_, delay2_pc) = DelayNWithValid(update.pc, u_valid, 2)
  val (_, delay2_entry) = DelayNWithValid(update.ftb_entry, u_valid, 2)


  // Hit updates write immediately; miss updates first read to find a way.
  val update_now = u_valid && u_meta.hit
  val update_need_read = u_valid && !u_meta.hit
  // stall one more cycle because we use a whole cycle to do update read tag hit
  io.s1_ready := ftbBank.io.req_pc.ready && !(update_need_read) && !RegNext(update_need_read)

  ftbBank.io.u_req_pc.valid := update_need_read
  ftbBank.io.u_req_pc.bits := update.pc



  val ftb_write = Wire(new FTBEntryWithTag)
  ftb_write.entry := Mux(update_now, update.ftb_entry, delay2_entry)
  ftb_write.tag   := ftbAddr.getTag(Mux(update_now, update.pc, delay2_pc))(tagSize-1, 0)

  val write_valid = update_now || DelayN(u_valid && !u_meta.hit, 2)

  ftbBank.io.update_write_data.valid := write_valid
  ftbBank.io.update_write_data.bits := ftb_write
  ftbBank.io.update_pc          := Mux(update_now, update.pc,       delay2_pc)
  ftbBank.io.update_write_way   := Mux(update_now, u_meta.writeWay, RegNext(ftbBank.io.update_hits.bits)) // use it one cycle later
  ftbBank.io.update_write_alloc := Mux(update_now, false.B,         RegNext(!ftbBank.io.update_hits.valid)) // use it one cycle later
  ftbBank.io.update_access := u_valid && !u_meta.hit
  ftbBank.io.s1_fire := io.s1_fire(0)

  XSDebug("req_v=%b, req_pc=%x, ready=%b (resp at next cycle)\n", io.s0_fire(0), s0_pc_dup(0), ftbBank.io.req_pc.ready)
  XSDebug("s2_hit=%b, hit_way=%b\n", s2_hit_dup(0), writeWay.asUInt)
  XSDebug("s2_br_taken_mask=%b, s2_real_taken_mask=%b\n",
    io.in.bits.resp_in(0).s2.full_pred(0).br_taken_mask.asUInt, io.out.s2.full_pred(0).real_slot_taken_mask().asUInt)
  XSDebug("s2_target=%x\n", io.out.s2.getTarget(0))

  s2_ftb_entry_dup(0).display(true.B)

  XSPerfAccumulate("ftb_read_hits", RegNext(io.s0_fire(0)) && s1_hit)
  XSPerfAccumulate("ftb_read_misses", RegNext(io.s0_fire(0)) && !s1_hit)

  XSPerfAccumulate("ftb_commit_hits", io.update.valid && u_meta.hit)
  XSPerfAccumulate("ftb_commit_misses", io.update.valid && !u_meta.hit)

  XSPerfAccumulate("ftb_update_req", io.update.valid)
  XSPerfAccumulate("ftb_update_ignored", io.update.valid && io.update.bits.old_entry)
  XSPerfAccumulate("ftb_updated", u_valid)

  override val perfEvents = Seq(
    ("ftb_commit_hits            ", io.update.valid  &&  u_meta.hit),
    ("ftb_commit_misses          ", io.update.valid  && !u_meta.hit),
  )
  generatePerfEvent()
}
564