xref: /XiangShan/src/main/scala/xiangshan/frontend/FTB.scala (revision fd3aa0577117b390ca78476eb2755133f758af37)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.frontend
18
19import org.chipsalliance.cde.config.Parameters
20import chisel3._
21import chisel3.util._
22import xiangshan._
23import utils._
24import utility._
25
26import scala.math.min
27import scala.{Tuple2 => &}
28import os.copy
29
30
// Sizing and encoding constants shared by the FTB predictor and its entries.
trait FTBParams extends HasXSParameter with HasBPUConst {
  val numEntries = FtbSize
  val numWays    = FtbWays
  val numSets    = numEntries/numWays // 512
  // Partial tag stored per entry (low 20 bits of the full tag)
  val tagSize    = 20



  // Target-status encoding: whether the branch target's upper PC bits fit,
  // overflow (+1) or underflow (-1) relative to the fetch PC's upper bits.
  val TAR_STAT_SZ = 2
  def TAR_FIT = 0.U(TAR_STAT_SZ.W)
  def TAR_OVF = 1.U(TAR_STAT_SZ.W)
  def TAR_UDF = 2.U(TAR_STAT_SZ.W)

  // Stored low-offset widths: 12 bits for conditional branches,
  // 20 bits for jumps (tail slot)
  def BR_OFFSET_LEN = 12
  def JMP_OFFSET_LEN = 20

  // Close the FTB read port after this many consecutive cycles in which
  // FauFTB and FTB produced consistent hits (power optimization).
  def FTBCLOSE_THRESHOLD_SZ = log2Ceil(500)
  def FTBCLOSE_THRESHOLD = 500.U(FTBCLOSE_THRESHOLD_SZ.W) //can be modified
}
50
// Minimal slot view kept in FTQ memory: instruction offset within the fetch
// block, whether the tail slot is shared by a branch, and a valid bit.
// NOTE: field order defines the hardware bit layout — do not reorder.
class FtbSlot_FtqMem(implicit p: Parameters) extends XSBundle with FTBParams {
  val offset  = UInt(log2Ceil(PredictWidth).W)
  val sharing = Bool()
  val valid   = Bool()
}
56
// A full FTB slot. Targets are stored compressed: only the low `offsetLen`
// bits of the target (`lower`) plus a 2-bit status (`tarStat`) recording how
// the target's upper bits relate to the PC's upper bits (fit / +1 / -1).
// `subOffsetLen`, when present, is the shorter branch-offset width usable when
// this (tail) slot is shared by a conditional branch (`sharing`).
class FtbSlot(val offsetLen: Int, val subOffsetLen: Option[Int] = None)(implicit p: Parameters) extends FtbSlot_FtqMem with FTBParams {
  if (subOffsetLen.isDefined) {
    require(subOffsetLen.get <= offsetLen)
  }
  val lower   = UInt(offsetLen.W)
  val tarStat = UInt(TAR_STAT_SZ.W)

  // Compress `target` into (lower, tarStat) relative to `pc`.
  // When `isShare`, the shorter sub-offset width is used (tail slot holding a
  // branch); `lower` is still zero-extended to the full slot width.
  def setLowerStatByTarget(pc: UInt, target: UInt, isShare: Boolean) = {
    def getTargetStatByHigher(pc_higher: UInt, target_higher: UInt) =
      Mux(target_higher > pc_higher, TAR_OVF,
        Mux(target_higher < pc_higher, TAR_UDF, TAR_FIT))
    // Drop bit 0 (instructions are 2-byte aligned); keep bits [offsetLen:1].
    def getLowerByTarget(target: UInt, offsetLen: Int) = target(offsetLen, 1)
    val offLen = if (isShare) this.subOffsetLen.get else this.offsetLen
    val pc_higher = pc(VAddrBits-1, offLen+1)
    val target_higher = target(VAddrBits-1, offLen+1)
    val stat = getTargetStatByHigher(pc_higher, target_higher)
    val lower = ZeroExt(getLowerByTarget(target, offLen), this.offsetLen)
    this.lower := lower
    this.tarStat := stat
    this.sharing := isShare.B
  }

  // Reconstruct the full target from (lower, tarStat) and `pc`.
  // If `last_stage` is given, the upper-bit variants (h, h+1, h-1) are
  // precomputed from the previous stage's PC and registered, for timing.
  def getTarget(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    def getTarget(offLen: Int)(pc: UInt, lower: UInt, stat: UInt,
      last_stage: Option[Tuple2[UInt, Bool]] = None) = {
      val h                = pc(VAddrBits - 1, offLen + 1)
      val higher           = Wire(UInt((VAddrBits - offLen - 1).W))
      val higher_plus_one  = Wire(UInt((VAddrBits - offLen - 1).W))
      val higher_minus_one = Wire(UInt((VAddrBits-offLen-1).W))

      // Switch between previous stage pc and current stage pc
      // Give flexibility for timing
      if (last_stage.isDefined) {
        val last_stage_pc = last_stage.get._1
        val last_stage_pc_h = last_stage_pc(VAddrBits-1, offLen+1)
        val stage_en = last_stage.get._2
        higher := RegEnable(last_stage_pc_h, stage_en)
        higher_plus_one := RegEnable(last_stage_pc_h+1.U, stage_en)
        higher_minus_one := RegEnable(last_stage_pc_h-1.U, stage_en)
      } else {
        higher := h
        higher_plus_one := h + 1.U
        higher_minus_one := h - 1.U
      }
      // target = {selected upper bits, stored lower bits, 1'b0}
      val target =
        Cat(
          Mux1H(Seq(
            (stat === TAR_OVF, higher_plus_one),
            (stat === TAR_UDF, higher_minus_one),
            (stat === TAR_FIT, higher),
          )),
          lower(offLen-1, 0), 0.U(1.W)
        )
      require(target.getWidth == VAddrBits)
      require(offLen != 0)
      target
    }
    // Shared tail slot: width of the stored offset depends on `sharing`.
    if (subOffsetLen.isDefined)
      Mux(sharing,
        getTarget(subOffsetLen.get)(pc, lower, tarStat, last_stage),
        getTarget(offsetLen)(pc, lower, tarStat, last_stage)
      )
    else
      getTarget(offsetLen)(pc, lower, tarStat, last_stage)
  }
  // Copy `that` into this slot; legal only same-width, or wider `this` whose
  // sub-offset width matches `that` (then the sharing bit is set).
  def fromAnotherSlot(that: FtbSlot) = {
    require(
      this.offsetLen > that.offsetLen && this.subOffsetLen.map(_ == that.offsetLen).getOrElse(true) ||
      this.offsetLen == that.offsetLen
    )
    this.offset := that.offset
    this.tarStat := that.tarStat
    this.sharing := (this.offsetLen > that.offsetLen && that.offsetLen == this.subOffsetLen.get).B
    this.valid := that.valid
    this.lower := ZeroExt(that.lower, this.offsetLen)
  }

  // Field-wise equality between two slots (used for FauFTB/FTB cross-check).
  def slotConsistent(that: FtbSlot) = {
    VecInit(
      this.offset  === that.offset,
      this.lower   === that.lower,
      this.tarStat === that.tarStat,
      this.sharing === that.sharing,
      this.valid   === that.valid
    ).reduce(_&&_)
  }

}
145
146
// Jump-kind flags shared between the full FTB entry and its FTQ-memory view.
// NOTE: field order defines the hardware bit layout — do not reorder.
class FTBEntry_part(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  val isCall      = Bool()
  val isRet       = Bool()
  val isJalr      = Bool()

  // An unconditional jump that is not a JALR is a JAL
  // (only meaningful when the tail slot actually holds a jump).
  def isJal = !isJalr
}
154
// Reduced FTB entry stored in FTQ memory: only slot offsets/valid/sharing,
// enough to answer "which branches are recorded at/before this offset".
class FTBEntry_FtqMem(implicit p: Parameters) extends FTBEntry_part with FTBParams with BPUUtils {

  val brSlots = Vec(numBrSlot, new FtbSlot_FtqMem)
  val tailSlot = new FtbSlot_FtqMem

  // Tail slot holds an unconditional jump (not shared by a branch).
  def jmpValid = {
    tailSlot.valid && !tailSlot.sharing
  }

  // Per-slot vector: slot holds a branch exactly at `offset`.
  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) :+
      (tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing)
    )
  }

  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  // Per-slot mask: slot holds a branch at or before `offset`.
  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } :+
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  // A new branch at `offset` cannot be inserted if the last (tail) slot is
  // already occupied by something earlier than `offset`.
  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = tailSlot
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }

}
183
// Full FTB entry: numBr branch slots where the last (tail) slot is shared
// between a long-offset jump and a short-offset conditional branch, plus
// partial fall-through address, carry, and per-branch always-taken hints.
// NOTE: field order defines the hardware bit layout — do not reorder.
class FTBEntry(implicit p: Parameters) extends FTBEntry_part with FTBParams with BPUUtils {


  val valid       = Bool()

  // Dedicated conditional-branch slots (BR_OFFSET_LEN-bit targets)
  val brSlots = Vec(numBrSlot, new FtbSlot(BR_OFFSET_LEN))

  // Tail slot: jump (JMP_OFFSET_LEN) or shared last branch (BR_OFFSET_LEN)
  val tailSlot = new FtbSlot(JMP_OFFSET_LEN, Some(BR_OFFSET_LEN))

  // Partial Fall-Through Address
  val pftAddr     = UInt(log2Up(PredictWidth).W)
  val carry       = Bool()

  val last_may_be_rvi_call = Bool()

  val always_taken = Vec(numBr, Bool())

  // Slot used for the idx-th branch: the last branch lives in the shared
  // tail slot, earlier ones in the dedicated brSlots.
  def getSlotForBr(idx: Int): FtbSlot = {
    require(idx <= numBr-1)
    (idx, numBr) match {
      case (i, n) if i == n-1 => this.tailSlot
      case _ => this.brSlots(idx)
    }
  }
  def allSlotsForBr = {
    (0 until numBr).map(getSlotForBr(_))
  }
  // Store a branch target; the last branch shares the tail slot (isShare).
  def setByBrTarget(brIdx: Int, pc: UInt, target: UInt) = {
    val slot = getSlotForBr(brIdx)
    slot.setLowerStatByTarget(pc, target, brIdx == numBr-1)
  }
  // Store a jump target in the tail slot (full-width, not shared).
  def setByJmpTarget(pc: UInt, target: UInt) = {
    this.tailSlot.setLowerStatByTarget(pc, target, false)
  }

  // Reconstructed targets of all slots (branches then tail).
  def getTargetVec(pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    VecInit((brSlots :+ tailSlot).map(_.getTarget(pc, last_stage)))
  }

  def getOffsetVec = VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  // Fall-through address from pc + (pftAddr, carry); optionally use the
  // previous stage's registered carry for timing.
  def getFallThrough(pc: UInt, last_stage_entry: Option[Tuple2[FTBEntry, Bool]] = None) = {
    if (last_stage_entry.isDefined) {
      // val (not var): the register reference is never reassigned
      val stashed_carry = RegEnable(last_stage_entry.get._1.carry, last_stage_entry.get._2)
      getFallThroughAddr(pc, stashed_carry, pftAddr)
    } else {
      getFallThroughAddr(pc, carry, pftAddr)
    }
  }

  // Any recorded branch at or before `offset`?
  def hasBr(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset}.reduce(_||_) ||
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  // Per-slot mask: slot holds a branch at or before `offset`.
  def getBrMaskByOffset(offset: UInt) =
    brSlots.map{ s => s.valid && s.offset <= offset } :+
    (tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing)

  // Per-slot vector: slot holds a branch exactly at `offset`.
  def getBrRecordedVec(offset: UInt) = {
    VecInit(
      brSlots.map(s => s.valid && s.offset === offset) :+
      (tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing)
    )
  }

  def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_)

  // Valid bit per branch slot (tail counts only when shared by a branch).
  def brValids = {
    VecInit(
      brSlots.map(_.valid) :+ (tailSlot.valid && tailSlot.sharing)
    )
  }

  def noEmptySlotForNewBr = {
    VecInit(brSlots.map(_.valid) :+ tailSlot.valid).reduce(_&&_)
  }

  // Cannot insert a new branch at `offset` past an occupied earlier tail slot.
  def newBrCanNotInsert(offset: UInt) = {
    val lastSlotForBr = tailSlot
    lastSlotForBr.valid && lastSlotForBr.offset < offset
  }

  // Tail slot holds an unconditional jump (not shared by a branch).
  def jmpValid = {
    tailSlot.valid && !tailSlot.sharing
  }

  def brOffset = {
    VecInit(brSlots.map(_.offset) :+ tailSlot.offset)
  }

  // Field-wise equality with another entry (FauFTB/FTB consistency check).
  def entryConsistent(that: FTBEntry) = {
    val validDiff     = this.valid === that.valid
    val brSlotsDiffSeq  : IndexedSeq[Bool] =
      this.brSlots.zip(that.brSlots).map{
        case(x,y) => x.slotConsistent(y)
      }
    val tailSlotDiff  = this.tailSlot.slotConsistent(that.tailSlot)
    val pftAddrDiff   = this.pftAddr === that.pftAddr
    val carryDiff     = this.carry   === that.carry
    val isCallDiff    = this.isCall  === that.isCall
    val isRetDiff     = this.isRet   === that.isRet
    val isJalrDiff    = this.isJalr  === that.isJalr
    val lastMayBeRviCallDiff = this.last_may_be_rvi_call === that.last_may_be_rvi_call
    val alwaysTakenDiff : IndexedSeq[Bool] =
      this.always_taken.zip(that.always_taken).map{
        case(x,y) => x === y
      }
    VecInit(
      validDiff,
      brSlotsDiffSeq.reduce(_&&_),
      tailSlotDiff,
      pftAddrDiff,
      carryDiff,
      isCallDiff,
      isRetDiff,
      isJalrDiff,
      lastMayBeRviCallDiff,
      alwaysTakenDiff.reduce(_&&_)
    ).reduce(_&&_)
  }

  // Debug dump of the entry when `cond` holds.
  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------FTB entry----------- \n")
    XSDebug(cond, p"v=${valid}\n")
    for(i <- 0 until numBr) {
      XSDebug(cond, p"[br$i]: v=${allSlotsForBr(i).valid}, offset=${allSlotsForBr(i).offset}," +
        p"lower=${Hexadecimal(allSlotsForBr(i).lower)}\n")
    }
    // (fixed stray '}' that used to be printed after the sharing bit)
    XSDebug(cond, p"[tailSlot]: v=${tailSlot.valid}, offset=${tailSlot.offset}," +
      p"lower=${Hexadecimal(tailSlot.lower)}, sharing=${tailSlot.sharing}\n")
    XSDebug(cond, p"pftAddr=${Hexadecimal(pftAddr)}, carry=$carry\n")
    XSDebug(cond, p"isCall=$isCall, isRet=$isRet, isjalr=$isJalr\n")
    XSDebug(cond, p"last_may_be_rvi_call=$last_may_be_rvi_call\n")
    XSDebug(cond, p"------------------------------- \n")
  }

}
320
// FTB entry paired with its partial tag, as stored in the SRAM ways.
class FTBEntryWithTag(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils {
  val entry = new FTBEntry
  val tag = UInt(tagSize.W)
  // Debug dump: entry contents followed by the tag.
  def display(cond: Bool): Unit = {
    entry.display(cond)
    XSDebug(cond, p"tag is ${Hexadecimal(tag)}\n------------------------------- \n")
  }
}
329
// Prediction metadata carried to update time: which way produced the
// prediction and whether it hit. pred_cycle exists only in non-FPGA builds.
class FTBMeta(implicit p: Parameters) extends XSBundle with FTBParams {
  val writeWay = UInt(log2Ceil(numWays).W)
  val hit = Bool()
  val pred_cycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None
}
335
object FTBMeta {
  /** Build an FTBMeta wire from its fields.
    * `pred_cycle` is connected only when the optional field exists
    * (i.e. non-FPGA builds); otherwise the argument is ignored.
    */
  def apply(writeWay: UInt, hit: Bool, pred_cycle: UInt)(implicit p: Parameters): FTBMeta = {
    val e = Wire(new FTBMeta)
    e.writeWay := writeWay
    e.hit := hit
    // foreach, not map: the connection is performed purely for its side effect
    e.pred_cycle.foreach(_ := pred_cycle)
    e
  }
}
345
346// class UpdateQueueEntry(implicit p: Parameters) extends XSBundle with FTBParams {
347//   val pc = UInt(VAddrBits.W)
348//   val ftb_entry = new FTBEntry
349//   val hit = Bool()
350//   val hit_way = UInt(log2Ceil(numWays).W)
351// }
352//
353// object UpdateQueueEntry {
354//   def apply(pc: UInt, fe: FTBEntry, hit: Bool, hit_way: UInt)(implicit p: Parameters): UpdateQueueEntry = {
355//     val e = Wire(new UpdateQueueEntry)
356//     e.pc := pc
357//     e.ftb_entry := fe
358//     e.hit := hit
359//     e.hit_way := hit_way
360//     e
361//   }
362// }
363
// Fetch Target Buffer predictor: a single-port SRAM bank of tagged FTB
// entries shared between prediction reads and update reads/writes, plus an
// adaptive "close FTB" mode that stops reading the SRAM and reuses FauFTB
// results once the two predictors have agreed for FTBCLOSE_THRESHOLD hits.
class FTB(implicit p: Parameters) extends BasePredictor with FTBParams with BPUUtils
  with HasCircularQueuePtrHelper with HasPerfEvents {
  override val meta_size = WireInit(0.U.asTypeOf(new FTBMeta)).getWidth

  val ftbAddr = new TableAddr(log2Up(numSets), 1)

  // SRAM bank with tag match for both prediction and update accesses.
  class FTBBank(val numSets: Int, val nWays: Int) extends XSModule with BPUUtils {
    val io = IO(new Bundle {
      val s1_fire = Input(Bool())

      // when ftb hit, read_hits.valid is true, and read_hits.bits is OH of hit way
      // when ftb not hit, read_hits.valid is false, and read_hits is OH of allocWay
      // val read_hits = Valid(Vec(numWays, Bool()))
      val req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val read_resp = Output(new FTBEntry)
      val read_hits = Valid(UInt(log2Ceil(numWays).W))

      // Fallback selection when more than one way hits (see below)
      val read_multi_entry = Output(new FTBEntry)
      val read_multi_hits = Valid(UInt(log2Ceil(numWays).W))

      val u_req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W)))
      val update_hits = Valid(UInt(log2Ceil(numWays).W))
      val update_access = Input(Bool())

      val update_pc = Input(UInt(VAddrBits.W))
      val update_write_data = Flipped(Valid(new FTBEntryWithTag))
      val update_write_way = Input(UInt(log2Ceil(numWays).W))
      val update_write_alloc = Input(Bool())
    })

    // Extract holdRead logic to fix bug that update read override predict read result
    val ftb = Module(new SRAMTemplate(new FTBEntryWithTag, set = numSets, way = numWays, shouldReset = true, holdRead = false, singlePort = true))
    val ftb_r_entries = ftb.io.r.resp.data.map(_.entry)

    // Hold the prediction read data across cycles where an update read
    // occupied the single SRAM port.
    val pred_rdata   = HoldUnless(ftb.io.r.resp.data, RegNext(io.req_pc.valid && !io.update_access))
    // Update reads take priority over prediction reads on the shared port.
    ftb.io.r.req.valid := io.req_pc.valid || io.u_req_pc.valid // io.s0_fire
    ftb.io.r.req.bits.setIdx := Mux(io.u_req_pc.valid, ftbAddr.getIdx(io.u_req_pc.bits), ftbAddr.getIdx(io.req_pc.bits)) // s0_idx

    assert(!(io.req_pc.valid && io.u_req_pc.valid))

    io.req_pc.ready := ftb.io.r.req.ready
    io.u_req_pc.ready := ftb.io.r.req.ready

    val req_tag = RegEnable(ftbAddr.getTag(io.req_pc.bits)(tagSize-1, 0), io.req_pc.valid)
    val req_idx = RegEnable(ftbAddr.getIdx(io.req_pc.bits), io.req_pc.valid)

    val u_req_tag = RegEnable(ftbAddr.getTag(io.u_req_pc.bits)(tagSize-1, 0), io.u_req_pc.valid)

    val read_entries = pred_rdata.map(_.entry)
    val read_tags    = pred_rdata.map(_.tag)

    // Per-way tag hit, qualified by the entry valid bit and s1 fire.
    val total_hits = VecInit((0 until numWays).map(b => read_tags(b) === req_tag && read_entries(b).valid && io.s1_fire))
    val hit = total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val hit_way = OHToUInt(total_hits)

    //There may be two hits in the four paths of the ftbBank, and the OHToUInt will fail.
    //If there is a redirect in s2 at this time, the wrong FTBEntry will be used to calculate the target,
    //resulting in an address error and affecting performance.
    //The solution is to select a hit entry during multi hit as the entry for s2.
    //Considering timing, use this entry in s3 and trigger s3-redirect.
    val total_hits_reg = RegEnable(total_hits,io.s1_fire)
    val read_entries_reg = read_entries.map(w => RegEnable(w,io.s1_fire))

    // Multi-hit detection: any pair (i, j), i < j, both hitting.
    val multi_hit = VecInit((0 until numWays).map{
      i => (0 until numWays).map(j => {
        if(i < j) total_hits_reg(i) && total_hits_reg(j)
        else false.B
      }).reduce(_||_)
    }).reduce(_||_)
    // Pick the lowest-indexed hitting way and its entry as the s3 fallback.
    val multi_way = PriorityMux(Seq.tabulate(numWays)(i => ((total_hits_reg(i)) -> i.asUInt(log2Ceil(numWays).W))))
    val multi_hit_selectEntry = PriorityMux(Seq.tabulate(numWays)(i => ((total_hits_reg(i)) -> read_entries_reg(i))))

    //Check if the entry read by ftbBank is legal.
    for (n <- 0 to numWays -1 ) {
      val req_pc_reg = RegEnable(io.req_pc.bits, io.req_pc.valid)
      val ftb_entry_fallThrough = read_entries(n).getFallThrough(req_pc_reg)
      when(read_entries(n).valid && total_hits(n) && io.s1_fire){
        assert(req_pc_reg + (2*PredictWidth).U >= ftb_entry_fallThrough, s"FTB sram entry in way${n} fallThrough address error!")
      }
    }

    // Tag match for the update read (one cycle after the u_req).
    val u_total_hits = VecInit((0 until numWays).map(b =>
        ftb.io.r.resp.data(b).tag === u_req_tag && ftb.io.r.resp.data(b).entry.valid && RegNext(io.update_access)))
    val u_hit = u_total_hits.reduce(_||_)
    // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits))
    val u_hit_way = OHToUInt(u_total_hits)

    // assert(PopCount(total_hits) === 1.U || PopCount(total_hits) === 0.U)
    // assert(PopCount(u_total_hits) === 1.U || PopCount(u_total_hits) === 0.U)
    for (n <- 1 to numWays) {
      XSPerfAccumulate(f"ftb_pred_${n}_way_hit", PopCount(total_hits) === n.U)
      XSPerfAccumulate(f"ftb_update_${n}_way_hit", PopCount(u_total_hits) === n.U)
    }

    val replacer = ReplacementPolicy.fromString(Some("setplru"), numWays, numSets)
    // val allocWriteWay = replacer.way(req_idx)

    val touch_set = Seq.fill(1)(Wire(UInt(log2Ceil(numSets).W)))
    val touch_way = Seq.fill(1)(Wire(Valid(UInt(log2Ceil(numWays).W))))

    val write_set = Wire(UInt(log2Ceil(numSets).W))
    val write_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    val read_set = Wire(UInt(log2Ceil(numSets).W))
    val read_way = Wire(Valid(UInt(log2Ceil(numWays).W)))

    read_set := req_idx
    read_way.valid := hit
    read_way.bits  := hit_way

    // Read replacer access is postponed for 1 cycle
    // this helps timing
    // Writes take priority over (delayed) read touches.
    touch_set(0) := Mux(write_way.valid, write_set, RegNext(read_set))
    touch_way(0).valid := write_way.valid || RegNext(read_way.valid)
    touch_way(0).bits := Mux(write_way.valid, write_way.bits, RegNext(read_way.bits))

    replacer.access(touch_set, touch_way)

    // Select the update allocate way
    // Selection logic:
    //    1. if any entries within the same index is not valid, select it
    //    2. if all entries is valid, use replacer
    def allocWay(valids: UInt, idx: UInt): UInt = {
      if (numWays > 1) {
        val w = Wire(UInt(log2Up(numWays).W))
        val valid = WireInit(valids.andR)
        w := Mux(valid, replacer.way(idx), PriorityEncoder(~valids))
        w
      } else {
        val w = WireInit(0.U(log2Up(numWays).W))
        w
      }
    }

    io.read_resp := Mux1H(total_hits, read_entries) // Mux1H
    io.read_hits.valid := hit
    io.read_hits.bits := hit_way

    io.read_multi_entry := multi_hit_selectEntry
    io.read_multi_hits.valid := multi_hit
    io.read_multi_hits.bits := multi_way

    io.update_hits.valid := u_hit
    io.update_hits.bits := u_hit_way

    // Update logic
    val u_valid = io.update_write_data.valid
    val u_data = io.update_write_data.bits
    val u_idx = ftbAddr.getIdx(io.update_pc)
    val allocWriteWay = allocWay(RegNext(VecInit(ftb_r_entries.map(_.valid))).asUInt, u_idx)
    val u_way = Mux(io.update_write_alloc, allocWriteWay, io.update_write_way)
    val u_mask = UIntToOH(u_way)

    for (i <- 0 until numWays) {
      XSPerfAccumulate(f"ftb_replace_way$i", u_valid && io.update_write_alloc && u_way === i.U)
      XSPerfAccumulate(f"ftb_replace_way${i}_has_empty", u_valid && io.update_write_alloc && !ftb_r_entries.map(_.valid).reduce(_&&_) && u_way === i.U)
      XSPerfAccumulate(f"ftb_hit_way$i", hit && !io.update_access && hit_way === i.U)
    }

    ftb.io.w.apply(u_valid, u_data, u_idx, u_mask)

    // for replacer
    write_set := u_idx
    write_way.valid := u_valid
    write_way.bits := Mux(io.update_write_alloc, allocWriteWay, io.update_write_way)

    // print hit entry info
    Mux1H(total_hits, ftb.io.r.resp.data).display(true.B)
  } // FTBBank

  //FTB switch register & temporary storage of fauftb prediction results
  val s0_close_ftb_req = RegInit(false.B)
  val s1_close_ftb_req = RegEnable(s0_close_ftb_req, false.B, io.s0_fire(0))
  val s2_close_ftb_req = RegEnable(s1_close_ftb_req, false.B, io.s1_fire(0))
  val s2_fauftb_ftb_entry_dup = io.s1_fire.map(f => RegEnable(io.fauftb_entry_in, f))
  val s2_fauftb_ftb_entry_hit_dup = io.s1_fire.map(f => RegEnable(io.fauftb_entry_hit_in, f))

  val ftbBank = Module(new FTBBank(numSets, numWays))

  //for close ftb read_req
  ftbBank.io.req_pc.valid := io.s0_fire(0) && !s0_close_ftb_req
  ftbBank.io.req_pc.bits := s0_pc_dup(0)

  val s2_multi_hit = ftbBank.io.read_multi_hits.valid && io.s2_fire(0)
  val s2_multi_hit_way = ftbBank.io.read_multi_hits.bits
  val s2_multi_hit_entry = ftbBank.io.read_multi_entry
  val s2_multi_hit_enable = s2_multi_hit && io.s2_redirect(0)
  XSPerfAccumulate("ftb_s2_multi_hit",s2_multi_hit)
  XSPerfAccumulate("ftb_s2_multi_hit_enable",s2_multi_hit_enable)

  //After closing ftb, the entry output from s2 is the entry of FauFTB cached in s1
  val btb_enable_dup = dup(RegNext(io.ctrl.btb_enable))
  val s1_read_resp = Mux(s1_close_ftb_req,io.fauftb_entry_in,ftbBank.io.read_resp)
  val s2_ftbBank_dup = io.s1_fire.map(f => RegEnable(ftbBank.io.read_resp, f))
  val s2_ftb_entry_dup = dup(0.U.asTypeOf(new FTBEntry))
  for(((s2_fauftb_entry,s2_ftbBank_entry),s2_ftb_entry) <-
    s2_fauftb_ftb_entry_dup zip s2_ftbBank_dup zip s2_ftb_entry_dup){
      s2_ftb_entry := Mux(s2_close_ftb_req,s2_fauftb_entry,s2_ftbBank_entry)
  }
  // s3 entry: multi-hit fallback entry overrides the normal s2 selection
  val s3_ftb_entry_dup = io.s2_fire.zip(s2_ftb_entry_dup).map {case (f, e) => RegEnable(Mux(s2_multi_hit_enable, s2_multi_hit_entry, e), f)}

  //After closing ftb, the hit output from s2 is the hit of FauFTB cached in s1.
  //s1_hit is the ftbBank hit.
  val s1_hit = Mux(s1_close_ftb_req,false.B,ftbBank.io.read_hits.valid && io.ctrl.btb_enable)
  val s2_ftb_hit_dup = io.s1_fire.map(f => RegEnable(s1_hit, 0.B, f))
  val s2_hit_dup = dup(0.U.asTypeOf(Bool()))
  for(((s2_fauftb_hit,s2_ftb_hit),s2_hit) <-
    s2_fauftb_ftb_entry_hit_dup zip s2_ftb_hit_dup zip s2_hit_dup){
      s2_hit := Mux(s2_close_ftb_req,s2_fauftb_hit,s2_ftb_hit)
  }
  val s3_hit_dup = io.s2_fire.zip(s2_hit_dup).map {case (f, h) => RegEnable(Mux(s2_multi_hit_enable, s2_multi_hit, h), 0.B, f)}
  val s3_mult_hit_dup = io.s2_fire.map(f => RegEnable(s2_multi_hit_enable,f))
  val writeWay = Mux(s1_close_ftb_req,0.U,ftbBank.io.read_hits.bits)
  val s2_ftb_meta = RegEnable(FTBMeta(writeWay.asUInt, s1_hit, GTimer()).asUInt, io.s1_fire(0))
  val s2_multi_hit_meta = FTBMeta(s2_multi_hit_way.asUInt, s2_multi_hit, GTimer()).asUInt

  //Consistent count of entries for fauftb and ftb
  val fauftb_ftb_entry_consistent_counter = RegInit(0.U(FTBCLOSE_THRESHOLD_SZ.W))
  val fauftb_ftb_entry_consistent = s2_fauftb_ftb_entry_dup(0).entryConsistent(s2_ftbBank_dup(0))

  //if close ftb_req, the counter need keep
  when(io.s2_fire(0) && s2_fauftb_ftb_entry_hit_dup(0) && s2_ftb_hit_dup(0) ){
    fauftb_ftb_entry_consistent_counter := Mux(fauftb_ftb_entry_consistent, fauftb_ftb_entry_consistent_counter + 1.U, 0.U)
  } .elsewhen(io.s2_fire(0) && !s2_fauftb_ftb_entry_hit_dup(0) && s2_ftb_hit_dup(0) ){
    fauftb_ftb_entry_consistent_counter := 0.U
  }

  // Enter close mode once FauFTB has matched FTB long enough.
  when((fauftb_ftb_entry_consistent_counter >= FTBCLOSE_THRESHOLD) && io.s0_fire(0)){
    s0_close_ftb_req := true.B
  }

  //Clear counter during false_hit or ifuRedirect
  val ftb_false_hit = WireInit(false.B)
  val needReopen = s0_close_ftb_req && (ftb_false_hit || io.redirectFromIFU)
  ftb_false_hit := io.update.valid && io.update.bits.false_hit
  when(needReopen){
    fauftb_ftb_entry_consistent_counter := 0.U
    s0_close_ftb_req := false.B
  }

  val s2_close_consistent = s2_fauftb_ftb_entry_dup(0).entryConsistent(s2_ftb_entry_dup(0))
  val s2_not_close_consistent = s2_ftbBank_dup(0).entryConsistent(s2_ftb_entry_dup(0))

  // Sanity: the selected s2 entry must match the source it was taken from.
  when(s2_close_ftb_req && io.s2_fire(0)){
    assert(s2_close_consistent, s"Entry inconsistency after ftb req is closed!")
  }.elsewhen(!s2_close_ftb_req &&  io.s2_fire(0)){
    assert(s2_not_close_consistent, s"Entry inconsistency after ftb req is not closed!")
  }

  val  reopenCounter = !s1_close_ftb_req && s2_close_ftb_req &&  io.s2_fire(0)
  val  falseHitReopenCounter = ftb_false_hit && s1_close_ftb_req
  XSPerfAccumulate("ftb_req_reopen_counter",reopenCounter)
  XSPerfAccumulate("false_hit_reopen_Counter",falseHitReopenCounter)
  XSPerfAccumulate("ifuRedirec_needReopen",s1_close_ftb_req && io.redirectFromIFU)
  XSPerfAccumulate("this_cycle_is_close",s2_close_ftb_req && io.s2_fire(0))
  XSPerfAccumulate("this_cycle_is_open",!s2_close_ftb_req && io.s2_fire(0))

  // io.out.bits.resp := RegEnable(io.in.bits.resp_in(0), 0.U.asTypeOf(new BranchPredictionResp), io.s1_fire)
  io.out := io.in.bits.resp_in(0)

  io.out.s2.full_pred.map {case fp => fp.multiHit := false.B}

  io.out.s2.full_pred.zip(s2_hit_dup).map {case (fp, h) => fp.hit := h}
  io.out.s2.pc                  := s2_pc_dup
  // Fill s2 full_pred from the selected entry; pass s1 pc/entry for timing.
  for (full_pred & s2_ftb_entry & s2_pc & s1_pc & s1_fire <-
    io.out.s2.full_pred zip s2_ftb_entry_dup zip s2_pc_dup zip s1_pc_dup zip io.s1_fire) {
      full_pred.fromFtbEntry(s2_ftb_entry,
        s2_pc,
        // Previous stage meta for better timing
        Some(s1_pc, s1_fire),
        Some(s1_read_resp, s1_fire)
      )
  }

  io.out.s3.full_pred.zip(s3_hit_dup).map {case (fp, h) => fp.hit := h}
  io.out.s3.full_pred.zip(s3_mult_hit_dup).map {case (fp, m) => fp.multiHit := m}
  io.out.s3.pc                  := s3_pc_dup
  for (full_pred & s3_ftb_entry & s3_pc & s2_pc & s2_fire <-
    io.out.s3.full_pred zip s3_ftb_entry_dup zip s3_pc_dup zip s2_pc_dup zip io.s2_fire)
      full_pred.fromFtbEntry(s3_ftb_entry, s3_pc, Some((s2_pc, s2_fire)))

  io.out.last_stage_ftb_entry := s3_ftb_entry_dup(0)
  io.out.last_stage_meta := RegEnable(Mux(s2_multi_hit_enable, s2_multi_hit_meta, s2_ftb_meta), io.s2_fire(0))

  // always taken logic
  for (i <- 0 until numBr) {
    for (out_fp & in_fp & s2_hit & s2_ftb_entry <-
      io.out.s2.full_pred zip io.in.bits.resp_in(0).s2.full_pred zip s2_hit_dup zip s2_ftb_entry_dup)
      out_fp.br_taken_mask(i) := in_fp.br_taken_mask(i) || s2_hit && s2_ftb_entry.always_taken(i)
    for (out_fp & in_fp & s3_hit & s3_ftb_entry <-
      io.out.s3.full_pred zip io.in.bits.resp_in(0).s3.full_pred zip s3_hit_dup zip s3_ftb_entry_dup)
      out_fp.br_taken_mask(i) := in_fp.br_taken_mask(i) || s3_hit && s3_ftb_entry.always_taken(i)
  }

  // Update logic
  val update = io.update.bits

  val u_meta = update.meta.asTypeOf(new FTBMeta)
  val u_valid = io.update.valid && !io.update.bits.old_entry

  // Delayed copies used when the update must first read the bank (miss case).
  val (_, delay2_pc) = DelayNWithValid(update.pc, u_valid, 2)
  val (_, delay2_entry) = DelayNWithValid(update.ftb_entry, u_valid, 2)


  // Hit updates write immediately; miss updates need a read to pick a way.
  val update_now = u_valid && u_meta.hit
  val update_need_read = u_valid && !u_meta.hit
  // stall one more cycle because we use a whole cycle to do update read tag hit
  io.s1_ready := ftbBank.io.req_pc.ready && !(update_need_read) && !RegNext(update_need_read)

  ftbBank.io.u_req_pc.valid := update_need_read
  ftbBank.io.u_req_pc.bits := update.pc



  val ftb_write = Wire(new FTBEntryWithTag)
  ftb_write.entry := Mux(update_now, update.ftb_entry, delay2_entry)
  ftb_write.tag   := ftbAddr.getTag(Mux(update_now, update.pc, delay2_pc))(tagSize-1, 0)

  val write_valid = update_now || DelayN(u_valid && !u_meta.hit, 2)
  val write_pc    = Mux(update_now, update.pc,       delay2_pc)

  ftbBank.io.update_write_data.valid := write_valid
  ftbBank.io.update_write_data.bits := ftb_write
  ftbBank.io.update_pc          := write_pc
  ftbBank.io.update_write_way   := Mux(update_now, u_meta.writeWay, RegNext(ftbBank.io.update_hits.bits)) // use it one cycle later
  ftbBank.io.update_write_alloc := Mux(update_now, false.B,         RegNext(!ftbBank.io.update_hits.valid)) // use it one cycle later
  ftbBank.io.update_access := u_valid && !u_meta.hit
  ftbBank.io.s1_fire := io.s1_fire(0)

  // Sanity check on the fall-through address of the entry being written.
  val ftb_write_fallThrough = ftb_write.entry.getFallThrough(write_pc)
  when(write_valid){
    assert(write_pc + (FetchWidth * 4).U >= ftb_write_fallThrough, s"FTB write_entry fallThrough address error!")
  }

  XSDebug("req_v=%b, req_pc=%x, ready=%b (resp at next cycle)\n", io.s0_fire(0), s0_pc_dup(0), ftbBank.io.req_pc.ready)
  XSDebug("s2_hit=%b, hit_way=%b\n", s2_hit_dup(0), writeWay.asUInt)
  XSDebug("s2_br_taken_mask=%b, s2_real_taken_mask=%b\n",
    io.in.bits.resp_in(0).s2.full_pred(0).br_taken_mask.asUInt, io.out.s2.full_pred(0).real_slot_taken_mask().asUInt)
  XSDebug("s2_target=%x\n", io.out.s2.getTarget(0))

  s2_ftb_entry_dup(0).display(true.B)

  XSPerfAccumulate("ftb_read_hits", RegNext(io.s0_fire(0)) && s1_hit)
  XSPerfAccumulate("ftb_read_misses", RegNext(io.s0_fire(0)) && !s1_hit)

  XSPerfAccumulate("ftb_commit_hits", io.update.valid && u_meta.hit)
  XSPerfAccumulate("ftb_commit_misses", io.update.valid && !u_meta.hit)

  XSPerfAccumulate("ftb_update_req", io.update.valid)
  XSPerfAccumulate("ftb_update_ignored", io.update.valid && io.update.bits.old_entry)
  XSPerfAccumulate("ftb_updated", u_valid)

  override val perfEvents = Seq(
    ("ftb_commit_hits            ", io.update.valid  &&  u_meta.hit),
    ("ftb_commit_misses          ", io.update.valid  && !u_meta.hit),
  )
  generatePerfEvent()
}
723