/***************************************************************************************
* Copyright (c) 2021-2025 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
* Copyright (c) 2024-2025 Institute of Information Engineering, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import coupledL2.utils.SplittedSRAM
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._
import utility.mbist.MbistPipeline
/* The ptw cache caches page table entries of all levels.
 * The ptw cache responds at the next cycle.
 * The cache itself should not be blocked;
 * when the miss queue is full, just block requests outside.
 */
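
/* Cache organization, as implemented below:
 *  - l3 (Sv48 only) and l2: non-leaf PTEs in fully-associative register files
 *  - l1: non-leaf PTEs in a set-associative SRAM (SplittedSRAM)
 *  - l0: 4KB leaf PTEs in a set-associative SRAM, one sector of tlbcontiguous (8) PTEs per entry
 *  - sp: super-page leaf PTEs in a fully-associative register file
 */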

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val pbmt = UInt(ptePbmtLen.W)
  val perm = new PtePermBundle()
  val n = UInt(pteNLen.W)
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()
  val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
    val jmp_bitmap_check = Bool()
    val pte = UInt(XLEN.W) // Page Table Entry
  })

  def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U, n: UInt = 0.U,
            perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B, jmp_bitmap_check: Bool = false.B,
            pte: UInt = 0.U): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.n := n
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    if (HasBitmapCheck) {
      this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check
      this.bitmapCheck.get.pte := pte
    }
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())
  val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
    val jmp_bitmap_check = Bool()
    val hitway = UInt(l2tlbParams.l0nWays.W)
    val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
    val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
  })

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = Vec(tlbcontiguous, 0.U),
            perm: Vec[PtePermBundle] = Vec(tlbcontiguous, 0.U.asTypeOf(new PtePermBundle())),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = Vec(tlbcontiguous, true.B),
            jmp_bitmap_check: Bool = false.B,
            hitway: UInt = 0.U, ptes: Vec[UInt], cfs: Vec[Bool]): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    if (HasBitmapCheck) {
      this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check
      this.bitmapCheck.get.hitway := hitway
      this.bitmapCheck.get.ptes := ptes
      this.bitmapCheck.get.cfs := cfs
    }
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // the stage 1 pte is found in the cache, but the stage 2 pte still needs to be searched in the cache at PTW
      val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
        val jmp_bitmap_check = Bool() // the pte is found in l0 or sp, but still needs a bitmap check
        val toLLPTW = Bool()
        val hitway = UInt(l2tlbParams.l0nWays.W)
        val pte = UInt(XLEN.W) // Page Table Entry
        val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
        val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
        val SPlevel = UInt(log2Up(Level).W)
      })
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
      val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
        val jmp_bitmap_check = Bool() // the pte is found in l0 or sp, but still needs a bitmap check
        val hitway = UInt(l2tlbParams.l0nWays.W)
        val pte = UInt(XLEN.W) // Page Table Entry
        val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
        val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
        val fromSP = Bool()
        val SPlevel = UInt(log2Up(Level).W)
      })
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, each standing for one group of page cache entries
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  // when refilling l0, save the way info so the late bitmap wakeup is convenient;
  // valid in the same cycle as refill.levelOH.l0
  val l0_way_info = Option.when(HasBitmapCheck)(Output(UInt(l2tlbParams.l0nWays.W)))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
  val bitmap_wakeup = Option.when(HasBitmapCheck)(Flipped(ValidIO(new Bundle {
    val setIndex = Input(UInt(PtwL0SetIdxLen.W))
    val tag = Input(UInt(SPTagLen.W))
    val isSp = Input(Bool())
    val way_info = UInt(l2tlbParams.l0nWays.W)
    val pte_index = UInt(sectortlbwidth.W)
    val check_success = Bool()
  })))
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits)

  // two additional regs record whether the corresponding cache entry has passed the bitmap check
  // 32 (l0nSets) * 8 (l0nWays) * 8 (tlbcontiguous)
  val l0BitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.l0nSets)(VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W))))))))
  val spBitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.spSize)(0.U(1.W))))
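  // Semantics of the bitmap-status regs above: a bit is set to check_success by
  // bitmap_wakeup and cleared to 0 when the entry is refilled, so a freshly refilled
  // entry always re-runs the bitmap check before it is allowed to hit.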

  val bitmapEnable = io.csr_dup(0).mbmc.BME === 1.U && io.csr_dup(0).mbmc.CMODE === 0.U
  // TODO: four caches make the codes dirty, think about how to deal with it

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept a new req
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))         // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))         // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush))  // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready
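  // Pipeline recap of the stages wired above:
  //   stageReq   - issue the l1/l0 SRAM reads; l3/l2/sp hit vectors come from regs
  //   stageDelay - capture and hold the SRAM read data (stageDelay_valid_1cycle)
  //   stageCheck - select the hit way and run the ECC check on l1/l0 data
  //   stageResp  - drive io.resp from the registered check results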

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SplittedSRAM(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    waySplit = 1,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false,
    hasMbist = hasMbist,
    hasSramCtl = hasSramCtl
  ))
  val mbistPlL1 = MbistPipeline.PlaceMbistPipeline(1, s"MbistPipePtwL1", hasMbist)
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  val l1asids = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(l2tlbParams.hashAsidWidth.W))))
  val l1vmids = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(l2tlbParams.hashAsidWidth.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }

  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SplittedSRAM(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    waySplit = 2,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false,
    hasMbist = hasMbist,
    hasSramCtl = hasSramCtl
  ))
  val mbistPlL0 = MbistPipeline.PlaceMbistPipeline(1, s"MbistPipePtwL0", hasMbist)
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  val l0asids = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashAsidWidth.W))))
  val l0vmids = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashAsidWidth.W))))
  val l0vpns = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashVpnWidth.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: level 1/2/3 leaf pte of 2MB/1GB/512GB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true, hasNapot = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

  if (HasBitmapCheck) {
    // wake up the corresponding entry
    when (io.bitmap_wakeup.get.valid) {
      when (io.bitmap_wakeup.get.bits.isSp) {
        for (i <- 0 until l2tlbParams.spSize) {
          when (sp(i).tag === io.bitmap_wakeup.get.bits.tag && spv(i) === 1.U) {
            spBitmapReg(i) := io.bitmap_wakeup.get.bits.check_success
          }
        }
      } .otherwise {
        val wakeup_setindex = io.bitmap_wakeup.get.bits.setIndex
        l0BitmapReg(wakeup_setindex)(OHToUInt(io.bitmap_wakeup.get.bits.way_info))(io.bitmap_wakeup.get.bits.pte_index) := io.bitmap_wakeup.get.bits.check_success
        assert(l0v(wakeup_setindex * l2tlbParams.l0nWays.U + OHToUInt(io.bitmap_wakeup.get.bits.way_info)) === 1.U,
          "The woken-up entry must be valid!")
      }
    }
  }

  // Access Perf
  val l3AccessPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

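  // vpn_match compares the tag bits above the sector offset: vpnnLen bits per level
  // plus 3 low bits for the 8-PTE (tlbcontiguous) sector are skipped at the bottom.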
  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3))
  }
  // NOTE: not actually bypassed, just check if hit, re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))
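  // h_search collapses allStage to onlyStage1: an allStage request first resolves its
  // stage-1 translation, and refilled entries are tagged the same way (see refill_h),
  // so both lookup and refill compare under the onlyStage1 label.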

  // l3
  val l3Hit = if (EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if (EnableSv48) Some(Wire(UInt(gvpnLen.W))) else None
  val l3HitPbmt = if (EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None
  val l3Pre = if (EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if (EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
        case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
          && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay: hold the l3 hit results read out this cycle
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with other entries with RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay: hold the l2 hit results read out this cycle
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPbmt, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }

  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1nWays, l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after the sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)}  l1v:${vVec(i)}  hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }
  val te = ClockGate.genTeSink
  val l0_masked_clock = ClockGate(te.cgen, stageReq.fire | (!flush_dup(0) && refill.levelOH.l0) | mbistPlL0.map(_.mbist.req).getOrElse(false.B), clock)
  val l1_masked_clock = ClockGate(te.cgen, stageReq.fire | (!flush_dup(1) && refill.levelOH.l1) | mbistPlL1.map(_.mbist.req).getOrElse(false.B), clock)
  l0.clock := l0_masked_clock
  l1.clock := l1_masked_clock
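  // The SRAM clocks above are gated: l0/l1 only toggle on a read (stageReq.fire),
  // an un-flushed refill, or an MBIST request, saving clock power when idle.
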
  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer, l2tlbParams.l0nWays, l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError, l0HitWay, l0BitmapCheckResult, l0JmpBitmapCheck) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after the sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))

    val ishptw = RegEnable(stageDelay(0).bits.isHptwReq, stageDelay(1).fire)
    val s2x_info = RegEnable(stageDelay(0).bits.req_info.s2xlate, stageDelay(1).fire)
    val pte_index = RegEnable(stageDelay(0).bits.req_info.vpn(sectortlbwidth - 1, 0), stageDelay(1).fire)
    val jmp_bitmap_check = WireInit(false.B)
    val hit = WireInit(false.B)
    val l0bitmapreg = WireInit((VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W)))))))
    if (HasBitmapCheck) {
      l0bitmapreg := RegEnable(RegNext(l0BitmapReg(ridx)), stageDelay(1).fire)
      // LLPTW triggers the bitmap check for allStage requests itself, so only gate the
      // hit with the bitmap status under the condition (s2x_info =/= allStage || ishptw)
      hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 1.U, ParallelOR(hitVec))
      when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 0.U) {
        jmp_bitmap_check := true.B
      }
    } else {
      hit := ParallelOR(hitVec)
    }
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)}  l0v:${vVec(i)}  hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError, UIntToOH(hitWay), l0bitmapreg(hitWay), jmp_bitmap_check)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPbmt = l0HitData.pbmts
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = VecInit(l0HitData.onlypf.map(!_))
  val l0Ptes = WireInit(VecInit(Seq.fill(tlbcontiguous)(0.U(XLEN.W)))) // L0 level Page Table Entry Vector
  val l0cfs = WireInit(VecInit(Seq.fill(tlbcontiguous)(false.B))) // L0 level Bitmap Check Failed Vector
  if (HasBitmapCheck) {
    for (i <- 0 until tlbcontiguous) {
      l0Ptes(i) := Cat(l0HitData.pbmts(i).asUInt, l0HitPPN(i), 0.U(2.W), l0HitPerm(i).asUInt, l0HitValid(i).asUInt)
      l0cfs(i) := !l0BitmapCheckResult(i)
    }
  }
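  // l0Ptes re-assembles a raw PTE image from the decomposed cache fields
  // (pbmt | ppn | two zeroed RSW bits | packed perm bits | v) so the bitmap
  // checker can consume it like a freshly walked PTE.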

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid, spJmpBitmapCheck) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, allType = true, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val ishptw = RegEnable(stageReq.bits.isHptwReq, stageReq.fire)
    val s2x_info = RegEnable(stageReq.bits.req_info.s2xlate, stageReq.fire)
    val jmp_bitmap_check = WireInit(false.B)
    val hit = WireInit(false.B)
    if (HasBitmapCheck) {
      hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 1.U, ParallelOR(hitVec))
      when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 0.U) {
        jmp_bitmap_check := true.B
      }
    } else {
      hit := ParallelOR(hitVec)
    }

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire),
     RegEnable(jmp_bitmap_check, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)
  val spPte = Cat(spHitData.pbmt.asUInt, spHitData.ppn, 0.U(2.W), spHitPerm.asUInt, spHitData.v.asUInt) // super-page Page Table Entry

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get, l3HitPbmt.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid, jmp_bitmap_check = l0JmpBitmapCheck, hitway = l0HitWay, ptes = l0Ptes, cfs = l0cfs)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitData.n.getOrElse(0.U), spHitPerm, false.B, spHitLevel, spValid, spJmpBitmapCheck, spPte)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )
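  // "bypassed" does not forward refill data: it records that this lookup raced with a
  // refill of the same vpn/level (see refill_bypass above), so on a miss the walker
  // should re-access the page cache rather than trust the stale lookup result.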

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst  := stageResp.bits.isFirst
  io.resp.bits.hit      := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit
  if (HasBitmapCheck) {
    io.resp.bits.toFsm.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toFsm.bitmapCheck.get.toLLPTW := resp_res.l0.bitmapCheck.get.jmp_bitmap_check && (stageResp.bits.req_info.s2xlate === noS2xlate || stageResp.bits.req_info.s2xlate === onlyStage1)
    io.resp.bits.toFsm.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway
    io.resp.bits.toFsm.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte
    io.resp.bits.toFsm.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes
    io.resp.bits.toFsm.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs
    io.resp.bits.toFsm.bitmapCheck.get.SPlevel := resp_res.sp.level
  }

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt)
  io.resp.bits.toHptw.resp.entry.n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := false.B
  if (HasBitmapCheck) {
    io.resp.bits.toHptw.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toHptw.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway
    io.resp.bits.toHptw.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte
    io.resp.bits.toHptw.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes
    io.resp.bits.toHptw.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs
    io.resp.bits.toHptw.bitmapCheck.get.fromSP := resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toHptw.bitmapCheck.get.SPlevel := resp_res.sp.level
  }

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i),
      Mux(resp_res.sp.hit, resp_res.sp.pbmt,
        Mux(resp_res.l1.hit, resp_res.l1.pbmt,
          resp_res.l2.pbmt)))
    io.resp.bits.stage1.entry(i).n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle))))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := false.B
    io.resp.bits.stage1.entry(i).cf := l0cfs(i) // L0 level Bitmap Check Failed Vector
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := resp_res.l0.hit
  io.resp.bits.stage1.not_merge := false.B
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
  val mPBMTE = io.csr.mPBMTE
  val hPBMTE = io.csr.hPBMTE
  val pbmte = Mux(refill.req_info_dup(0).s2xlate === onlyStage1 || refill.req_info_dup(0).s2xlate === allStage, hPBMTE, mPBMTE)

  def Tran2D(flushMask: UInt): Vec[UInt] = {
    val tran2D = Wire(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    for (i <- 0 until l2tlbParams.l0nSets) {
      tran2D(i) := flushMask((i + 1) * l2tlbParams.l0nWays - 1, i * l2tlbParams.l0nWays)
    }
    tran2D
  }
  def updateL0BitmapReg(l0BitmapReg: Vec[Vec[Vec[UInt]]], tran2D: Vec[UInt]) = {
    for (i <- 0 until l2tlbParams.l0nSets) {
      for (j <- 0 until l2tlbParams.l0nWays) {
        when (tran2D(i)(j) === 0.U) {
          for (k <- 0 until tlbcontiguous) {
            l0BitmapReg(i)(j)(k) := 0.U
          }
        }
      }
    }
  }
  def TranVec(flushMask: UInt): Vec[UInt] = {
    val vec = Wire(Vec(l2tlbParams.spSize, UInt(1.W)))
    for (i <- 0 until l2tlbParams.spSize) {
      vec(i) := flushMask(i)
    }
    vec
  }
  def updateSpBitmapReg(spBitmapReg: Vec[UInt], vec: Vec[UInt]) = {
    for (i <- 0 until l2tlbParams.spSize) {
      spBitmapReg(i) := spBitmapReg(i) & vec(i)
    }
  }
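  // The four helpers above clear the bitmap-check status of entries being replaced:
  // Tran2D/TranVec reshape an inverted one-hot refill mask (~RfvOH) into per-set/
  // per-entry masks, and the update functions zero the status wherever the mask bit
  // is 0, forcing the refilled entry to re-run the bitmap check.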

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    val l3Refill =
      !flush_dup(2) &&
      refill.levelOH.l3.get &&
      !memPte(2).isLeaf() &&
      memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).hgatp.mode)
    val l3RefillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way).suggestName(s"l3_refillIdx")
    val l3RfOH = UIntToOH(l3RefillIdx).asUInt.suggestName(s"l3_rfOH")
    when (l3Refill) {
      l3.get(l3RefillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.vmid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(l3RefillIdx)
      l3v.get := l3v.get | l3RfOH
      l3g.get := (l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U)
      l3h.get(l3RefillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === l3RefillIdx
      }
    }
    XSDebug(l3Refill, p"[l3 refill] refillIdx:${l3RefillIdx} refillEntry:${l3.get(l3RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(l3Refill, p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | l3RfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U))}\n")
  }

  // L2 refill
  val l2Refill =
    !flush_dup(2) &&
    refill.levelOH.l2 &&
    !memPte(2).isLeaf() &&
    memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).hgatp.mode)
  val l2RefillIdx = replaceWrapper(l2v, ptwl2replace.way).suggestName(s"l2_refillIdx")
  val l2RfOH = UIntToOH(l2RefillIdx).asUInt.suggestName(s"l2_rfOH")
  when (l2Refill) {
    l2(l2RefillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.vmid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(l2RefillIdx)
    l2v := l2v | l2RfOH
    l2g := (l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U)
    l2h(l2RefillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === l2RefillIdx
    }
  }
  XSDebug(l2Refill, p"[l2 refill] refillIdx:${l2RefillIdx} refillEntry:${l2(l2RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
  XSDebug(l2Refill, p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | l2RfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U))}\n")

  // L1 refill
  val l1Refill = !flush_dup(1) && refill.levelOH.l1
  val l1RefillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn).suggestName(s"l1_refillIdx")
  val l1VictimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(l1RefillIdx)).suggestName(s"l1_victimWay")
  val l1VictimWayOH = UIntToOH(l1VictimWay).suggestName(s"l1_victimWayOH")
  val l1RfvOH = UIntToOH(Cat(l1RefillIdx, l1VictimWay)).asUInt.suggestName(s"l1_rfvOH")
  val l1Wdata = Wire(l1EntryType)
  val l1Wvpn = refill.req_info_dup(1).vpn
  val l1Wasid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid)
  l1Wdata.gen(
    vpn = l1Wvpn,
    asid = l1Wasid,
    vmid = io.csr_dup(1).hgatp.vmid,
    data = memRdata,
    levelUInt = 1.U,
    refill_prefetch_dup(1),
    refill.req_info_dup(1).s2xlate,
    pbmte,
    io.csr_dup(1).hgatp.mode
  )
  when (l1Refill) {
    l1.io.w.apply(
      valid = true.B,
      setIdx = l1RefillIdx,
      data = l1Wdata,
      waymask = l1VictimWayOH
    )
    ptwl1replace.access(l1RefillIdx, l1VictimWay)
    l1v := l1v | l1RfvOH
    l1g := l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U)
    l1h(l1RefillIdx)(l1VictimWay) := refill_h(1)
    l1asids(l1RefillIdx)(l1VictimWay) := XORFold(l1Wasid, l2tlbParams.hashAsidWidth)
    l1vmids(l1RefillIdx)(l1VictimWay) := XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === l1VictimWay
    }
  }
  XSDebug(l1Refill, p"[l1 refill] refillIdx:0x${Hexadecimal(l1RefillIdx)} victimWay:${l1VictimWay} victimWayOH:${Binary(l1VictimWayOH)} rfvOH(in UInt):${Cat(l1RefillIdx, l1VictimWay)}\n")
  XSDebug(l1Refill, p"[l1 refill] refilldata:0x${l1Wdata}\n")
  XSDebug(l1Refill, p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | l1RfvOH)}\n")
  XSDebug(l1Refill, p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U))}\n")

  // L0 refill
  val l0Refill = !flush_dup(0) && refill.levelOH.l0 && !memPte(0).isNapot(refill.level_dup(0))
  val l0RefillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn).suggestName(s"l0_refillIdx")
  val l0VictimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(l0RefillIdx)).suggestName(s"l0_victimWay")
  val l0VictimWayOH = UIntToOH(l0VictimWay).asUInt.suggestName(s"l0_victimWayOH")
  val l0RfvOH = UIntToOH(Cat(l0RefillIdx, l0VictimWay)).suggestName(s"l0_rfvOH")
  val l0Wdata = Wire(l0EntryType)
  // pass the l0 way info out for the late bitmap wakeup logic
  if (HasBitmapCheck) {
    io.l0_way_info.get := l0VictimWayOH
  }
  val l0Wvpn = refill.req_info_dup(0).vpn
  val l0Wasid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)
  l0Wdata.gen(
    vpn = l0Wvpn,
    asid = l0Wasid,
    vmid = io.csr_dup(0).hgatp.vmid,
    data = memRdata,
    levelUInt = 0.U,
    refill_prefetch_dup(0),
    refill.req_info_dup(0).s2xlate,
    pbmte,
    io.csr_dup(0).hgatp.mode
  )
  when (l0Refill) {
    l0.io.w.apply(
      valid = true.B,
      setIdx = l0RefillIdx,
      data = l0Wdata,
      waymask = l0VictimWayOH
    )
    ptwl0replace.access(l0RefillIdx, l0VictimWay)
    l0v := l0v | l0RfvOH
    l0g := l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U)
    l0h(l0RefillIdx)(l0VictimWay) := refill_h(0)
    if (HasBitmapCheck) { updateL0BitmapReg(l0BitmapReg, Tran2D(~l0RfvOH)) }
    l0asids(l0RefillIdx)(l0VictimWay) := XORFold(l0Wasid, l2tlbParams.hashAsidWidth)
    l0vmids(l0RefillIdx)(l0VictimWay) := XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth)
    l0vpns(l0RefillIdx)(l0VictimWay) := XORFold(l0Wvpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth)

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === l0VictimWay
    }
  }
  XSDebug(l0Refill, p"[l0 refill] refillIdx:0x${Hexadecimal(l0RefillIdx)} victimWay:${l0VictimWay} victimWayOH:${Binary(l0VictimWayOH)} rfvOH(in UInt):${Cat(l0RefillIdx, l0VictimWay)}\n")
  XSDebug(l0Refill, p"[l0 refill] refilldata:0x${l0Wdata}\n")
  XSDebug(l0Refill, p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | l0RfvOH)}\n")
  XSDebug(l0Refill, p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U))}\n")

  // misc entries: super-page leaves (including Napot 4KB leaves) and page-fault-only entries
  val spRefill =
    !flush_dup(0) &&
    (refill.levelOH.sp || (refill.levelOH.l0 && memPte(0).isNapot(refill.level_dup(0)))) &&
    ((memPte(0).isLeaf() && memPte(0).canRefill(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte, io.csr_dup(0).hgatp.mode)) ||
    memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte))
  val spRefillIdx = spreplace.way.suggestName(s"sp_refillIdx") // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
  val spRfOH = UIntToOH(spRefillIdx).asUInt.suggestName(s"sp_rfOH")
  when (spRefill) {
    sp(spRefillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.vmid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte)
    )
    spreplace.access(spRefillIdx)
    spv := spv | spRfOH
    spg := spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U)
    sph(spRefillIdx) := refill_h(0)
    if (HasBitmapCheck) { updateSpBitmapReg(spBitmapReg, TranVec(~spRfOH)) }

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === spRefillIdx
    }
  }
  XSDebug(spRefill, p"[sp refill] refillIdx:${spRefillIdx} refillEntry:${sp(spRefillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
  XSDebug(spRefill, p"[sp refill] spv:${Binary(spv)}->${Binary(spv | spRfOH)} spg:${Binary(spg)}->${Binary(spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U))}\n")

  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }
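  // An ECC error invalidates every way of the set holding the faulty entry
  // (flushMask covers the whole set); the request then misses and the PTEs are
  // refetched from memory rather than corrected in place.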
1073
1074  // sfence logic
1075  val l0hashAsid = XORFold(sfence_dup(0).bits.id, l2tlbParams.hashAsidWidth)
1076  val l1hashAsid = XORFold(sfence_dup(1).bits.id, l2tlbParams.hashAsidWidth)
1077  val l0asidhit = VecInit(l0asids.flatMap(_.map(_ === l0hashAsid))).asUInt
1078  val l1asidhit = VecInit(l1asids.flatMap(_.map(_ === l1hashAsid))).asUInt
1079  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
1080  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
1081
1082  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
1083  when (sfence_valid) {
1084    val l0hashVmid = XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth)
1085    val l1hashVmid = XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth)
1086    val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt
1087    val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt
1088    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1089    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
1090
1091    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
1092    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
1093    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
1094    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
1095    val l0virthit = l0hhit & VecInit(l0vmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt
1096    val l1virthit = l1hhit & VecInit(l1vmidhit.asBools.map{a => io.csr_dup(1).priv.virt && a || !io.csr_dup(1).priv.virt}).asUInt
1097    val l2virthit = l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt
1098    val spvirthit = sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt
1099
1100    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
1101    val l0hashVpn = XORFold(sfence_vpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth)
1102    val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt
1103    val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(sfence_vpn))
1104    val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt
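        // Mask layout sketch (illustrative sizes, assuming 4 sets x 2 ways):
        // l0v packs the ways of one set contiguously, set-major, so a fence
        // hitting set 2 gives
        //   l0flushSetIdx = "b0100".U       // one-hot over sets
        //   l0flushMask   = "b00110000".U   // each set bit widened by Fill(nWays)
        // and the `l0v & ~(... & l0flushMask)` updates below clear only that set.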
1105
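        // In SfenceBundle, rs1/rs2 are decoded flags meaning "the register was
        // x0": rs1 set = fence all addresses, rs2 set = fence all ASIDs. Note
        // that the address-specific branches below only touch l0 and sp; the
        // non-leaf levels (l1/l2, and l3 for Sv48) are flushed only by the
        // all-address forms, matching the privileged spec's rule that software
        // fence with rs1=x0 after changing non-leaf PTEs.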
1106    when (sfence_dup(0).bits.rs1/*va*/) {
1107      when (sfence_dup(0).bits.rs2) {
1108        // all va && all asid
1109        l0v := l0v & ~l0virthit
1110        l1v := l1v & ~l1virthit
1111        l2v := l2v & ~l2virthit
1112        spv := spv & ~spvirthit
1113      } .otherwise {
1114        // all va && specific asid except global
1115        l0v := l0v & ~(l0virthit & ~l0g & l0asidhit)
1116        l1v := l1v & ~(l1virthit & ~l1g & l1asidhit)
1117        l2v := l2v & ~(l2virthit & ~l2g & l2asidhit)
1118        spv := spv & ~(spvirthit & ~spg & spasidhit)
1119      }
1120    } .otherwise {
1121      when (sfence_dup(0).bits.rs2) {
1122        // specific leaf of addr && all asid
1123        l0v := l0v & ~(l0virthit & l0vpnhit & l0flushMask)
1124        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
1125      } .otherwise {
1126        // specific leaf of addr && specific asid
1127        l0v := l0v & ~(l0virthit & ~l0g & l0asidhit & l0vpnhit & l0flushMask)
1128        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
1129      }
1130    }
1131  }
1132
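      // hfence.vvma: flush VS-stage entries (tagged onlyStage1). The vmid is
      // taken from the live hgatp rather than from the instruction, since
      // hfence.vvma is implicitly scoped to the currently active guest.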
1133  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
1134  when (hfencev_valid) {
1135    val l0hashVmid = XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth)
1136    val l1hashVmid = XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth)
1137    val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt
1138    val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt
1139    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1140    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
1141
1142    val l0hhit = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt
1143    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
1144    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
1145    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
1146
1147    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
1148    val l0hashVpn = XORFold(hfencev_vpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth)
1149    val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt
1150    val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(hfencev_vpn))
1151    val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt
1152
1153    when(sfence_dup(0).bits.rs1) {
1154      when(sfence_dup(0).bits.rs2) {
1155        l0v := l0v & ~(l0hhit & l0vmidhit)
1156        l1v := l1v & ~(l1hhit & l1vmidhit)
1157        l2v := l2v & ~(l2hhit & l2vmidhit)
1158        spv := spv & ~(sphhit & spvmidhit)
1159      }.otherwise {
1160        l0v := l0v & ~(l0hhit & l0vmidhit & ~l0g & l0asidhit)
1161        l1v := l1v & ~(l1hhit & l1vmidhit & ~l1g & l1asidhit)
1162        l2v := l2v & ~(l2hhit & l2vmidhit & ~l2g & l2asidhit)
1163        spv := spv & ~(sphhit & spvmidhit & ~spg & spasidhit)
1164      }
1165    }.otherwise {
1166      when(sfence_dup(0).bits.rs2) {
1167        l0v := l0v & ~(l0hhit & l0vmidhit & l0vpnhit & l0flushMask)
1168        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt)
1169      }.otherwise {
1170        l0v := l0v & ~(l0hhit & l0vmidhit & ~l0g & l0asidhit & l0vpnhit & l0flushMask)
1171        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt)
1172      }
1173    }
1174  }
1175
1176
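      // hfence.gvma: flush G-stage entries (tagged onlyStage2). Per the
      // privileged spec, rs1 carries a guest-physical address shifted right by
      // 2, hence the `addr << 2` below before the gvpn is extracted; the vmid
      // comes from rs2 (bits.id) instead of hgatp.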
1177  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
1178  when(hfenceg_valid) {
1179    val l0hashVmid = XORFold(sfence_dup(0).bits.id, l2tlbParams.hashAsidWidth)
1180    val l1hashVmid = XORFold(sfence_dup(1).bits.id, l2tlbParams.hashAsidWidth)
1181    val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt
1182    val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt
1183    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
1184    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
1185
1186    val l0hhit = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
1187    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
1188    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
1189    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
1190
1191    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
1192    val l0hashVpn = XORFold(hfenceg_gvpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth)
1193    val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt
1194    val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(hfenceg_gvpn))
1195    val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt
1196
1197    when(sfence_dup(0).bits.rs1) {
1198      when(sfence_dup(0).bits.rs2) {
1199        l0v := l0v & ~l0hhit
1200        l1v := l1v & ~l1hhit
1201        l2v := l2v & ~l2hhit
1202        spv := spv & ~sphhit
1203      }.otherwise {
1204        l0v := l0v & ~(l0hhit & l0vmidhit)
1205        l1v := l1v & ~(l1hhit & l1vmidhit)
1206        l2v := l2v & ~(l2hhit & l2vmidhit)
1207        spv := spv & ~(sphhit & spvmidhit)
1208      }
1209    }.otherwise {
1210      when(sfence_dup(0).bits.rs2) {
1211        l0v := l0v & ~(l0hhit & l0vpnhit & l0flushMask)
1212        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
1213      }.otherwise {
1214        l0v := l0v & ~(l0hhit & l0vmidhit & l0vpnhit & l0flushMask)
1215        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
1216      }
1217    }
1218  }
1219
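      // Sv48 adds the root-level l3 table. Like l1/l2 it caches only non-leaf
      // entries, so only the all-address fence forms flush it; the
      // address-specific forms leave l3v untouched.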
1220  if (EnableSv48) {
1221    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
1224
1225    when (sfence_valid) {
1226      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1227      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
1229
1230      when (sfence_dup(2).bits.rs1/*va*/) {
1231        when (sfence_dup(2).bits.rs2) {
1232          // all va && all asid
1233          l3v.foreach(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
1234        } .otherwise {
1235          // all va && specific asid except global
1236          l3v.foreach(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
1237        }
1238      }
1239    }
1240
1241    when (hfencev_valid) {
1242      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1243      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
1245      when(sfence_dup(2).bits.rs1) {
1246        when(sfence_dup(2).bits.rs2) {
1247          l3v.foreach(_ := l3v.get & ~(l3hhit & l3vmidhit))
1248        }.otherwise {
1249          l3v.foreach(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
1250        }
1251      }
1252    }
1253
1254    when (hfenceg_valid) {
1255      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
1256      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
1258      when(sfence_dup(2).bits.rs1) {
1259        when(sfence_dup(2).bits.rs2) {
1260          l3v.foreach(_ := l3v.get & ~l3hhit)
1261        }.otherwise {
1262          l3v.foreach(_ := l3v.get & ~(l3hhit & l3vmidhit))
1263        }
1264      }
1265    }
1266  }
1267
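      // The stage connector below is a combinational pass-through with one extra
      // duty: each bypassed flag is widened to also cover refills that matched the
      // request while it sat in the stage (bypassed_wire catches the current cycle,
      // bypassed_reg accumulates matches between inFires). A usage sketch, with
      // illustrative stage names:
      //   InsideStageConnect(stageReq, stageDelay, stageReq.fire)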
1268  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
1269    in.ready := !in.valid || out.ready
1270    out.valid := in.valid
1271    out.bits := in.bits
1272    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.foreach{ case (b, i) =>
1273      val bypassed_reg = Reg(Bool())
1274      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
1275      when (inFire) { bypassed_reg := bypassed_wire }
1276      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }
1277
1278      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
1279    }
1280  }
1281
1282  // Perf Count
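      // Counter naming scheme: a "pre_" prefix means the request came from the
      // prefetcher (from_pre); a "_pre" suffix means the entry that hit was
      // itself brought in by a prefetch (resp_*_pre); "_first" restricts to
      // first-time lookups (stageResp.bits.isFirst).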
1283  val resp_l0 = resp_res.l0.hit
1284  val resp_sp = resp_res.sp.hit
1285  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
1286  val resp_l2_pre = resp_res.l2.pre
1287  val resp_l1_pre = resp_res.l1.pre
1288  val resp_l0_pre = resp_res.l0.pre
1289  val resp_sp_pre = resp_res.sp.pre
1290  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1291  XSPerfAccumulate("access", base_valid_access_0)
1292  if (EnableSv48) {
1293    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1294  }
1295  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1296  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1297  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
1298  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
1299  XSPerfAccumulate("pte_hit",base_valid_access_0 && io.resp.bits.hit)
1300
1301  if (EnableSv48) {
1302    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1303  }
1304  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1305  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1306  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
1307  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
1308  XSPerfAccumulate("pte_hit_pre",base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1309
1310  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
1311  XSPerfAccumulate("pre_access", base_valid_access_1)
1312  if (EnableSv48) {
1313    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1314  }
1315  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1316  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1317  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
1318  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
1319  XSPerfAccumulate("pre_pte_hit",base_valid_access_1 && io.resp.bits.hit)
1320
1321  if (EnableSv48) {
1322    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1323  }
1324  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1325  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1326  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
1327  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
1328  XSPerfAccumulate("pre_pte_hit_pre",base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1329
1330  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1331  XSPerfAccumulate("access_first", base_valid_access_2)
1332  if (EnableSv48) {
1333    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1334  }
1335  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1336  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1337  XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0)
1338  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
1339  XSPerfAccumulate("pte_hit_first",base_valid_access_2 && io.resp.bits.hit)
1340
1341  if (EnableSv48) {
1342    XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1343  }
1344  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1345  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1346  XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0)
1347  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
1348  XSPerfAccumulate("pte_hit_pre_first",base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1349
1350  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
1351  XSPerfAccumulate("pre_access_first", base_valid_access_3)
1352  if (EnableSv48) {
1353    XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1354  }
1355  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1356  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1357  XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0)
1358  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
1359  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)
1360
1361  if (EnableSv48) {
1362    XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1363  }
1364  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1365  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1366  XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0)
1367  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
1368  XSPerfAccumulate("pre_pte_hit_pre_first",base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1369
1370  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
1371  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
1372  if (EnableSv48) {
1373    l3AccessPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) }
1374  }
1375  l2AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) }
1376  l1AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) }
1377  l0AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) }
1378  spAccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
1379  if (EnableSv48) {
1380    l3RefillPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
1381  }
1382  l2RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
1383  l1RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
1384  l0RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
1385  spRefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }
1386
1387  if (EnableSv48) {
1388    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
1389  }
1390  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
1391  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
1392  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
1393  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
1394  if (EnableSv48) {
1395    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
1396  }
1397  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
1398  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
1399  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
1400  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))
1401
1402  // debug
1403  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
1404  if (EnableSv48) {
1405    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
1406  }
1407  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
1408  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
1409  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
1410  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
1411  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
1412  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
1413  if (EnableSv48) {
1414    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
1415  }
1416  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
1417  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
1418  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
1419  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
1420  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")
1421
1422  val perfEvents = Seq(
1423    ("access           ", base_valid_access_0             ),
1424    ("l2_hit           ", l2Hit                           ),
1425    ("l1_hit           ", l1Hit                           ),
1426    ("l0_hit           ", l0Hit                           ),
1427    ("sp_hit           ", spHit                           ),
1428    ("pte_hit          ", l0Hit || spHit                  ),
1429    ("rwHarzad         ", io.req.valid && !io.req.ready   ),
1430    ("out_blocked      ", io.resp.valid && !io.resp.ready ),
1431  )
1432  generatePerfEvent()
1433}
1434