xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision d78a17c1d883132bf47d00d463dc9817c6a2dd0b)
/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import coupledL2.utils.SplittedSRAM
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache holds page table entries from all levels of the page table.
 * It responds in the next cycle and is itself non-blocking:
 * when the miss queue is full, requests are blocked outside the cache.
 */

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val pbmt = UInt(ptePbmtLen.W)
  val perm = new PtePermBundle()
  val n = UInt(pteNLen.W)
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U, n: UInt = 0.U,
            perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.n := n
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = Vec(tlbcontiguous, 0.U),
            perm: Vec[PtePermBundle] = Vec(tlbcontiguous, 0.U.asTypeOf(new PtePermBundle())),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = Vec(tlbcontiguous, true.B)): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}
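
// NOTE: bypassed(i) marks that, while this request was in flight, a refill to
// page-table level i matched its vpn, so a miss at that level may already be
// satisfied and the request should re-access the cache (see refill_bypass in
// PtwCache below).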

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // find stage 1 pte in cache, but need to search stage 2 pte in cache at PTW
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, one for each class of page cache entries
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}
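
// sfence_dup and csr_dup carry physical copies of the same logical signals, one
// per consumer, for the same fanout reason as req_info_dup/level_dup/sel_pte_dup.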

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits)

  // TODO: four caches make the code dirty; think about how to clean this up

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // during refill, refuse to accept new requests
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))           // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))          // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush))  // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready
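
  // Pipeline summary (one request in flight per stage):
  //   stageReq   - issue SRAM reads (l1/l0) and match the register arrays (l3/l2/sp)
  //   stageDelay - catch SRAM read data one cycle later
  //   stageCheck - select the hit way, decode ECC, update replacement state
  //   stageResp  - drive io.resp from the registered check result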

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SplittedSRAM(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    waySplit = 1,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }
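
  // l1v/l1g (and l0v/l0g below) are flat (nSets * nWays)-bit maps; the get*Set
  // helpers slice out one set so hit checks and flushes can mask by way.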

  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SplittedSRAM(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    waySplit = 2,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: level 3/2/1 leaf pte of 512GB/1GB/2MB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true, hasNapot = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))
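
  // The 2-bit h tags (l3h/l2h/l1h/l0h/sph) record which translation stage an
  // entry was filled for (noS2xlate / onlyStage1 / onlyStage2); allStage
  // requests are looked up and refilled as onlyStage1 (see h_search / refill_h).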

  // Access Perf
  val l3AccessPerf = if(EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3))
  }
  // NOTE: not actually a bypass; this only checks whether an in-flight refill
  // hits, so the request can re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }
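
  // Both the lookup key and the refill tag collapse allStage to onlyStage1:
  // an allStage walk caches its stage-1 result, so that is the entry a
  // concurrent refill could supply.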

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))

  // l3
  val l3Hit = if(EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if(EnableSv48) Some(Wire(UInt(ppnLen.W))) else None
  val l3HitPbmt = if(EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None
  val l3Pre = if(EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if(EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
        case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
          && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l3
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with other entries with RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l2
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPbmt, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }

  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer,l2tlbParams.l1nWays,l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)}  l1v:${vVec(i)}  hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

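  // SRAM lookups (l1 above, l0 below) span three pipeline stages: the read is
  // issued in stageReq, data is caught in stageDelay (DataHoldBypass), and way
  // selection plus ECC decode happen in stageCheck.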
  val l0_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(0) && refill.levelOH.l0), clock)
  val l1_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(1) && refill.levelOH.l1), clock)
  l0.clock := l0_masked_clock
  l1.clock := l1_masked_clock
  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer,l2tlbParams.l0nWays,l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)}  l0v:${vVec(i)}  hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPbmt = l0HitData.pbmts
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = VecInit(l0HitData.onlypf.map(!_))

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, allType = true, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get, l3HitPbmt.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitData.n.getOrElse(0.U), spHitPerm, false.B, spHitLevel, spValid)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )
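
  // The two trackers differ only in when the held bypass bit clears: the PTW
  // copy clears one cycle after stageCheck(1) fires or when a new refill
  // arrives, the HPTW copy when io.resp fires.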

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst  := stageResp.bits.isFirst
  io.resp.bits.hit      := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn   := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt)
  io.resp.bits.toHptw.resp.entry.n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := false.B

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i),
      Mux(resp_res.sp.hit, resp_res.sp.pbmt,
        Mux(resp_res.l1.hit, resp_res.l1.pbmt,
          resp_res.l2.pbmt)))
    io.resp.bits.stage1.entry(i).n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle))))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := false.B
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := Mux(resp_res.l0.hit, true.B, false.B)
  io.resp.bits.stage1.not_merge := false.B
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
  val mPBMTE = io.csr.mPBMTE
  val hPBMTE = io.csr.hPBMTE
  val pbmte = Mux(refill.req_info_dup(0).s2xlate === onlyStage1 || refill.req_info_dup(0).s2xlate === allStage, hPBMTE, mPBMTE)
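  // Pbmt legality for the refilled PTEs follows the walk's stage: VS-stage
  // walks (onlyStage1/allStage) check against hPBMTE (the henvcfg PBMTE bit),
  // others against mPBMTE (the menvcfg PBMTE bit).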

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    val l3Refill =
      !flush_dup(2) &&
      refill.levelOH.l3.get &&
      !memPte(2).isLeaf() &&
      memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
    val l3RefillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way).suggestName(s"l3_refillIdx")
    val l3RfOH = UIntToOH(l3RefillIdx).asUInt.suggestName(s"l3_rfOH")
    when (l3Refill) {
      l3.get(l3RefillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.vmid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(l3RefillIdx)
      l3v.get := l3v.get | l3RfOH
      l3g.get := (l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U)
      l3h.get(l3RefillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === l3RefillIdx
      }
    }
    XSDebug(l3Refill, p"[l3 refill] refillIdx:${l3RefillIdx} refillEntry:${l3.get(l3RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(l3Refill, p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | l3RfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U))}\n")
  }

  // L2 refill
  val l2Refill =
    !flush_dup(2) &&
    refill.levelOH.l2 &&
    !memPte(2).isLeaf() &&
    memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
  val l2RefillIdx = replaceWrapper(l2v, ptwl2replace.way).suggestName(s"l2_refillIdx")
  val l2RfOH = UIntToOH(l2RefillIdx).asUInt.suggestName(s"l2_rfOH")
  when (l2Refill) {
    l2(l2RefillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.vmid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(l2RefillIdx)
    l2v := l2v | l2RfOH
    l2g := (l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U)
    l2h(l2RefillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === l2RefillIdx
    }
  }
  XSDebug(l2Refill, p"[l2 refill] refillIdx:${l2RefillIdx} refillEntry:${l2(l2RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
  XSDebug(l2Refill, p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | l2RfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U))}\n")

  // L1 refill
  val l1Refill = !flush_dup(1) && refill.levelOH.l1
  val l1RefillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn).suggestName(s"l1_refillIdx")
  val l1VictimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(l1RefillIdx)).suggestName(s"l1_victimWay")
  val l1VictimWayOH = UIntToOH(l1VictimWay).suggestName(s"l1_victimWayOH")
  val l1RfvOH = UIntToOH(Cat(l1RefillIdx, l1VictimWay)).asUInt.suggestName(s"l1_rfvOH")
  val l1Wdata = Wire(l1EntryType)
  l1Wdata.gen(
    vpn = refill.req_info_dup(1).vpn,
    asid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
    vmid = io.csr_dup(1).hgatp.vmid,
    data = memRdata,
    levelUInt = 1.U,
    refill_prefetch_dup(1),
    refill.req_info_dup(1).s2xlate,
    pbmte,
    io.csr_dup(1).vsatp.mode
  )
  when (l1Refill) {
    l1.io.w.apply(
      valid = true.B,
      setIdx = l1RefillIdx,
      data = l1Wdata,
      waymask = l1VictimWayOH
    )
    ptwl1replace.access(l1RefillIdx, l1VictimWay)
    l1v := l1v | l1RfvOH
    l1g := l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U)
    l1h(l1RefillIdx)(l1VictimWay) := refill_h(1)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === l1VictimWay
    }
  }
  XSDebug(l1Refill, p"[l1 refill] refillIdx:0x${Hexadecimal(l1RefillIdx)} victimWay:${l1VictimWay} victimWayOH:${Binary(l1VictimWayOH)} rfvOH(in UInt):${Cat(l1RefillIdx, l1VictimWay)}\n")
  XSDebug(l1Refill, p"[l1 refill] refilldata:0x${l1Wdata}\n")
  XSDebug(l1Refill, p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | l1RfvOH)}\n")
  XSDebug(l1Refill, p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U))}\n")

  // L0 refill
  val l0Refill = !flush_dup(0) && refill.levelOH.l0 && !memPte(0).isNapot(refill.level_dup(0))
  val l0RefillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn).suggestName(s"l0_refillIdx")
  val l0VictimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(l0RefillIdx)).suggestName(s"l0_victimWay")
  val l0VictimWayOH = UIntToOH(l0VictimWay).asUInt.suggestName(s"l0_victimWayOH")
  val l0RfvOH = UIntToOH(Cat(l0RefillIdx, l0VictimWay)).suggestName(s"l0_rfvOH")
  val l0Wdata = Wire(l0EntryType)
  l0Wdata.gen(
    vpn = refill.req_info_dup(0).vpn,
    asid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
    vmid = io.csr_dup(0).hgatp.vmid,
    data = memRdata,
    levelUInt = 0.U,
    refill_prefetch_dup(0),
    refill.req_info_dup(0).s2xlate,
    pbmte,
    io.csr_dup(0).vsatp.mode
  )
  when (l0Refill) {
    l0.io.w.apply(
      valid = true.B,
      setIdx = l0RefillIdx,
      data = l0Wdata,
      waymask = l0VictimWayOH
    )
    ptwl0replace.access(l0RefillIdx, l0VictimWay)
    l0v := l0v | l0RfvOH
    l0g := l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U)
    l0h(l0RefillIdx)(l0VictimWay) := refill_h(0)

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === l0VictimWay
    }
  }
  XSDebug(l0Refill, p"[l0 refill] refillIdx:0x${Hexadecimal(l0RefillIdx)} victimWay:${l0VictimWay} victimWayOH:${Binary(l0VictimWayOH)} rfvOH(in UInt):${Cat(l0RefillIdx, l0VictimWay)}\n")
  XSDebug(l0Refill, p"[l0 refill] refilldata:0x${l0Wdata}\n")
  XSDebug(l0Refill, p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | l0RfvOH)}\n")
  XSDebug(l0Refill, p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U))}\n")

  // misc entries: super pages & invalid (page-fault-only) entries
  val spRefill =
    !flush_dup(0) &&
    (refill.levelOH.sp || (refill.levelOH.l0 && memPte(0).isNapot(refill.level_dup(0)))) &&
    ((memPte(0).isLeaf() && memPte(0).canRefill(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte, io.csr_dup(0).vsatp.mode)) ||
    memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte))
  val spRefillIdx = spreplace.way.suggestName(s"sp_refillIdx") // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
  val spRfOH = UIntToOH(spRefillIdx).asUInt.suggestName(s"sp_rfOH")
  when (spRefill) {
    sp(spRefillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.vmid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte)
    )
    spreplace.access(spRefillIdx)
    spv := spv | spRfOH
    spg := spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U)
    sph(spRefillIdx) := refill_h(0)

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === spRefillIdx
    }
  }
  XSDebug(spRefill, p"[sp refill] refillIdx:${spRefillIdx} refillEntry:${sp(spRefillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
  XSDebug(spRefill, p"[sp refill] spv:${Binary(spv)}->${Binary(spv | spRfOH)} spg:${Binary(spg)}->${Binary(spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U))}\n")

  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }
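
  // An ECC error invalidates every way of the indexed set; the erroring lookup
  // already reports a miss (hit is qualified with !ecc in check_res), so the
  // entry is refetched from memory.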

  // sfence for l0
  val sfence_valid_l0 = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid_l0) {
    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l0v := l0v & ~l0hhit
      } .otherwise {
        // all va && specific asid except global
        l0v := l0v & (l0g | ~l0hhit)
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbl1Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l0nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        l0v := l0v & ~flushMask & ~l0hhit
      } .otherwise {
        // specific leaf of addr && specific asid
        l0v := l0v & (~flushMask | l0g | ~l0hhit)
      }
    }
  }

  // hfencev, simple implementation for l0
  val hfencev_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when(hfencev_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt
    l0v := l0v & ~flushMask // all VS-stage l0 pte
  }

  // hfenceg, simple implementation for l0
  val hfenceg_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when(hfenceg_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
    l0v := l0v & ~flushMask // all G-stage l0 pte
  }

  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      }
    }
  }

  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when(sfence_dup(0).bits.rs1) {
      when(sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }.otherwise {
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & l2vmidhit)
        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
      }
    }.otherwise {
      when(sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt)
      }
    }
  }

  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when(hfenceg_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when(sfence_dup(0).bits.rs1) {
      when(sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~l2hhit
        spv := spv & ~sphhit
      }.otherwise {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }
    }.otherwise {
      when(sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }
    }
  }

  if (EnableSv48) {
    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
    val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt

    when (sfence_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
      val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)

      when (sfence_dup(2).bits.rs1/*va*/) {
        when (sfence_dup(2).bits.rs2) {
          // all va && all asid
          l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        } .otherwise {
          // all va && specific asid except global
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        }
      }
    }

    when (hfencev_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
      val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)
      when(sfence_dup(2).bits.rs1) {
        when(sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }.otherwise {
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
        }
      }
    }

    when (hfenceg_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
      val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen)
      when(sfence_dup(2).bits.rs1) {
        when(sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~l3hhit)
        }.otherwise {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }
      }
    }
  }

  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
      when (inFire) { bypassed_reg := bypassed_wire }
      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }

      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
    }
  }
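
  // bypassed_wire catches a refill that matches in the very cycle the request
  // advances; bypassed_reg keeps later matches sticky while the request waits
  // between stages, so no concurrent refill is lost.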
1076
1077  // Perf Count
1078  val resp_l0 = resp_res.l0.hit
1079  val resp_sp = resp_res.sp.hit
1080  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
1081  val resp_l2_pre = resp_res.l2.pre
1082  val resp_l1_pre = resp_res.l1.pre
1083  val resp_l0_pre = resp_res.l0.pre
1084  val resp_sp_pre = resp_res.sp.pre
1085  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1086  XSPerfAccumulate("access", base_valid_access_0)
1087  if (EnableSv48) {
1088    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1089  }
1090  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1091  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1092  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
1093  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
1094  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)
1095
1096  if (EnableSv48) {
1097    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1098  }
1099  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1100  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1101  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
1102  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
1103  XSPerfAccumulate("pte_hit_pre", base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1104
1105  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
1106  XSPerfAccumulate("pre_access", base_valid_access_1)
1107  if (EnableSv48) {
1108    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1109  }
1110  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1111  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1112  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
1113  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
1114  XSPerfAccumulate("pre_pte_hit", base_valid_access_1 && io.resp.bits.hit)
1115
1116  if (EnableSv48) {
1117    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1118  }
1119  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1120  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1121  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
1122  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
1123  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1124
1125  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1126  XSPerfAccumulate("access_first", base_valid_access_2)
1127  if (EnableSv48) {
1128    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1129  }
1130  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1131  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1132  XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0)
1133  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
1134  XSPerfAccumulate("pte_hit_first", base_valid_access_2 && io.resp.bits.hit)
1135
1136  if (EnableSv48) {
1137    XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1138  }
1139  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1140  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1141  XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0)
1142  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
1143  XSPerfAccumulate("pte_hit_pre_first", base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1144
1145  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
1146  XSPerfAccumulate("pre_access_first", base_valid_access_3)
1147  if (EnableSv48) {
1148    XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1149  }
1150  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1151  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1152  XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0)
1153  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
1154  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)
1155
1156  if (EnableSv48) {
1157    XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1158  }
1159  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1160  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1161  XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0)
1162  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
1163  XSPerfAccumulate("pre_pte_hit_pre_first", base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1164
1165  XSPerfAccumulate("rwHazard", io.req.valid && !io.req.ready)
1166  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
1167  if (EnableSv48) {
1168    l3AccessPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) }
1169  }
1170  l2AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) }
1171  l1AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) }
1172  l0AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) }
1173  spAccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
1174  if (EnableSv48) {
1175    l3RefillPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
1176  }
1177  l2RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
1178  l1RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
1179  l0RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
1180  spRefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }
1181
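  // Each lXRefillPerf element is a per-way refill pulse, so Cat(...).orR collapses the
  // vector to "at least one way refilled this cycle": the aggregate counters below
  // count refill events, not refilled ways. refill_prefetch_dup(0) further restricts
  // the *_pre variants to refills triggered by prefetch requests.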
1182  if (EnableSv48) {
1183    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
1184  }
1185  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
1186  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
1187  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
1188  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
1189  if (EnableSv48) {
1190    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
1191  }
1192  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
1193  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
1194  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
1195  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))
1196
1197  // debug: print the valid/global vectors when an sfence arrives and again one cycle later (RegNext), so the log shows the effect of the flush
1198  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
1199  if (EnableSv48) {
1200    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
1201  }
1202  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
1203  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
1204  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
1205  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
1206  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
1207  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
1208  if (EnableSv48) {
1209    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
1210  }
1211  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
1212  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
1213  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
1214  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
1215  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")
1216
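  // Named events exported to the core's hardware performance monitor; generatePerfEvent()
  // (from the HasPerfEvents trait) registers this Seq. The padded names keep logs aligned.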
1217  val perfEvents = Seq(
1218    ("access           ", base_valid_access_0             ),
1219    ("l2_hit           ", l2Hit                           ),
1220    ("l1_hit           ", l1Hit                           ),
1221    ("l0_hit           ", l0Hit                           ),
1222    ("sp_hit           ", spHit                           ),
1223    ("pte_hit          ", l0Hit || spHit                  ),
1224    ("rwHazard         ", io.req.valid && !io.req.ready   ),
1225    ("out_blocked      ", io.resp.valid && !io.resp.ready ),
1226  )
1227  generatePerfEvent()
1228}
1229