xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision 33177a7c6ea22740da90c7bdc8eed306ef2cfda3)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.cache.mmu
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import chisel3.internal.naming.chiselName
23import xiangshan._
24import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
25import utils._
26import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
27import freechips.rocketchip.tilelink._
28
29/* ptw cache caches the page table of all the three layers
30 * ptw cache resp at next cycle
31 * the cache should not be blocked
32* when miss queue is full, just block req outside
33 */
// IO bundle of the PTW (page-table-walker) cache.
34class PtwCacheIO()(implicit p: Parameters) extends PtwBundle {
  // Lookup request: virtual page number plus requester id.
  // isReplay marks a request replayed from the miss queue; such requests
  // bypass the second_ready back-pressure (see io.req.ready in PtwCache).
35  val req = Flipped(DecoupledIO(new Bundle {
36    val vpn = UInt(vpnLen.W)
37    val source = UInt(bPtwWidth.W)
38    val isReplay = Bool()
39  }))
  // Response, one cycle after the accepted request at the earliest.
40  val resp = DecoupledIO(new Bundle {
41    val source = UInt(bPtwWidth.W)
42    val vpn = UInt(vpnLen.W)
43    val isReplay = Bool()
    // hit: a leaf translation was found (l3 4KB entry or super page).
44    val hit = Bool()
    // Partial-hit info handed to the walker FSM so a miss can resume the
    // walk from the deepest cached non-leaf level (l2 preferred over l1).
45    val toFsm = new Bundle {
46      val l1Hit = Bool()
47      val l2Hit = Bool()
48      val ppn = UInt(ppnLen.W)
49    }
    // Full entry forwarded to the TLB on a leaf hit.
50    val toTlb = new PtwEntry(tagLen = vpnLen, hasPerm = true, hasLevel = true)
51  })
  // Refill port: one memory block of PTEs returned by the walker.
  // addr_low selects the single PTE of interest (XLEN bits) inside the block.
52  val refill = Flipped(ValidIO(new Bundle {
53    val ptes = UInt(blockBits.W)
54    val vpn = UInt(vpnLen.W)
55    val level = UInt(log2Up(Level).W)
56    val addr_low = UInt((log2Up(l2tlbParams.blockBytes) - log2Up(XLEN/8)).W)
57  }))
  // sfence.vma flush request; also aborts in-flight lookups (see ValidHold uses).
58  val sfence = Input(new SfenceBundle)
59}
60
61
/** PtwCache: non-blocking cache over all three page-table levels.
 *  Two-stage pipeline: stage 1 ("first_*") issues the SRAM reads and the
 *  register CAM compares; stage 2 ("second_*") collects the hit results and
 *  drives io.resp. l1 (level-0 non-leaf) and sp (super-page leaf) live in
 *  registers; l2/l3 are set-associative SRAMs protected by ECC.
 */
62@chiselName
63class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst {
64  val io = IO(new PtwCacheIO)
65
  // ECC code selected by configuration; wraps the l2/l3 SRAM entry types.
66  val ecc = Code.fromString(l2tlbParams.ecc)
67  val l2EntryType = new PTWEntriesWithEcc(ecc, num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)
68  val l3EntryType = new PTWEntriesWithEcc(ecc, num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)
69
70  // TODO: four caches make the codes dirty, think about how to deal with it
71
72  val sfence = io.sfence
73  val refill = io.refill.bits
74
  // Stage-1 handshake; second_valid holds from first_fire until the response
  // fires, and is killed by sfence (presumably ValidHold(set, clear, flush)
  // semantics — TODO confirm against the ValidHold utility).
75  val first_valid = io.req.valid
76  val first_fire = first_valid && io.req.ready
77  val first_req = io.req.bits
78  val second_ready = Wire(Bool())
79  val second_valid = ValidHold(first_fire, io.resp.fire(), sfence.valid)
80  val second_req = RegEnable(first_req, first_fire)
81  // NOTE: if ptw cache resp may be blocked, hard to handle refill
82  // when the miss queue is full, block itlb and dtlb input outside this module
83
84  // when refilling a single-port SRAM, refuse to accept a new req (read/write hazard)
  // NOTE(review): "rwHarzad" is presumably a typo for "rwHazard" — rename candidate.
85  val rwHarzad = if (sramSinglePort) io.refill.valid else false.B
  // Replayed requests are always let in: the caller guarantees resp can fire.
86  io.req.ready := !rwHarzad && (second_ready || io.req.bits.isReplay)
87  // NOTE: while a refill write is in flight, do not assert ready;
88  //       when a replayed request comes in, the outside must make sure resp.fire()
89
90  // l1: level 0 non-leaf pte, fully-associative register array
91  val l1 = Reg(Vec(l2tlbParams.l1Size, new PtwEntry(tagLen = PtwL1TagLen)))
  // l1v: per-entry valid bits; l1g: per-entry global (G) bits, used by sfence.
92  val l1v = RegInit(0.U(l2tlbParams.l1Size.W))
93  val l1g = Reg(UInt(l2tlbParams.l1Size.W))
94
95  // l2: level 1 non-leaf pte, set-associative SRAM
96  val l2 = Module(new SRAMTemplate(
97    l2EntryType,
98    set = l2tlbParams.l2nSets,
99    way = l2tlbParams.l2nWays,
100    singlePort = sramSinglePort
101  ))
  // Flat valid/global bit vectors, one bit per (set, way).
102  val l2v = RegInit(0.U((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
103  val l2g = Reg(UInt((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  // Returns the l2nWays valid bits of the set that vpn maps to.
104  def getl2vSet(vpn: UInt) = {
105    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
106    val set = genPtwL2SetIdx(vpn)
107    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
108    val l2vVec = l2v.asTypeOf(Vec(l2tlbParams.l2nSets, UInt(l2tlbParams.l2nWays.W)))
109    l2vVec(set)
110  }
111
112  // l3: level 2 leaf pte of 4KB pages, set-associative SRAM
113  val l3 = Module(new SRAMTemplate(
114    l3EntryType,
115    set = l2tlbParams.l3nSets,
116    way = l2tlbParams.l3nWays,
117    singlePort = sramSinglePort
118  ))
119  val l3v = RegInit(0.U((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
120  val l3g = Reg(UInt((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  // Returns the l3nWays valid bits of the set that vpn maps to.
121  def getl3vSet(vpn: UInt) = {
122    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
123    val set = genPtwL3SetIdx(vpn)
124    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
125    val l3vVec = l3v.asTypeOf(Vec(l2tlbParams.l3nSets, UInt(l2tlbParams.l3nWays.W)))
126    l3vVec(set)
127  }
128
129  // sp: level 0/1 leaf pte of 1GB/2MB super pages, fully-associative registers
130  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
131  val spv = RegInit(0.U(l2tlbParams.spSize.W))
132  val spg = Reg(UInt(l2tlbParams.spSize.W))
133
  // Access Perf: per-entry/per-way access pulses, defaulted low and set in
  // the hit blocks below; counted at the bottom of the module.
134  // Access Perf
135  val l1AccessPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
136  val l2AccessPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
137  val l3AccessPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
138  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
139  l1AccessPerf.map(_ := false.B)
140  l2AccessPerf.map(_ := false.B)
141  l3AccessPerf.map(_ := false.B)
142  spAccessPerf.map(_ := false.B)
143
  // l1 lookup: CAM compare in stage 1 (first_req), result registered into
  // stage 2; hit PPN muxed from the (unregistered) entry array.
144  // l1
145  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1Size)
146  val (l1Hit, l1HitPPN) = {
147    val hitVecT = l1.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && l1v(i) }
148    val hitVec = hitVecT.map(RegEnable(_, first_fire))
149    val hitPPN = ParallelPriorityMux(hitVec zip l1.map(_.ppn))
150    val hit = ParallelOR(hitVec) && second_valid
151
152    when (hit) { ptwl1replace.access(OHToUInt(hitVec)) }
153
154    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire)}
155    for (i <- 0 until l2tlbParams.l1Size) {
156      XSDebug(first_fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(first_req.vpn)}\n")
157    }
158    XSDebug(first_fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
159    XSDebug(second_valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")
160
161    VecInit(hitVecT).suggestName(s"l1_hitVecT")
162    VecInit(hitVec).suggestName(s"l1_hitVec")
163
164    (hit, hitPPN)
165  }
166
  // l2 lookup: SRAM read issued at first_fire with the stage-1 set index;
  // read data and the registered valid bits (vidx) are compared against the
  // stage-2 vpn one cycle later. A hit with an ECC error is reported
  // separately as l2eccError and suppresses the hit.
167  // l2
168  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer,l2tlbParams.l2nWays,l2tlbParams.l2nSets)
169  val (l2Hit, l2HitPPN, l2eccError) = {
170    val ridx = genPtwL2SetIdx(first_req.vpn)
171    val vidx = RegEnable(VecInit(getl2vSet(first_req.vpn).asBools), first_fire)
172    l2.io.r.req.valid := first_fire
173    l2.io.r.req.bits.apply(setIdx = ridx)
174    val ramDatas = l2.io.r.resp.data
175    // val hitVec = VecInit(ramDatas.map{wayData => wayData.hit(first_req.vpn) })
176    val hitVec = VecInit(ramDatas.zip(vidx).map { case (wayData, v) => wayData.entries.hit(second_req.vpn) && v })
177    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
178    val hitWayData = hitWayEntry.entries
179    val hitWayEcc = hitWayEntry.ecc
180    val hit = ParallelOR(hitVec) && second_valid
181    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l2nWays).map(_.U))
182
    // Re-check the stored ECC bits against the data actually read out.
183    val eccError = ecc.decode(Cat(hitWayEcc, hitWayData.asUInt())).error
184
185    ridx.suggestName(s"l2_ridx")
186    vidx.suggestName(s"l2_vidx")
187    ramDatas.suggestName(s"l2_ramDatas")
188    hitVec.suggestName(s"l2_hitVec")
189    hitWayData.suggestName(s"l2_hitWayData")
190    hitWay.suggestName(s"l2_hitWay")
191
192    when (hit) { ptwl2replace.access(genPtwL2SetIdx(second_req.vpn), hitWay) }
193
194    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
195    XSDebug(first_fire, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
196    for (i <- 0 until l2tlbParams.l2nWays) {
197      XSDebug(RegNext(first_fire), p"[l2] ramDatas(${i.U}) ${ramDatas(i)}  l2v:${vidx(i)}  hit:${ramDatas(i).entries.hit(second_req.vpn)}\n")
198    }
199    XSDebug(second_valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")
200
201    (hit && !eccError, hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)), hit && eccError)
202  }
203
  // l3 lookup: same one-cycle SRAM scheme as l2; returns the whole hit
  // entry group (ppns + perms) instead of a single ppn.
204  // l3
205  val ptwl3replace = ReplacementPolicy.fromString(l2tlbParams.l3Replacer,l2tlbParams.l3nWays,l2tlbParams.l3nSets)
206  val (l3Hit, l3HitData, l3eccError) = {
207    val ridx = genPtwL3SetIdx(first_req.vpn)
208    val vidx = RegEnable(VecInit(getl3vSet(first_req.vpn).asBools), first_fire)
209    l3.io.r.req.valid := first_fire
210    l3.io.r.req.bits.apply(setIdx = ridx)
211    val ramDatas = l3.io.r.resp.data
212    val hitVec = VecInit(ramDatas.zip(vidx).map{ case (wayData, v) => wayData.entries.hit(second_req.vpn) && v })
213    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
214    val hitWayData = hitWayEntry.entries
215    val hitWayEcc = hitWayEntry.ecc
216    val hit = ParallelOR(hitVec) && second_valid
217    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l3nWays).map(_.U))
218
219    val eccError = ecc.decode(Cat(hitWayEcc, hitWayData.asUInt())).error
220
221    when (hit) { ptwl3replace.access(genPtwL3SetIdx(second_req.vpn), hitWay) }
222
223    l3AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
224    XSDebug(first_fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
225    for (i <- 0 until l2tlbParams.l3nWays) {
226      XSDebug(RegNext(first_fire), p"[l3] ramDatas(${i.U}) ${ramDatas(i)}  l3v:${vidx(i)}  hit:${ramDatas(i).entries.hit(second_req.vpn)}\n")
227    }
228    XSDebug(second_valid, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")
229
230    ridx.suggestName(s"l3_ridx")
231    vidx.suggestName(s"l3_vidx")
232    ramDatas.suggestName(s"l3_ramDatas")
233    hitVec.suggestName(s"l3_hitVec")
234    hitWay.suggestName(s"l3_hitWay")
235
236    (hit && !eccError, hitWayData, hit && eccError)
237  }
  // Select the sector (single PTE) of the hit l3 entry group by the low vpn bits.
238  val l3HitPPN = l3HitData.ppns(genPtwL3SectorIdx(second_req.vpn))
239  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))(genPtwL3SectorIdx(second_req.vpn))
240
  // super page lookup: register CAM, same stage-1/stage-2 split as l1.
241  // super page
242  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
243  val (spHit, spHitData) = {
244    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && spv(i) }
245    val hitVec = hitVecT.map(RegEnable(_, first_fire))
246    val hitData = ParallelPriorityMux(hitVec zip sp)
247    val hit = ParallelOR(hitVec) && second_valid
248
249    when (hit) { spreplace.access(OHToUInt(hitVec)) }
250
251    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && RegNext(first_fire) }
252    for (i <- 0 until l2tlbParams.spSize) {
253      XSDebug(first_fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(first_req.vpn)} spv:${spv(i)}\n")
254    }
255    XSDebug(second_valid, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")
256
257    VecInit(hitVecT).suggestName(s"sp_hitVecT")
258    VecInit(hitVec).suggestName(s"sp_hitVec")
259
260    (hit, hitData)
261  }
262  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
263  val spHitLevel = spHitData.level.getOrElse(0.U)
264
  // Response assembly. If io.resp is back-pressured, the combinational resp
  // is captured into resp_latch and replayed once ready returns; a new
  // request is only accepted (second_ready) when neither the stage-2 slot
  // nor the latch is occupied, or the response fires this cycle.
265  val resp = Wire(io.resp.bits.cloneType)
266  val resp_latch = RegEnable(resp, io.resp.valid && !io.resp.ready)
267  val resp_latch_valid = ValidHold(io.resp.valid && !io.resp.ready, io.resp.ready, sfence.valid)
268  second_ready := !(second_valid || resp_latch_valid) || io.resp.fire()
269  resp.source   := second_req.source
270  resp.vpn      := second_req.vpn
271  resp.isReplay := second_req.isReplay
272  resp.hit      := l3Hit || spHit
273  resp.toFsm.l1Hit := l1Hit
274  resp.toFsm.l2Hit := l2Hit
  // Resume point for the walker: deepest cached non-leaf level wins.
275  resp.toFsm.ppn   := Mux(l2Hit, l2HitPPN, l1HitPPN)
276  resp.toTlb.tag   := second_req.vpn
277  resp.toTlb.ppn   := Mux(l3Hit, l3HitPPN, spHitData.ppn)
278  resp.toTlb.perm.map(_ := Mux(l3Hit, l3HitPerm, spHitPerm))
  // Level 2 for a 4KB leaf; the stored level for a super page.
279  resp.toTlb.level.map(_ := Mux(l3Hit, 2.U, spHitLevel))
280
281  io.resp.valid := second_valid
282  io.resp.bits := Mux(resp_latch_valid, resp_latch, resp)
  // A vpn must not hit both a 4KB leaf and a super page simultaneously.
283  assert(!(l3Hit && spHit), "normal page and super page both hit")
284
  // refill Perf: per-entry/per-way refill pulses, mirroring the access perf wires.
285  // refill Perf
286  val l1RefillPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
287  val l2RefillPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
288  val l3RefillPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
289  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
290  l1RefillPerf.map(_ := false.B)
291  l2RefillPerf.map(_ := false.B)
292  l3RefillPerf.map(_ := false.B)
293  spRefillPerf.map(_ := false.B)
294
  // refill: default the SRAM write ports off; the level-specific when-blocks
  // below drive them for the cycle a refill arrives.
295  // refill
296  l2.io.w.req <> DontCare
297  l3.io.w.req <> DontCare
298  l2.io.w.req.valid := false.B
299  l3.io.w.req.valid := false.B
300
  // Slice one XLEN-wide PTE out of a refilled memory block.
301  def get_part(data: UInt, index: UInt): UInt = {
302    val inner_data = data.asTypeOf(Vec(data.getWidth / XLEN, UInt(XLEN.W)))
303    inner_data(index)
304  }
305
306  val memRdata = refill.ptes
307  val memSelData = get_part(memRdata, refill.addr_low)
308  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
309  val memPte = memSelData.asTypeOf(new PteBundle)
310
311  memPte.suggestName("memPte")
312
  // Refill dispatch: faulting PTEs are never cached, and a refill racing
  // with sfence is dropped. Destination is chosen by (level, isLeaf):
  // level 0 non-leaf -> l1; level 1 non-leaf -> l2; level 2 leaf -> l3;
  // level 0/1 leaf (super page) -> sp.
313  // TODO: handle sfenceLatch outsize
314  when (io.refill.valid && !memPte.isPf(refill.level) && !sfence.valid ) {
315    when (refill.level === 0.U && !memPte.isLeaf()) {
316      // val refillIdx = LFSR64()(log2Up(l2tlbParams.l1Size)-1,0) // TODO: may be LRU
      // Prefer an invalid entry; otherwise take the replacer's victim.
317      val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
318      refillIdx.suggestName(s"PtwL1RefillIdx")
319      val rfOH = UIntToOH(refillIdx)
320      l1(refillIdx).refill(refill.vpn, memSelData)
321      ptwl1replace.access(refillIdx)
322      l1v := l1v | rfOH
      // Track the PTE's global bit so asid-specific sfence can spare it.
323      l1g := (l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U)
324
325      for (i <- 0 until l2tlbParams.l1Size) {
326        l1RefillPerf(i) := i.U === refillIdx
327      }
328
329      XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.vpn, memSelData)}\n")
330      XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U))}\n")
331
332      refillIdx.suggestName(s"l1_refillIdx")
333      rfOH.suggestName(s"l1_rfOH")
334    }
335
336    when (refill.level === 1.U && !memPte.isLeaf()) {
337      val refillIdx = genPtwL2SetIdx(refill.vpn)
      // NOTE(review): the valid bits used for victim selection are registered
      // with first_fire (a lookup event) yet indexed by refill.vpn (a refill
      // event) — looks like a timing/indexing mismatch, confirm intent.
338      val victimWay = replaceWrapper(RegEnable(VecInit(getl2vSet(refill.vpn).asBools).asUInt, first_fire), ptwl2replace.way(refillIdx))
339      val victimWayOH = UIntToOH(victimWay)
340      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
341      val wdata = Wire(l2EntryType)
342      wdata.entries := wdata.entries.genEntries(vpn = refill.vpn, data = memRdata, levelUInt = 1.U)
      // Store only the ECC check bits (encode returns data ++ ecc; shift data off).
343      wdata.ecc := ecc.encode(wdata.entries.asUInt()) >> wdata.entries.getWidth
344      l2.io.w.apply(
345        valid = true.B,
346        setIdx = refillIdx,
347        data = wdata,
348        waymask = victimWayOH
349      )
350      ptwl2replace.access(refillIdx, victimWay)
351      l2v := l2v | rfvOH
      // The whole sector is global only if every PTE in the block is global.
352      l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
353
354      for (i <- 0 until l2tlbParams.l2nWays) {
355        l2RefillPerf(i) := i.U === victimWay
356      }
357
358      XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
359      XSDebug(p"[l2 refill] refilldata:0x${
360        (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
361          vpn = refill.vpn, data = memRdata, levelUInt = 1.U)
362      }\n")
363      XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
364      XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")
365
366      refillIdx.suggestName(s"l2_refillIdx")
367      victimWay.suggestName(s"l2_victimWay")
368      victimWayOH.suggestName(s"l2_victimWayOH")
369      rfvOH.suggestName(s"l2_rfvOH")
370    }
371
372    when (refill.level === 2.U && memPte.isLeaf()) {
373      val refillIdx = genPtwL3SetIdx(refill.vpn)
      // NOTE(review): same first_fire-enabled valid-bit snapshot as the l2
      // refill above — confirm intent.
374      val victimWay = replaceWrapper(RegEnable(VecInit(getl3vSet(refill.vpn).asBools).asUInt, first_fire), ptwl3replace.way(refillIdx))
375      val victimWayOH = UIntToOH(victimWay)
376      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
377      val wdata = Wire(l3EntryType)
378      wdata.entries := wdata.entries.genEntries(vpn = refill.vpn, data = memRdata, levelUInt = 2.U)
379      wdata.ecc := ecc.encode(wdata.entries.asUInt()) >> wdata.entries.getWidth
380      l3.io.w.apply(
381        valid = true.B,
382        setIdx = refillIdx,
383        data = wdata,
384        waymask = victimWayOH
385      )
386      ptwl3replace.access(refillIdx, victimWay)
387      l3v := l3v | rfvOH
388      l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
389
390        for (i <- 0 until l2tlbParams.l3nWays) {
391          l3RefillPerf(i) := i.U === victimWay
392        }
393
394      XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
395      XSDebug(p"[l3 refill] refilldata:0x${
396        (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
397          vpn = refill.vpn, data = memRdata, levelUInt = 2.U)
398      }\n")
399      XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
400      XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")
401
402      refillIdx.suggestName(s"l3_refillIdx")
403      victimWay.suggestName(s"l3_victimWay")
404      victimWayOH.suggestName(s"l3_victimWayOH")
405      rfvOH.suggestName(s"l3_rfvOH")
406    }
    // Leaf at a non-last level = super page (1GB at level 0, 2MB at level 1).
407    when ((refill.level === 0.U || refill.level === 1.U) && memPte.isLeaf()) {
408      val refillIdx = spreplace.way// LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
409      val rfOH = UIntToOH(refillIdx)
410      sp(refillIdx).refill(refill.vpn, memSelData, refill.level)
411      spreplace.access(refillIdx)
412      spv := spv | rfOH
413      spg := spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U)
414
415      for (i <- 0 until l2tlbParams.spSize) {
416        spRefillPerf(i) := i.U === refillIdx
417      }
418
419      XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.vpn, memSelData, refill.level)}\n")
420      XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U))}\n")
421
422      refillIdx.suggestName(s"sp_refillIdx")
423      rfOH.suggestName(s"sp_rfOH")
424    }
425  }
426
  // ECC-error recovery: one cycle after a hit with a failing ECC check,
  // invalidate the whole set the offending vpn maps to (the hit itself was
  // already suppressed above, so the requester will miss and re-walk).
427  val l2eccFlush = RegNext(l2eccError, init = false.B)
428  val l3eccFlush = RegNext(l3eccError, init = false.B)
429  val eccVpn = RegNext(second_req.vpn)
430
  // NOTE(review): these asserts fire on ANY detected ECC error, which makes
  // the flush logic below unreachable in simulation — confirm whether they
  // are debug-only scaffolding meant to be removed.
431  assert(!l2eccFlush)
432  assert(!l3eccFlush)
433  when (l2eccFlush) {
434    val flushSetIdxOH = UIntToOH(genPtwL2SetIdx(eccVpn))
435    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l2nWays, a.asUInt) }).asUInt
436    l2v := l2v & ~flushMask
437    l2g := l2g & ~flushMask
438  }
439
440  when (l3eccFlush) {
441    val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(eccVpn))
442    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
443    l3v := l3v & ~flushMask
444    l3g := l3g & ~flushMask
445  }
446
  // sfence flush. These last-connect assignments override the refill/ECC
  // updates above in the same cycle. rs1 set = "all addresses";
  // rs2 set = "all asids" (global entries spared otherwise).
447  // sfence
448  when (sfence.valid) {
449    when (sfence.bits.rs1/*va*/) {
450      when (sfence.bits.rs2) {
451        // all va && all asid
452        l1v := 0.U
453        l2v := 0.U
454        l3v := 0.U
455        spv := 0.U
456      } .otherwise {
457        // all va && specific asid except global
458        l1v := l1v & l1g
459        l2v := l2v & l2g
460        l3v := l3v & l3g
461        spv := spv & spg
462      }
463    } .otherwise {
      // Address-specific flush: only the l3 set holding that page is
      // invalidated; non-leaf l1/l2 are kept.
464      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
465      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
466      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
467      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
468      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
469      flushMask.suggestName(s"sfence_nrs1_flushMask")
470      when (sfence.bits.rs2) {
471        // specific leaf of addr && all asid
472        l3v := l3v & ~flushMask
473        l3g := l3g & ~flushMask
474      } .otherwise {
475        // specific leaf of addr && specific asid
476        l3v := l3v & (~flushMask | l3g)
477      }
      // Super pages are dropped wholesale on any address-specific sfence —
      // presumably because they cannot be matched by a single l3 set index.
478      spv := 0.U
479    }
480  }
481
482  // Perf Count
483  XSPerfAccumulate("access", second_valid)
484  XSPerfAccumulate("l1_hit", l1Hit)
485  XSPerfAccumulate("l2_hit", l2Hit)
486  XSPerfAccumulate("l3_hit", l3Hit)
487  XSPerfAccumulate("sp_hit", spHit)
488  XSPerfAccumulate("pte_hit", l3Hit || spHit)
489  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
490  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
491  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
492  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
493  l3AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
494  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
495  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
496  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
497  l3RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
498  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }
499
  // Debug: dump valid/global vectors before and after an sfence flush.
500  // debug
501  XSDebug(sfence.valid, p"[sfence] original v and g vector:\n")
502  XSDebug(sfence.valid, p"[sfence] l1v:${Binary(l1v)}\n")
503  XSDebug(sfence.valid, p"[sfence] l2v:${Binary(l2v)}\n")
504  XSDebug(sfence.valid, p"[sfence] l3v:${Binary(l3v)}\n")
505  XSDebug(sfence.valid, p"[sfence] l3g:${Binary(l3g)}\n")
506  XSDebug(sfence.valid, p"[sfence] spv:${Binary(spv)}\n")
507  XSDebug(RegNext(sfence.valid), p"[sfence] new v and g vector:\n")
508  XSDebug(RegNext(sfence.valid), p"[sfence] l1v:${Binary(l1v)}\n")
509  XSDebug(RegNext(sfence.valid), p"[sfence] l2v:${Binary(l2v)}\n")
510  XSDebug(RegNext(sfence.valid), p"[sfence] l3v:${Binary(l3v)}\n")
511  XSDebug(RegNext(sfence.valid), p"[sfence] l3g:${Binary(l3g)}\n")
512  XSDebug(RegNext(sfence.valid), p"[sfence] spv:${Binary(spv)}\n")
513}
514