// xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision 6d5ddbce72c9c67dcf0ec08cc682a9545ceb5f6c)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* ptw cache caches the page table of all the three layers
 * ptw cache resp at next cycle
 * the cache should not be blocked
 * when miss queue is full, just block req outside
 */
class PtwCacheIO()(implicit p: Parameters) extends PtwBundle {
  // lookup request, from itlb/dtlb or replayed from the miss queue
  val req = Flipped(DecoupledIO(new Bundle {
    val vpn = UInt(vpnLen.W)
    val source = UInt(bPtwWidth.W)
    val isReplay = Bool()
  }))
  // lookup result, presented the cycle after req fires
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val vpn = UInt(vpnLen.W)
    val isReplay = Bool()
    val hit = Bool()          // leaf hit (l3 4KB page or super page): walk not needed
    val toFsm = new Bundle {  // partial (non-leaf) hit info for the page-walk FSM
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = UInt(ppnLen.W)
    }
    val toTlb = new PtwEntry(tagLen = vpnLen, hasPerm = true, hasLevel = true)
  })
  // refill port: a memory beat of PTEs to be written into the cache
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(MemBandWidth.W)
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val memAddr = Input(UInt(PAddrBits.W))
  }))
  val sfence = Input(new SfenceBundle)
  val refuseRefill = Input(Bool())
}
59*6d5ddbceSLemover
/* Page table walker cache.
 * Two-cycle lookup pipeline:
 *   "first" cycle : req fires; SRAM reads are issued and register-array tags compared
 *   "second" cycle: hit results are muxed and the resp is presented
 * Four structures:
 *   l1 (level-0 non-leaf, registers), l2/l3 (set-associative SRAM), sp (super pages, registers)
 */
class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwCacheIO)

  // TODO: four caches make the codes dirty, think about how to deal with it

  val sfence = io.sfence
  val refuseRefill = io.refuseRefill
  val refill = io.refill.bits

  val first_valid = io.req.valid
  val first_fire = first_valid && io.req.ready
  val first_req = io.req.bits
  val second_ready = Wire(Bool())
  val second_valid = ValidHold(first_fire, io.resp.fire(), sfence.valid)
  val second_req = RegEnable(first_req, first_fire)
  // NOTE: if ptw cache resp may be blocked, hard to handle refill
  // when miss queue is full, please to block itlb and dtlb input

  // when refill, refuse to accept new req (a single-port SRAM cannot read and write together)
  val rwHarzad = if (SramSinglePort) io.refill.valid else false.B
  io.req.ready := !rwHarzad && (second_ready || io.req.bits.isReplay)
  // NOTE: when write, don't ready
  //       when replay, just come in, out make sure resp.fire()

  // l1: level 0 non-leaf pte
  val l1 = Reg(Vec(PtwL1EntrySize, new PtwEntry(tagLen = PtwL1TagLen)))
  val l1v = RegInit(0.U(PtwL1EntrySize.W))   // per-entry valid bits
  val l1g = Reg(UInt(PtwL1EntrySize.W))      // per-entry global bits (survive asid sfence)

  // l2: level 1 non-leaf pte
  val l2 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false),
    set = PtwL2LineNum,
    way = PtwL2WayNum,
    singlePort = SramSinglePort
  ))
  val l2v = RegInit(0.U((PtwL2LineNum * PtwL2WayNum).W))
  val l2g = Reg(UInt((PtwL2LineNum * PtwL2WayNum).W))
  // slice the flat l2v valid vector down to the ways of one set
  def getl2vSet(vpn: UInt) = {
    require(log2Up(PtwL2WayNum) == log2Down(PtwL2WayNum))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(PtwL2LineNum))
    val l2vVec = l2v.asTypeOf(Vec(PtwL2LineNum, UInt(PtwL2WayNum.W)))
    l2vVec(set)
  }

  // l3: level 2 leaf pte of 4KB pages
  val l3 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true),
    set = PtwL3LineNum,
    way = PtwL3WayNum,
    singlePort = SramSinglePort
  ))
  val l3v = RegInit(0.U((PtwL3LineNum * PtwL3WayNum).W))
  val l3g = Reg(UInt((PtwL3LineNum * PtwL3WayNum).W))
  // slice the flat l3v valid vector down to the ways of one set
  def getl3vSet(vpn: UInt) = {
    require(log2Up(PtwL3WayNum) == log2Down(PtwL3WayNum))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(PtwL3LineNum))
    val l3vVec = l3v.asTypeOf(Vec(PtwL3LineNum, UInt(PtwL3WayNum.W)))
    l3vVec(set)
  }

  // sp: level 0/1 leaf pte of 1GB/2MB super pages
  val sp = Reg(Vec(PtwSPEntrySize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(PtwSPEntrySize.W))
  val spg = Reg(UInt(PtwSPEntrySize.W))

  // Access Perf
  val l1AccessPerf = Wire(Vec(PtwL1EntrySize, Bool()))
  val l2AccessPerf = Wire(Vec(PtwL2WayNum, Bool()))
  val l3AccessPerf = Wire(Vec(PtwL3WayNum, Bool()))
  val spAccessPerf = Wire(Vec(PtwSPEntrySize, Bool()))
  l1AccessPerf.map(_ := false.B)
  l2AccessPerf.map(_ := false.B)
  l3AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  // l1: fully-associative lookup over registers; tags compared in the first
  // cycle and the per-entry hit bits registered into the second cycle
  val ptwl1replace = ReplacementPolicy.fromString(ptwl1Replacer, PtwL1EntrySize)
  val (l1Hit, l1HitPPN) = {
    val hitVecT = l1.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && l1v(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitPPN = ParallelPriorityMux(hitVec zip l1.map(_.ppn))
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { ptwl1replace.access(OHToUInt(hitVec)) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire)}
    for (i <- 0 until PtwL1EntrySize) {
      XSDebug(first_fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(first_req.vpn)}\n")
    }
    XSDebug(first_fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(second_valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l1_hitVecT")
    VecInit(hitVec).suggestName(s"l1_hitVec")

    (hit, hitPPN)
  }

  // l2: set-associative SRAM lookup; read issued on first_fire, data and the
  // registered valid bits compared in the second cycle
  val ptwl2replace = ReplacementPolicy.fromString(ptwl2Replacer,PtwL2WayNum,PtwL2LineNum)
  val (l2Hit, l2HitPPN) = {
    val ridx = genPtwL2SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl2vSet(first_req.vpn).asBools), first_fire)
    l2.io.r.req.valid := first_fire
    l2.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l2.io.r.resp.data
    // val hitVec = VecInit(ramDatas.map{wayData => wayData.hit(first_req.vpn) })
    val hitVec = VecInit(ramDatas.zip(vidx).map { case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until PtwL2WayNum).map(_.U))

    ridx.suggestName(s"l2_ridx")
    vidx.suggestName(s"l2_vidx")
    ramDatas.suggestName(s"l2_ramDatas")
    hitVec.suggestName(s"l2_hitVec")
    hitWayData.suggestName(s"l2_hitWayData")
    hitWay.suggestName(s"l2_hitWay")

    when (hit) { ptwl2replace.access(genPtwL2SetIdx(second_req.vpn), hitWay) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until PtwL2WayNum) {
      XSDebug(RegNext(first_fire), p"[l2] ramDatas(${i.U}) ${ramDatas(i)}  l2v:${vidx(i)}  hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    (hit, hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))
  }

  // l3: same structure as l2, but holds leaf PTEs (with permissions)
  val ptwl3replace = ReplacementPolicy.fromString(ptwl3Replacer,PtwL3WayNum,PtwL3LineNum)
  val (l3Hit, l3HitData) = {
    val ridx = genPtwL3SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl3vSet(first_req.vpn).asBools), first_fire)
    l3.io.r.req.valid := first_fire
    l3.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l3.io.r.resp.data
    val hitVec = VecInit(ramDatas.zip(vidx).map{ case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until PtwL3WayNum).map(_.U))

    when (hit) { ptwl3replace.access(genPtwL3SetIdx(second_req.vpn), hitWay) }

    l3AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until PtwL3WayNum) {
      XSDebug(RegNext(first_fire), p"[l3] ramDatas(${i.U}) ${ramDatas(i)}  l3v:${vidx(i)}  hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    ridx.suggestName(s"l3_ridx")
    vidx.suggestName(s"l3_vidx")
    ramDatas.suggestName(s"l3_ramDatas")
    hitVec.suggestName(s"l3_hitVec")
    hitWay.suggestName(s"l3_hitWay")

    (hit, hitWayData)
  }
  val l3HitPPN = l3HitData.ppns(genPtwL3SectorIdx(second_req.vpn))
  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))(genPtwL3SectorIdx(second_req.vpn))

  // super page: fully-associative lookup over registers, like l1 but leaf entries
  val spreplace = ReplacementPolicy.fromString(spReplacer, PtwSPEntrySize)
  val (spHit, spHitData) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && spv(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && RegNext(first_fire) }
    for (i <- 0 until PtwSPEntrySize) {
      XSDebug(first_fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(first_req.vpn)} spv:${spv(i)}\n")
    }
    XSDebug(second_valid, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (hit, hitData)
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  // resp: if the consumer stalls (resp.valid && !resp.ready), latch the result
  // so the pipeline registers may be overwritten by a following request
  val resp = Wire(io.resp.bits.cloneType)
  val resp_latch = RegEnable(resp, io.resp.valid && !io.resp.ready)
  val resp_latch_valid = ValidHold(io.resp.valid && !io.resp.ready, io.resp.ready, sfence.valid)
  second_ready := !(second_valid || resp_latch_valid) || io.resp.fire()
  resp.source   := second_req.source
  resp.vpn      := second_req.vpn
  resp.isReplay := second_req.isReplay
  resp.hit      := l3Hit || spHit
  resp.toFsm.l1Hit := l1Hit
  resp.toFsm.l2Hit := l2Hit
  resp.toFsm.ppn   := Mux(l2Hit, l2HitPPN, l1HitPPN)
  resp.toTlb.tag   := second_req.vpn
  resp.toTlb.ppn   := Mux(l3Hit, l3HitPPN, spHitData.ppn)
  resp.toTlb.perm.map(_ := Mux(l3Hit, l3HitPerm, spHitPerm))
  resp.toTlb.level.map(_ := Mux(l3Hit, 2.U, spHitLevel))

  io.resp.valid := second_valid
  io.resp.bits := Mux(resp_latch_valid, resp_latch, resp)
  assert(!(l3Hit && spHit), "normal page and super page both hit")

  // refill Perf
  val l1RefillPerf = Wire(Vec(PtwL1EntrySize, Bool()))
  val l2RefillPerf = Wire(Vec(PtwL2WayNum, Bool()))
  val l3RefillPerf = Wire(Vec(PtwL3WayNum, Bool()))
  val spRefillPerf = Wire(Vec(PtwSPEntrySize, Bool()))
  l1RefillPerf.map(_ := false.B)
  l2RefillPerf.map(_ := false.B)
  l3RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l2.io.w.req <> DontCare
  l3.io.w.req <> DontCare
  l2.io.w.req.valid := false.B
  l3.io.w.req.valid := false.B

  // select the single PTE addressed by memAddr out of the memory beat
  val memRdata = refill.ptes
  val memSelData = memRdata.asTypeOf(Vec(MemBandWidth/XLEN, UInt(XLEN.W)))(refill.memAddr(log2Up(l1BusDataWidth/8) - 1, log2Up(XLEN/8)))
  val memPtes = (0 until PtwL3SectorSize).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memPte = memSelData.asTypeOf(new PteBundle)

  // TODO: handle sfenceLatch outsize
  when (io.refill.valid && !memPte.isPf(refill.level) && !(sfence.valid || refuseRefill)) {
    // level-0 non-leaf -> l1
    when (refill.level === 0.U && !memPte.isLeaf()) {
      // val refillIdx = LFSR64()(log2Up(PtwL1EntrySize)-1,0) // TODO: may be LRU
      val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
      refillIdx.suggestName(s"PtwL1RefillIdx")
      val rfOH = UIntToOH(refillIdx)
      l1(refillIdx).refill(refill.vpn, memSelData)
      ptwl1replace.access(refillIdx)
      l1v := l1v | rfOH
      l1g := (l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until PtwL1EntrySize) {
        l1RefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.vpn, memSelData)}\n")
      XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"l1_refillIdx")
      rfOH.suggestName(s"l1_rfOH")
    }

    // level-1 non-leaf -> l2
    when (refill.level === 1.U && !memPte.isLeaf()) {
      val refillIdx = genPtwL2SetIdx(refill.vpn)
      // NOTE(review): the valid vector here is sampled on first_fire (the lookup
      // cycle), not on refill — confirm this matches the refill timing
      val victimWay = replaceWrapper(RegEnable(VecInit(getl2vSet(refill.vpn).asBools).asUInt, first_fire), ptwl2replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
      l2.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U
        ),
        waymask = victimWayOH
      )
      ptwl2replace.access(refillIdx, victimWay)
      l2v := l2v | rfvOH
      // the sector entry is global only if every PTE in the beat is global
      l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until PtwL2WayNum) {
        l2RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l2 refill] refilldata:0x${
        (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U)
      }\n")
      XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
      XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l2_refillIdx")
      victimWay.suggestName(s"l2_victimWay")
      victimWayOH.suggestName(s"l2_victimWayOH")
      rfvOH.suggestName(s"l2_rfvOH")
    }

    // level-2 leaf -> l3
    when (refill.level === 2.U && memPte.isLeaf()) {
      val refillIdx = genPtwL3SetIdx(refill.vpn)
      // NOTE(review): same first_fire sampling concern as the l2 refill above
      val victimWay = replaceWrapper(RegEnable(VecInit(getl3vSet(refill.vpn).asBools).asUInt, first_fire), ptwl3replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
      l3.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U
        ),
        waymask = victimWayOH
      )
      ptwl3replace.access(refillIdx, victimWay)
      l3v := l3v | rfvOH
      l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until PtwL3WayNum) {
        l3RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l3 refill] refilldata:0x${
        (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U)
      }\n")
      XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
      XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l3_refillIdx")
      victimWay.suggestName(s"l3_victimWay")
      victimWayOH.suggestName(s"l3_victimWayOH")
      rfvOH.suggestName(s"l3_rfvOH")
    }

    // level-0/1 leaf (1GB/2MB super page) -> sp
    when ((refill.level === 0.U || refill.level === 1.U) && memPte.isLeaf()) {
      val refillIdx = spreplace.way// LFSR64()(log2Up(PtwSPEntrySize)-1,0) // TODO: may be LRU
      val rfOH = UIntToOH(refillIdx)
      sp(refillIdx).refill(refill.vpn, memSelData, refill.level)
      spreplace.access(refillIdx)
      spv := spv | rfOH
      spg := spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until PtwSPEntrySize) {
        spRefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.vpn, memSelData, refill.level)}\n")
      XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"sp_refillIdx")
      rfOH.suggestName(s"sp_rfOH")
    }
  }

  // sfence
  when (sfence.valid) {
    when (sfence.bits.rs1/*va*/) {
      when (sfence.bits.rs2) {
        // all va && all asid
        l1v := 0.U
        l2v := 0.U
        l3v := 0.U
        spv := 0.U
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & l1g
        l2v := l2v & l2g
        l3v := l3v & l3g
        spv := spv & spg
      }
    } .otherwise {
      // flush only the l3 set selected by the given va; conservatively drop all sp
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(PtwL3WayNum, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(PtwL3WayNum, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")
      when (sfence.bits.rs2) {
        // specific leaf of addr && all asid
        l3v := l3v & ~flushMask
        l3g := l3g & ~flushMask
      } .otherwise {
        // specific leaf of addr && specific asid
        l3v := l3v & (~flushMask | l3g)
      }
      spv := 0.U
    }
  }

  // Perf Count
  XSPerfAccumulate("access", second_valid)
  XSPerfAccumulate("l1_hit", l1Hit)
  XSPerfAccumulate("l2_hit", l2Hit)
  XSPerfAccumulate("l3_hit", l3Hit)
  XSPerfAccumulate("sp_hit", spHit)
  XSPerfAccumulate("pte_hit", l3Hit || spHit)
  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
  l3AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
  l3RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  // debug
  XSDebug(sfence.valid, p"[sfence] original v and g vector:\n")
  XSDebug(sfence.valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence.valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(sfence.valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] new v and g vector:\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] spv:${Binary(spv)}\n")
}