/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import coupledL2.utils.SplittedSRAM
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache caches page table entries of all levels (three for Sv39,
 * four when Sv48 is enabled).
 * It responds after a fixed pipeline delay and is never blocked internally:
 * when the miss queue is full, requests are blocked outside the cache.
 */

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val pbmt = UInt(ptePbmtLen.W)
  val perm = new PtePermBundle()
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U,
            perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = VecInit(Seq.fill(tlbcontiguous)(0.U)),
            perm: Vec[PtePermBundle] = VecInit(Seq.fill(tlbcontiguous)(0.U.asTypeOf(new PtePermBundle()))),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = VecInit(Seq.fill(tlbcontiguous)(true.B))): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}
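// Note on the apply() helpers above: a way that matches but fails ECC decoding
// is reported as a miss (hit := hit && !ecc), while ecc := ecc && hit arms the
// ECC-flush path later in this file, which invalidates the affected set so the
// walker re-fetches the entry from memory.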
class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // the stage 1 pte was found in the cache, but the stage 2 translation still has to be done by the PTW
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, one per class of page cache entries (sp/l0/l1/l2/l3)
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits)

  // TODO: four caches make the code messy; think about how to refactor this

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept new requests
  val rwHarzad = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
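  // Lookup pipeline overview (a reading aid; stage names follow the wires below):
  //   stageReq:   accept the request and issue the l1/l0 SRAM reads
  //   stageDelay: SRAM data returns; fully-associative l3/l2/sp hit results are held
  //   stageCheck: compute set-associative hit vectors and decode ECC
  //   stageResp:  drive io.resp (and the ECC-flush / refill-bypass checks)
  // Each stage is a Decoupled handshake, so the pipeline can back-pressure
  // without dropping an in-flight request.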
  val stageReq = Wire(Decoupled(new PtwCacheReq()))           // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))          // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush)) // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHarzad)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SplittedSRAM(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    waySplit = 1,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }
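  // Storage note: l3/l2/sp are small fully-associative register arrays, while
  // l1/l0 are set-associative SRAMs (SplittedSRAM) protected by ECC. Each
  // structure keeps three side bitvectors next to the data: *v (valid),
  // *g (global, exempt from ASID-specific flushes) and *h (2-bit translation
  // stage tag: noS2xlate / onlyStage1 / onlyStage2). The getl1*Set helpers
  // above, and their l0 counterparts below, slice out one set's worth of
  // those bits for a given vpn.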
  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SplittedSRAM(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    waySplit = 2,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: level 1/2/3 leaf pte of 2MB/1GB/512GB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

  // Access Perf
  val l3AccessPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    vpn1(vpnLen - 1, vpnnLen * level + 3) === vpn2(vpnLen - 1, vpnnLen * level + 3)
  }
  // NOTE: nothing is actually bypassed here; this only detects that a matching
  // refill is in flight so the request can re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))
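  // Why allStage maps to onlyStage1 here: entries refilled by an allStage
  // (VS-stage + G-stage) walk are tagged onlyStage1 in the *h arrays (see
  // refill_h above), since the cache stores the stage-1 part of the
  // translation. Lookups therefore normalize allStage to onlyStage1 so the
  // h-tag comparison matches what was written at refill time.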
  // l3
  val l3Hit = if (EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if (EnableSv48) Some(Wire(UInt(ppnLen.W))) else None
  val l3HitPbmt = if (EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None
  val l3Pre = if (EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if (EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l3
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with other entries with RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l2
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPbmt, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }
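  // l1/l0 lookup flow: the SRAM read is issued at stageReq, the returned data
  // is captured one cycle later with DataHoldBypass (so it survives stalls),
  // and the hit vector is registered into stageCheck, where the winning way
  // is selected and its ECC code is decoded.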
  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1nWays, l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h) })

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)} l1v:${vVec(i)} hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

  val l0_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(0) && refill.levelOH.l0), clock)
  val l1_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(1) && refill.levelOH.l1), clock)
  l0.clock := l0_masked_clock
  l1.clock := l1_masked_clock

  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer, l2tlbParams.l0nWays, l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h) })

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)} l0v:${vVec(i)} hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPbmt = l0HitData.pbmts
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = VecInit(l0HitData.onlypf.map(!_))

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)
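  // All per-level results are gathered into check_res at stageCheck and then
  // latched into resp_res at stageCheck(1).fire, so stageResp works on a
  // stable snapshot even if the pipeline stalls.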
  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitPerm, false.B, spHitLevel, spValid)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst := stageResp.bits.isFirst
  io.resp.bits.hit := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
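  // For allStage requests a leaf hit above is only a *stage-1* hit: unless the
  // stage-1 entry already faults (stage1Pf), the stage-2 translation still has
  // to be performed, so io.resp.bits.hit is suppressed and stage1Hit is handed
  // to the PTW FSM instead. The toHptw entry below serves HPTW (stage-2) hits.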
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt)
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := false.B

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
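  // The stage1 response is a merged sector entry: one tag covers tlbcontiguous
  // (8) adjacent 4KB pages, so each of the 8 sub-entries below gets its own
  // ppn_low/v/pbmt/perm while the high ppn bits are shared. pteidx (derived
  // from vpn(2,0)) marks which sub-entry the request actually asked for.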
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i),
      Mux(resp_res.sp.hit, resp_res.sp.pbmt,
        Mux(resp_res.l1.hit, resp_res.l1.pbmt,
          resp_res.l2.pbmt)))
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle))))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := false.B
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := resp_res.l0.hit
  io.resp.bits.stage1.not_merge := false.B
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes / (XLEN / 8))).map(i => memRdata((i + 1) * XLEN - 1, i * XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
  val mPBMTE = io.csr.mPBMTE
  val hPBMTE = io.csr.hPBMTE
  val pbmte = Mux(refill.req_info_dup(0).s2xlate === onlyStage1 || refill.req_info_dup(0).s2xlate === allStage, hPBMTE, mPBMTE)

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    val l3Refill =
      !flush_dup(2) &&
      refill.levelOH.l3.get &&
      !memPte(2).isLeaf() &&
      memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
    val l3RefillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way).suggestName(s"l3_refillIdx")
    val l3RfOH = UIntToOH(l3RefillIdx).asUInt.suggestName(s"l3_rfOH")
    when (l3Refill) {
      l3.get(l3RefillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.vmid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(l3RefillIdx)
      l3v.get := l3v.get | l3RfOH
      l3g.get := (l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U)
      l3h.get(l3RefillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === l3RefillIdx
      }
    }
    XSDebug(l3Refill, p"[l3 refill] refillIdx:${l3RefillIdx} refillEntry:${l3.get(l3RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(l3Refill, p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | l3RfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U))}\n")
  }
  // L2 refill
  val l2Refill =
    !flush_dup(2) &&
    refill.levelOH.l2 &&
    !memPte(2).isLeaf() &&
    memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
  val l2RefillIdx = replaceWrapper(l2v, ptwl2replace.way).suggestName(s"l2_refillIdx")
  val l2RfOH = UIntToOH(l2RefillIdx).asUInt.suggestName(s"l2_rfOH")
  when (l2Refill) {
    l2(l2RefillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.vmid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(l2RefillIdx)
    l2v := l2v | l2RfOH
    l2g := (l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U)
    l2h(l2RefillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === l2RefillIdx
    }
  }
  XSDebug(l2Refill, p"[l2 refill] refillIdx:${l2RefillIdx} refillEntry:${l2(l2RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
  XSDebug(l2Refill, p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | l2RfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U))}\n")

  // L1 refill
  val l1Refill = !flush_dup(1) && refill.levelOH.l1
  val l1RefillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn).suggestName(s"l1_refillIdx")
  val l1VictimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(l1RefillIdx)).suggestName(s"l1_victimWay")
  val l1VictimWayOH = UIntToOH(l1VictimWay).suggestName(s"l1_victimWayOH")
  val l1RfvOH = UIntToOH(Cat(l1RefillIdx, l1VictimWay)).asUInt.suggestName(s"l1_rfvOH")
  val l1Wdata = Wire(l1EntryType)
  l1Wdata.gen(
    vpn = refill.req_info_dup(1).vpn,
    asid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
    vmid = io.csr_dup(1).hgatp.vmid,
    data = memRdata,
    levelUInt = 1.U,
    refill_prefetch_dup(1),
    refill.req_info_dup(1).s2xlate,
    pbmte,
    io.csr_dup(1).vsatp.mode
  )
  when (l1Refill) {
    l1.io.w.apply(
      valid = true.B,
      setIdx = l1RefillIdx,
      data = l1Wdata,
      waymask = l1VictimWayOH
    )
    ptwl1replace.access(l1RefillIdx, l1VictimWay)
    l1v := l1v | l1RfvOH
    l1g := l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U)
    l1h(l1RefillIdx)(l1VictimWay) := refill_h(1)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === l1VictimWay
    }
  }
  XSDebug(l1Refill, p"[l1 refill] refillIdx:0x${Hexadecimal(l1RefillIdx)} victimWay:${l1VictimWay} victimWayOH:${Binary(l1VictimWayOH)} rfvOH(in UInt):${Cat(l1RefillIdx, l1VictimWay)}\n")
  XSDebug(l1Refill, p"[l1 refill] refilldata:0x${l1Wdata}\n")
  XSDebug(l1Refill, p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | l1RfvOH)}\n")
  XSDebug(l1Refill, p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U))}\n")
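  // l1/l0 refills write a whole memory beat (memRdata, blockBits wide) as one
  // sector entry: the gen() call packs the tag, all PTEs of the beat and the
  // ECC code. replaceWrapper combines the set's valid vector with the
  // replacement policy's choice (its arguments suggest invalid ways are
  // reused first), and the per-way global bit is set only when every PTE in
  // the beat is global (Cat(...perm.g).andR).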
  // L0 refill
  val l0Refill = !flush_dup(0) && refill.levelOH.l0
  val l0RefillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn).suggestName(s"l0_refillIdx")
  val l0VictimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(l0RefillIdx)).suggestName(s"l0_victimWay")
  val l0VictimWayOH = UIntToOH(l0VictimWay).asUInt.suggestName(s"l0_victimWayOH")
  val l0RfvOH = UIntToOH(Cat(l0RefillIdx, l0VictimWay)).suggestName(s"l0_rfvOH")
  val l0Wdata = Wire(l0EntryType)
  l0Wdata.gen(
    vpn = refill.req_info_dup(0).vpn,
    asid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
    vmid = io.csr_dup(0).hgatp.vmid,
    data = memRdata,
    levelUInt = 0.U,
    refill_prefetch_dup(0),
    refill.req_info_dup(0).s2xlate,
    pbmte,
    io.csr_dup(0).vsatp.mode
  )
  when (l0Refill) {
    l0.io.w.apply(
      valid = true.B,
      setIdx = l0RefillIdx,
      data = l0Wdata,
      waymask = l0VictimWayOH
    )
    ptwl0replace.access(l0RefillIdx, l0VictimWay)
    l0v := l0v | l0RfvOH
    l0g := l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U)
    l0h(l0RefillIdx)(l0VictimWay) := refill_h(0)

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === l0VictimWay
    }
  }
  XSDebug(l0Refill, p"[l0 refill] refillIdx:0x${Hexadecimal(l0RefillIdx)} victimWay:${l0VictimWay} victimWayOH:${Binary(l0VictimWayOH)} rfvOH(in UInt):${Cat(l0RefillIdx, l0VictimWay)}\n")
  XSDebug(l0Refill, p"[l0 refill] refilldata:0x${l0Wdata}\n")
  XSDebug(l0Refill, p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | l0RfvOH)}\n")
  XSDebug(l0Refill, p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U))}\n")

  // misc entries: super pages and page-fault-only entries
  val spRefill =
    !flush_dup(0) &&
    refill.levelOH.sp &&
    ((memPte(0).isLeaf() && memPte(0).canRefill(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte, io.csr_dup(0).vsatp.mode)) ||
      memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte))
  val spRefillIdx = spreplace.way.suggestName(s"sp_refillIdx") // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: maybe use LRU
  val spRfOH = UIntToOH(spRefillIdx).asUInt.suggestName(s"sp_rfOH")
  when (spRefill) {
    sp(spRefillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.vmid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte)
    )
    spreplace.access(spRefillIdx)
    spv := spv | spRfOH
    spg := spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U)
    sph(spRefillIdx) := refill_h(0)

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === spRefillIdx
    }
  }
  XSDebug(spRefill, p"[sp refill] refillIdx:${spRefillIdx} refillEntry:${sp(spRefillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
  XSDebug(spRefill, p"[sp refill] spv:${Binary(spv)}->${Binary(spv | spRfOH)} spg:${Binary(spg)}->${Binary(spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U))}\n")
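  // ECC error handling: if the way selected at stageCheck fails ECC decode,
  // the response is reported as a miss (see the apply() helpers at the top of
  // this file) and, one cycle later, the whole set holding the corrupted
  // entry is invalidated so it will be refilled from memory on the retry.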
  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }

  // sfence for l0
  val sfence_valid_l0 = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid_l0) {
    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l0v := l0v & ~l0hhit
      } .otherwise {
        // all va && specific asid except global
        l0v := l0v & (l0g | ~l0hhit)
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbl1Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l0nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        l0v := l0v & ~flushMask & ~l0hhit
      } .otherwise {
        // specific leaf of addr && specific asid
        l0v := l0v & (~flushMask | l0g | ~l0hhit)
      }
    }
  }

  // hfencev, simple implementation for l0
  val hfencev_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt
    l0v := l0v & ~flushMask // all VS-stage l0 pte
  }

  // hfenceg, simple implementation for l0
  val hfenceg_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when (hfenceg_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
    l0v := l0v & ~flushMask // all G-stage l0 pte
  }
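  // sfence.vma case split used above (l0) and below (l1/l2/sp). The rs1/rs2
  // bits here are flags, presumably meaning "the corresponding source
  // register was x0", i.e. no specific VA/ASID was supplied:
  //   rs1=1, rs2=1: flush every entry of the matching translation stage
  //   rs1=1, rs2=0: flush by ASID, keeping global (g) entries
  //   rs1=0, rs2=1: flush the leaf entries covering the given VA, any ASID
  //   rs1=0, rs2=0: flush the leaf entries covering the given VA and ASID,
  //                 keeping global entries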
  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      }
    }
  }

  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }.otherwise {
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & l2vmidhit)
        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
      }
    }.otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt)
      }
    }
  }
  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when (hfenceg_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~l2hhit
        spv := spv & ~sphhit
      }.otherwise {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }
    }.otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }
    }
  }

  if (EnableSv48) {
    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
    val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt

    when (sfence_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
      val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth - 1, offLen)

      when (sfence_dup(2).bits.rs1/*va*/) {
        when (sfence_dup(2).bits.rs2) {
          // all va && all asid
          l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        } .otherwise {
          // all va && specific asid except global
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        }
      }
    }

    when (hfencev_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
      val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth - 1, offLen)
      when (sfence_dup(2).bits.rs1) {
        when (sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }.otherwise {
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
        }
      }
    }

    when (hfenceg_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
      val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen)
      when (sfence_dup(2).bits.rs1) {
        when (sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~l3hhit)
        }.otherwise {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }
      }
    }
  }

  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
      when (inFire) { bypassed_reg := bypassed_wire }
      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }

      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
    }
  }
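  // InsideStageConnect (above) threads the per-level bypassed flags through
  // the pipeline: bypassed_reg accumulates any refill that matched this
  // request's vpn/level/stage while the request was parked in a stage, so a
  // request that raced with a refill is flagged and re-accesses the cache
  // instead of trusting a possibly stale miss result.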
  // Perf Count
  val resp_l0 = resp_res.l0.hit
  val resp_sp = resp_res.sp.hit
  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
  val resp_l2_pre = resp_res.l2.pre
  val resp_l1_pre = resp_res.l1.pre
  val resp_l0_pre = resp_res.l0.pre
  val resp_sp_pre = resp_res.sp.pre
  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access", base_valid_access_0)
  if (EnableSv48) {
    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre", base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("pre_access", base_valid_access_1)
  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
  XSPerfAccumulate("pre_pte_hit", base_valid_access_1 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access_first", base_valid_access_2)
  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1127 XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1128 XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0) 1129 XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp) 1130 XSPerfAccumulate("pte_hit_first",base_valid_access_2 && io.resp.bits.hit) 1131 1132 if (EnableSv48) { 1133 XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1134 } 1135 XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1136 XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1137 XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0) 1138 XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp) 1139 XSPerfAccumulate("pte_hit_pre_first",base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1140 1141 val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire 1142 XSPerfAccumulate("pre_access_first", base_valid_access_3) 1143 if (EnableSv48) { 1144 XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1145 } 1146 XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1147 XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1148 XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0) 1149 XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp) 1150 XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit) 1151 1152 if (EnableSv48) { 1153 XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1154 } 1155 XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1156 XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1157 XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0) 1158 XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp) 1159 XSPerfAccumulate("pre_pte_hit_pre_first",base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1160 1161 XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready) 1162 XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready) 1163 if (EnableSv48) { 1164 l3AccessPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) } 1165 } 1166 l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) } 1167 l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) } 1168 l0AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", 
  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  if (EnableSv48) {
    l3RefillPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
  }
  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
  l0RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  if (EnableSv48) {
    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
  }
  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
  if (EnableSv48) {
    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
  }
  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))

  // debug
  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
  if (EnableSv48) {
    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
  }
  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
  if (EnableSv48) {
    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
  }
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")

  val perfEvents = Seq(
    ("access      ", base_valid_access_0),
    ("l2_hit      ", l2Hit),
    ("l1_hit      ", l1Hit),
    ("l0_hit      ", l0Hit),
    ("sp_hit      ", spHit),
    ("pte_hit     ", l0Hit || spHit),
    ("rwHarzad    ", io.req.valid && !io.req.ready),
    ("out_blocked ", io.resp.valid && !io.resp.ready),
  )
  generatePerfEvent()
}