/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.internal.naming.chiselName
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The ptw cache caches page table entries of all three levels.
 * It responds a fixed number of cycles after the request (pipelined).
 * The cache itself should never block:
 * when the miss queue is full, requests are blocked outside instead.
 */

class PageCachePerRespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(ppnLen.W)
  val perm = new PtePermBundle()
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l1 = new PageCachePerRespBundle
  val l2 = new PageCachePerRespBundle
  val l3 = new PageCachePerRespBundle
  val sp = new PageCachePerRespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = Vec(3, Bool())
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = UInt(ppnLen.W)
    }
    val toTlb = new PtwEntry(tagLen = vpnLen, hasPerm = true, hasLevel = true)
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, one bit per kind of page cache entry
      val sp = Bool()
      val l3 = Bool()
      val l2 = Bool()
      val l1 = Bool()
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := RegNext((levelUInt === 0.U || levelUInt === 1.U) && valid, false.B)
        l3 := RegNext((levelUInt === 2.U) && valid, false.B)
        l2 := RegNext((levelUInt === 1.U) && valid, false.B)
        l1 := RegNext((levelUInt === 0.U) && valid, false.B)
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}
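/* IO protocol, as read from the bundle above (descriptive, not normative):
 * - req/resp form a decoupled pipeline through PtwCache; resp.toFsm carries
 *   partial-walk results (which non-leaf levels hit, and the ppn to continue
 *   the walk from), while resp.toTlb carries a full leaf entry when hit is set.
 * - refill is a ValidIO write port driven after a memory walk; req_info_dup,
 *   level_dup and sel_pte_dup (like sfence_dup and csr_dup) are identical
 *   copies of the same value, duplicated only to cut fanout in physical design.
 */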
@chiselName
class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)

  val ecc = Code.fromString(l2tlbParams.ecc)
  val l2EntryType = new PTWEntriesWithEcc(ecc, num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)
  val l3EntryType = new PTWEntriesWithEcc(ecc, num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)

  // TODO: four caches make the code dirty; think about how to deal with it

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept a new req
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))           // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))          // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush)) // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready
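  /* Pipeline sketch (a reading of the connections above, not normative):
   *   stageReq   : enq; l2/l3 SRAM reads issued, l1/sp tags compared
   *   stageDelay : SRAM data returns; per-way hit vectors computed
   *   stageCheck : hit selection, ECC decode, replacement update
   *   stageResp  : deq; drive io.resp
   * stageDelay/stageCheck are Vec(2) pairs: slot 0 is the pipeline register
   * output and slot 1 re-exports it through InsideStageConnect, which also
   * accumulates the per-level bypassed bits while the request waits.
   */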
  // l1: level 0 non-leaf pte
  val l1 = Reg(Vec(l2tlbParams.l1Size, new PtwEntry(tagLen = PtwL1TagLen)))
  val l1v = RegInit(0.U(l2tlbParams.l1Size.W))
  val l1g = Reg(UInt(l2tlbParams.l1Size.W))
  val l1asids = Reg(Vec(l2tlbParams.l1Size, UInt(AsidLength.W)))

  // l2: level 1 non-leaf pte
  val l2 = Module(new SRAMTemplate(
    l2EntryType,
    set = l2tlbParams.l2nSets,
    way = l2tlbParams.l2nWays,
    singlePort = sramSinglePort
  ))
  val l2v = RegInit(0.U((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2g = Reg(UInt((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2asids = Reg(Vec(l2tlbParams.l2nSets, Vec(l2tlbParams.l2nWays, UInt(AsidLength.W))))
  def getl2vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    val l2vVec = l2v.asTypeOf(Vec(l2tlbParams.l2nSets, UInt(l2tlbParams.l2nWays.W)))
    l2vVec(set)
  }
  def getl2asidSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    l2asids(set)
  }

  // l3: level 2 leaf pte of 4KB pages
  val l3 = Module(new SRAMTemplate(
    l3EntryType,
    set = l2tlbParams.l3nSets,
    way = l2tlbParams.l3nWays,
    singlePort = sramSinglePort
  ))
  val l3v = RegInit(0.U((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3g = Reg(UInt((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3asids = Reg(Vec(l2tlbParams.l3nSets, Vec(l2tlbParams.l3nWays, UInt(AsidLength.W))))
  def getl3vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    val l3vVec = l3v.asTypeOf(Vec(l2tlbParams.l3nSets, UInt(l2tlbParams.l3nWays.W)))
    l3vVec(set)
  }
  def getl3asidSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    l3asids(set)
  }

  // sp: level 0/1 leaf pte of 1GB/2MB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = Reg(Vec(l2tlbParams.spSize, UInt(AsidLength.W)))

  // Access Perf
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3AccessPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1AccessPerf.map(_ := false.B)
  l2AccessPerf.map(_ := false.B)
  l3AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)
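  /* Storage layout in brief:
   *   l1 / sp : small fully-associative register files, holding level-0
   *             non-leaf PTEs and 1GB/2MB super-page leaves respectively
   *   l2 / l3 : set-associative SRAMs with ECC, holding level-1 non-leaf
   *             and level-2 leaf sectors
   * The valid (v) and global (g) bits live in flop vectors outside the SRAMs
   * so that sfence and ECC flushes can clear them without an SRAM write.
   */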
  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    vpn1(vpnnLen*3-1, vpnnLen*(2-level)+3) === vpn2(vpnnLen*3-1, vpnnLen*(2-level)+3)
  }
  // NOTE: not actually bypassed; just check whether it would hit and re-access the page cache
  def refill_bypass(vpn: UInt, level: Int) = {
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(io.refill.bits.req_info_dup(0).vpn, vpn, level)
  }
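  /* Worked example of the slice above, assuming Sv39-style parameters
   * (vpnnLen = 9, so vpn is 27 bits, and a refill block holds 8 PTEs,
   * hence the +3 that drops the in-block index):
   *   level 0 -> compare vpn[26:21]  (level-0 segment minus 3 sector bits)
   *   level 1 -> compare vpn[26:12]
   *   level 2 -> compare vpn[26:3]
   * refill_bypass is therefore true exactly when the in-flight refill at the
   * same level covers the block this vpn belongs to.
   */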
  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1Size)
  val (l1Hit, l1HitPPN, l1Pre) = {
    val hitVecT = l1.zipWithIndex.map { case (e, i) => e.hit(stageReq.bits.req_info.vpn, io.csr_dup(0).satp.asid) && l1v(i) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l1
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l1.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l1.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl1replace.access(OHToUInt(hitVec)) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.l1Size) {
      XSDebug(stageReq.fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(stageReq.bits.req_info.vpn, io.csr_dup(0).satp.asid)}\n")
    }
    XSDebug(stageReq.fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l1_hitVecT")
    VecInit(hitVec).suggestName(s"l1_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }
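  // NOTE: the register-file caches (l1 here, sp below) compare tags already at
  // stageReq and register the hit vector, then hold the muxed data with
  // DataHoldBypass during stageDelay; the final RegEnable(..., stageDelay(1).fire)
  // aligns their results with the SRAM-based l2/l3 results at stageCheck.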
  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2nWays, l2tlbParams.l2nSets)
  val (l2Hit, l2HitPPN, l2Pre, l2eccError) = {
    val ridx = genPtwL2SetIdx(stageReq.bits.req_info.vpn)
    l2.io.r.req.valid := stageReq.fire
    l2.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl2vSet(stageReq.bits.req_info.vpn)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val data_resp = DataHoldBypass(l2.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).map { case (wayData, v) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid) && v })

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l2nWays).map(_.U(log2Up(l2tlbParams.l2nWays).W)))
    val eccError = hitWayEntry.decode()

    ridx.suggestName(s"l2_ridx")
    ramDatas.suggestName(s"l2_ramDatas")
    hitVec.suggestName(s"l2_hitVec")
    hitWayData.suggestName(s"l2_hitWayData")
    hitWay.suggestName(s"l2_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl2replace.access(genPtwL2SetIdx(check_vpn), hitWay) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l2nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l2] ramDatas(${i.U}) ${ramDatas(i)} l2v:${vVec(i)} hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL2SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

  // l3
  val ptwl3replace = ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3nWays, l2tlbParams.l3nSets)
  val (l3Hit, l3HitData, l3Pre, l3eccError) = {
    val ridx = genPtwL3SetIdx(stageReq.bits.req_info.vpn)
    l3.io.r.req.valid := stageReq.fire
    l3.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl3vSet(stageReq.bits.req_info.vpn)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val data_resp = DataHoldBypass(l3.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).map { case (wayData, v) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(2).satp.asid) && v })

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l3nWays).map(_.U(log2Up(l2tlbParams.l3nWays).W)))
    val eccError = hitWayEntry.decode()

    when (hit && stageCheck_valid_1cycle) { ptwl3replace.access(genPtwL3SetIdx(check_vpn), hitWay) }

    l3AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l3nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l3] ramDatas(${i.U}) ${ramDatas(i)} l3v:${vVec(i)} hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l3_ridx")
    ramDatas.suggestName(s"l3_ramDatas")
    hitVec.suggestName(s"l3_hitVec")
    hitWay.suggestName(s"l3_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l3HitPPN = l3HitData.ppns(genPtwL3SectorIdx(stageCheck(0).bits.req_info.vpn))
  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))(genPtwL3SectorIdx(stageCheck(0).bits.req_info.vpn))
  val l3HitValid = l3HitData.vs(genPtwL3SectorIdx(stageCheck(0).bits.req_info.vpn))
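  // NOTE: a hit way is ECC-decoded only at stageCheck (hitWayEntry.decode());
  // each way stores a sector of PtwL3SectorSize entries, so genPtwL3SectorIdx
  // selects the single 4KB translation (ppn/perm/v) out of the hit sector here.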
  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(stageReq.bits.req_info.vpn, io.csr_dup(0).satp.asid) && spv(i) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(stageReq.bits.req_info.vpn, io.csr_dup(0).satp.asid)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN)
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, ecc = l2eccError)
  check_res.l3.apply(l3Hit, l3Pre, l3HitPPN, l3HitPerm, l3eccError, valid = l3HitValid)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitPerm, false.B, spHitLevel, spValid)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst := stageResp.bits.isFirst
  io.resp.bits.hit := resp_res.l3.hit || resp_res.sp.hit
  io.resp.bits.bypassed := bypassed(2) || (bypassed(1) && !resp_res.l2.hit) || (bypassed(0) && !resp_res.l1.hit)
  io.resp.bits.prefetch := resp_res.l3.pre && resp_res.l3.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit
  io.resp.bits.toFsm.ppn := Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l1.ppn)
  io.resp.bits.toTlb.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toTlb.asid := io.csr_dup(0).satp.asid // DontCare
  io.resp.bits.toTlb.ppn := Mux(resp_res.l3.hit, resp_res.l3.ppn, resp_res.sp.ppn)
  io.resp.bits.toTlb.perm.map(_ := Mux(resp_res.l3.hit, resp_res.l3.perm, resp_res.sp.perm))
  io.resp.bits.toTlb.level.map(_ := Mux(resp_res.l3.hit, 2.U, resp_res.sp.level))
  io.resp.bits.toTlb.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toTlb.v := Mux(resp_res.sp.hit, resp_res.sp.v, resp_res.l3.v)
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l3.hit && resp_res.sp.hit, "normal page and super page both hit")
  XSError(stageResp.valid && io.resp.bits.hit && bypassed(2), "page cache, bypassed but hit")
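  // NOTE: a request counts as a full hit only on a leaf (l3 or sp); the two
  // may never hit together (guarded by the XSError above), and the toTlb muxes
  // prefer the l3 (4KB) entry. A hit that raced a same-block refill is also
  // illegal: such a request must report bypassed and be re-walked instead.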
  // refill Perf
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3RefillPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1RefillPerf.map(_ := false.B)
  l2RefillPerf.map(_ := false.B)
  l3RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l2.io.w.req <> DontCare
  l3.io.w.req <> DontCare
  l2.io.w.req.valid := false.B
  l3.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))

  // TODO: handle sfenceLatch outside
  when (!flush_dup(0) && refill.levelOH.l1 && !memPte(0).isLeaf() && !memPte(0).isPf(refill.level_dup(0))) {
    // val refillIdx = LFSR64()(log2Up(l2tlbParams.l1Size)-1, 0) // TODO: maybe use LRU
    val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
    refillIdx.suggestName(s"PtwL1RefillIdx")
    val rfOH = UIntToOH(refillIdx)
    l1(refillIdx).refill(
      refill.req_info_dup(0).vpn,
      io.csr_dup(0).satp.asid,
      memSelData(0),
      0.U,
      refill_prefetch_dup(0)
    )
    ptwl1replace.access(refillIdx)
    l1v := l1v | rfOH
    l1g := (l1g & ~rfOH) | Mux(memPte(0).perm.g, rfOH, 0.U)

    for (i <- 0 until l2tlbParams.l1Size) {
      l1RefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, io.csr_dup(0).satp.asid, memSelData(0), 0.U, prefetch = refill_prefetch_dup(0))}\n")
    XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"l1_refillIdx")
    rfOH.suggestName(s"l1_rfOH")
  }

  when (!flush_dup(1) && refill.levelOH.l2 && !memPte(1).isLeaf() && !memPte(1).isPf(refill.level_dup(1))) {
    val refillIdx = genPtwL2SetIdx(refill.req_info_dup(1).vpn)
    val victimWay = replaceWrapper(getl2vSet(refill.req_info_dup(1).vpn), ptwl2replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l2EntryType)
    wdata.gen(
      vpn = refill.req_info_dup(1).vpn,
      asid = io.csr_dup(1).satp.asid,
      data = memRdata,
      levelUInt = 1.U,
      refill_prefetch_dup(1)
    )
    l2.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl2replace.access(refillIdx, victimWay)
    l2v := l2v | rfvOH
    l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

    for (i <- 0 until l2tlbParams.l2nWays) {
      l2RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l2 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
    XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l2_refillIdx")
    victimWay.suggestName(s"l2_victimWay")
    victimWayOH.suggestName(s"l2_victimWayOH")
    rfvOH.suggestName(s"l2_rfvOH")
  }

  when (!flush_dup(2) && refill.levelOH.l3) {
    val refillIdx = genPtwL3SetIdx(refill.req_info_dup(2).vpn)
    val victimWay = replaceWrapper(getl3vSet(refill.req_info_dup(2).vpn), ptwl3replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l3EntryType)
    wdata.gen(
      vpn = refill.req_info_dup(2).vpn,
      asid = io.csr_dup(2).satp.asid,
      data = memRdata,
      levelUInt = 2.U,
      refill_prefetch_dup(2)
    )
    l3.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl3replace.access(refillIdx, victimWay)
    l3v := l3v | rfvOH
    l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

    for (i <- 0 until l2tlbParams.l3nWays) {
      l3RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l3 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
    XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l3_refillIdx")
    victimWay.suggestName(s"l3_victimWay")
    victimWayOH.suggestName(s"l3_victimWayOH")
    rfvOH.suggestName(s"l3_rfvOH")
  }

  // misc entries: super pages & invalid (page-fault) entries
  when (!flush_dup(0) && refill.levelOH.sp && (memPte(0).isLeaf() || memPte(0).isPf(refill.level_dup(0)))) {
    val refillIdx = spreplace.way // LFSR64()(log2Up(l2tlbParams.spSize)-1, 0) // TODO: maybe use LRU
    val rfOH = UIntToOH(refillIdx)
    sp(refillIdx).refill(
      refill.req_info_dup(0).vpn,
      io.csr_dup(0).satp.asid,
      memSelData(0),
      refill.level_dup(2),
      refill_prefetch_dup(0),
      !memPte(0).isPf(refill.level_dup(0))
    )
    spreplace.access(refillIdx)
    spv := spv | rfOH
    spg := spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U)

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, io.csr_dup(0).satp.asid, memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
    XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"sp_refillIdx")
    rfOH.suggestName(s"sp_rfOH")
  }
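  // NOTE: for the set-associative caches, replaceWrapper appears to prefer an
  // invalid way of the indexed set and only falls back to the replacer's
  // victim when all ways are valid; the one-hot rfvOH then updates the valid
  // (v) and global (g) flop vectors in the same cycle as the SRAM write.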
  val l2eccFlush = resp_res.l2.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l2eccError, init = false.B)
  val l3eccFlush = resp_res.l3.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l3eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l2eccFlush, "l2tlb.cache.l2 ecc error. Should not happen at sim stage")
  XSError(l3eccFlush, "l2tlb.cache.l3 ecc error. Should not happen at sim stage")
  when (l2eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL2SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l2nWays, a.asUInt) }).asUInt
    l2v := l2v & ~flushMask
    l2g := l2g & ~flushMask
  }

  when (l3eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
    l3v := l3v & ~flushMask
    l3g := l3g & ~flushMask
  }

  // sfence
  when (sfence_dup(3).valid) {
    val sfence_vpn = sfence_dup(3).bits.addr(sfence_dup(3).bits.addr.getWidth-1, offLen)

    when (sfence_dup(3).bits.rs1/*va*/) {
      when (sfence_dup(3).bits.rs2) {
        // all va && all asid
        l3v := 0.U
      } .otherwise {
        // all va && specific asid except global
        l3v := l3v & l3g
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(3).bits.rs2) {
        // specific leaf of addr && all asid
        l3v := l3v & ~flushMask
      } .otherwise {
        // specific leaf of addr && specific asid
        l3v := l3v & (~flushMask | l3g)
      }
    }
  }

  when (sfence_dup(0).valid) {
    val l1asidhit = VecInit(l1asids.map(_ === sfence_dup(0).bits.asid)).asUInt
    val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.asid)).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l1v := 0.U
        l2v := 0.U
        spv := 0.U
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & (~l1asidhit | l1g)
        l2v := l2v & l2g
        spv := spv & (~spasidhit | spg)
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & (~VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.asid, ignoreAsid = true))).asUInt | spg)
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & (~VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.asid))).asUInt | spg)
      }
    }
  }
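  /* sfence.vma decoding above (rs1 set means "no VA given", rs2 set means
   * "no ASID given", matching the inline comments):
   *   rs1=1, rs2=1 : flush all entries             (v := 0)
   *   rs1=1, rs2=0 : flush one ASID, keep globals  (v := v & g)
   *   rs1=0, rs2=1 : flush by address, all ASIDs
   *   rs1=0, rs2=0 : flush by address and ASID, keep globals
   * Only the leaf caches (l3, sp) are flushed by address; the by-address
   * cases conservatively clear a whole l3 set / all matching sp entries.
   */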
  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i) && io.refill.valid
      when (inFire) { bypassed_reg := bypassed_wire }
      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }

      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
    }
  }
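  // NOTE: bypassed is sticky per stage: bypassed_wire catches a refill that
  // collides in the cycle the stage is entered, and bypassed_reg keeps
  // accumulating later collisions until the request advances, so a lookup
  // that raced any matching refill is flagged rather than trusted.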
  // Perf Count
  val resp_l3 = resp_res.l3.hit
  val resp_sp = resp_res.sp.hit
  val resp_l1_pre = resp_res.l1.pre
  val resp_l2_pre = resp_res.l2.pre
  val resp_l3_pre = resp_res.l3.pre
  val resp_sp_pre = resp_res.sp.pre
  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire()
  XSPerfAccumulate("access", base_valid_access_0)
  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit", base_valid_access_0 && resp_l3)
  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)

  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre", base_valid_access_0 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire()
  XSPerfAccumulate("pre_access", base_valid_access_1)
  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && resp_l3)
  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
  XSPerfAccumulate("pre_pte_hit", base_valid_access_1 && io.resp.bits.hit)

  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire()
  XSPerfAccumulate("access_first", base_valid_access_2)
  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_first", base_valid_access_2 && resp_l3)
  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
  XSPerfAccumulate("pte_hit_first", base_valid_access_2 && io.resp.bits.hit)

  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre_first", base_valid_access_2 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire()
  XSPerfAccumulate("pre_access_first", base_valid_access_3)
  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && resp_l3)
  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)

  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre_first", base_valid_access_3 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  XSPerfAccumulate("rwHazard", io.req.valid && !io.req.ready)
  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
  l3AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
  l3RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
  XSPerfAccumulate("l3Refill", Cat(l3RefillPerf).orR)
  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))

  // debug
  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")

  val perfEvents = Seq(
    ("access      ", base_valid_access_0),
    ("l1_hit      ", l1Hit),
    ("l2_hit      ", l2Hit),
    ("l3_hit      ", l3Hit),
    ("sp_hit      ", spHit),
    ("pte_hit     ", l3Hit || spHit),
    ("rwHazard    ", io.req.valid && !io.req.ready),
    ("out_blocked ", io.resp.valid && !io.resp.ready),
  )
  generatePerfEvent()
}