/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache holds page-table entries of all three levels.
 * It responds in the next cycle and is itself non-blocking;
 * when the miss queue is full, requests must be blocked outside.
 */

class PageCachePerRespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = if (HasHExtension) UInt(gvpnLen.W) else UInt(ppnLen.W)
  val perm = new PtePermBundle()
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheMergeRespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, if (HasHExtension) UInt(gvpnLen.W) else UInt(ppnLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt],
            perm: Vec[PtePermBundle] = VecInit(Seq.fill(tlbcontiguous)(0.U.asTypeOf(new PtePermBundle()))),
            ecc: Bool = false.B, level: UInt = 0.U,
            valid: Vec[Bool] = VecInit(Seq.fill(tlbcontiguous)(true.B))): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l1 = new PageCachePerRespBundle
  val l2 = new PageCachePerRespBundle
  val l3 = new PageCacheMergeRespBundle
  val sp = new PageCachePerRespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = Vec(3, Bool())
  val isHptw = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}
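// The 2-bit s2xlate tag is stored alongside every cached entry (l1h/l2h/l3h/sph)
// and must match on lookup. From its uses below: "b11" entries belong to G-stage
// walks and are indexed by gvpn (isGvpn), "b10" entries are VS-stage and are
// flushed by hfence.vvma, and bit 0 selects vsatp vs. satp as the ASID source
// on refill.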
class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // whether the entry was fetched by a prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = if (HasHExtension) UInt(gvpnLen.W) else UInt(ppnLen.W)
    }
    val toTlb = new PtwMergeResp()
    val isHptw = Bool()
    val toHptw = new Bundle {
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, one per page-cache group
      val sp = Bool()
      val l3 = Bool()
      val l2 = Bool()
      val l1 = Bool()
      def apply(levelUInt: UInt, valid: Bool): Unit = {
        sp := RegNext((levelUInt === 0.U || levelUInt === 1.U) && valid, false.B)
        l3 := RegNext((levelUInt === 2.U) & valid, false.B)
        l2 := RegNext((levelUInt === 1.U) & valid, false.B)
        l1 := RegNext((levelUInt === 0.U) & valid, false.B)
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val HasHExtension = l2tlbParams.HasHExtension
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l2EntryType = new PTWEntriesWithEcc(ecc, num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)
  val l3EntryType = new PTWEntriesWithEcc(ecc, num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)

  // TODO: four caches make the code messy; think about how to clean this up

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // while refilling, refuse to accept new requests
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))           // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))          // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush)) // ecc flush
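  // Pipeline wiring. The intended timing, following the stage comments above:
  //   stageReq:   the request enqueues and the l2/l3 SRAM reads are issued;
  //   stageDelay: SRAM data returns; fully-associative l1/sp tags are compared;
  //   stageCheck: set-associative l2/l3 way selection and ECC decode;
  //   stageResp:  the response is driven onto io.resp.
  // Each middle stage is a two-slot Vec: PipelineConnect provides the stage
  // register, InsideStageConnect (defined below) the in-stage bookkeeping.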
  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready

  // l1: level 0 non-leaf pte
  val l1 = Reg(Vec(l2tlbParams.l1Size, new PtwEntry(tagLen = PtwL1TagLen)))
  val l1v = RegInit(0.U(l2tlbParams.l1Size.W))
  val l1g = Reg(UInt(l2tlbParams.l1Size.W))
  val l1asids = l1.map(_.asid)
  val l1vmids = l1.map(_.vmid)
  val l1h = Reg(Vec(l2tlbParams.l1Size, UInt(2.W))) // bit 0: s2xlate, bit 1: stage 1 or stage 2

  // l2: level 1 non-leaf pte
  val l2 = Module(new SRAMTemplate(
    l2EntryType,
    set = l2tlbParams.l2nSets,
    way = l2tlbParams.l2nWays,
    singlePort = sramSinglePort
  ))
  val l2v = RegInit(0.U((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2g = Reg(UInt((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2h = Reg(Vec(l2tlbParams.l2nSets, Vec(l2tlbParams.l2nWays, UInt(2.W))))
  def getl2vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    val l2vVec = l2v.asTypeOf(Vec(l2tlbParams.l2nSets, UInt(l2tlbParams.l2nWays.W)))
    l2vVec(set)
  }
  def getl2hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    l2h(set)
  }

  // l3: level 2 leaf pte of 4KB pages
  val l3 = Module(new SRAMTemplate(
    l3EntryType,
    set = l2tlbParams.l3nSets,
    way = l2tlbParams.l3nWays,
    singlePort = sramSinglePort
  ))
  val l3v = RegInit(0.U((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3g = Reg(UInt((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3h = Reg(Vec(l2tlbParams.l3nSets, Vec(l2tlbParams.l3nWays, UInt(2.W))))
  def getl3vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    val l3vVec = l3v.asTypeOf(Vec(l2tlbParams.l3nSets, UInt(l2tlbParams.l3nWays.W)))
    l3vVec(set)
  }
  def getl3hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    l3h(set)
  }

  // sp: level 0/1 leaf pte of 1GB/2MB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

  // Access Perf
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3AccessPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1AccessPerf.map(_ := false.B)
  l2AccessPerf.map(_ := false.B)
  l3AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)
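  // Helpers for refill-time bypass checks. vpn_match compares two vpns only
  // down to the granularity of the given level (the low bits, including the
  // 3-bit sector offset within a contiguous 8-entry block, are ignored), and
  // additionally compares the extended high bits when the addresses are gvpns.
  // Example (assuming vpnnLen = 9, tlbcontiguous = 8): a level-2 (leaf) check
  // compares vpn[26:3], i.e. everything above the sector index.
  // refill_bypass then reports whether an in-flight refill covers the page a
  // request in the pipeline is looking up, so the request can be marked
  // "bypassed" and re-access the cache instead of relying on a stale miss.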
  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int, isGvpn: Bool) = {
    (vpn1(vpnnLen*3-1, vpnnLen*(2-level)+3) === vpn2(vpnnLen*3-1, vpnnLen*(2-level)+3)) &&
    Mux(isGvpn, vpn1(vpnnLen*3+extendVpnnBits-1, vpnnLen*3) === vpn2(vpnnLen*3+extendVpnnBits-1, vpnnLen*3), true.B)
  }
  // NOTE: not actually bypassed; just check whether the refill hits, then re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    val refill_gvpn = io.refill.bits.req_info_dup(0).gvpn
    val refill_isGvpn = io.refill.bits.req_info_dup(0).s2xlate(0) && io.refill.bits.req_info_dup(0).s2xlate(1)
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) &&
      vpn_match(Mux(refill_isGvpn, refill_gvpn, refill_vpn), vpn, level, refill_isGvpn) &&
      h_search === io.refill.bits.req_info_dup(0).s2xlate
  }

  val isGvpn = stageReq.bits.req_info.s2xlate(0) && stageReq.bits.req_info.s2xlate(1) // only stage-2 translation uses gvpn
  val vpn_search = Mux(isGvpn, stageReq.bits.req_info.gvpn, stageReq.bits.req_info.vpn)
  val h_search = stageReq.bits.req_info.s2xlate

  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1Size)
  val (l1Hit, l1HitPPN, l1Pre) = {
    val hitVecT = l1.zipWithIndex.map { case (e, i) =>
      e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search(0), isGvpn = isGvpn) && l1v(i) && h_search === l1h(i)
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l1
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l1.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l1.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl1replace.access(OHToUInt(hitVec)) }

    l1AccessPerf.zip(hitVec).map { case (l, h) => l := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.l1Size) {
      XSDebug(stageReq.fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search(0), isGvpn = isGvpn)}\n")
    }
    XSDebug(stageReq.fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l1_hitVecT")
    VecInit(hitVec).suggestName(s"l1_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }
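  // The SRAM-backed l2/l3 lookups are spread across the pipeline: the read is
  // issued at stageReq, the raw data and the externally-kept valid/h bits are
  // captured during stageDelay, the tag compare happens on the delayed data,
  // and way selection plus ECC decode complete at stageCheck. DataHoldBypass
  // keeps the single-port SRAM output stable across stalls.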
  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2nWays, l2tlbParams.l2nSets)
  val (l2Hit, l2HitPPN, l2Pre, l2eccError) = {
    val ridx = genPtwL2SetIdx(vpn_search)
    l2.io.r.req.valid := stageReq.fire
    l2.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl2vSet(vpn_search)
    val hVec_req = getl2hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = Mux(stageDelay(0).bits.req_info.s2xlate(0) && stageDelay(0).bits.req_info.s2xlate(1),
      stageDelay(0).bits.req_info.gvpn, stageDelay(0).bits.req_info.vpn)
    val delay_h = stageDelay(0).bits.req_info.s2xlate
    val data_resp = DataHoldBypass(l2.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).hgatp.asid, s2xlate = delay_h(0), isGvpn = delay_h(0) && delay_h(1)) && v && (delay_h === h) })

    // check hit and ecc
    val check_vpn = Mux(stageCheck(0).bits.req_info.s2xlate(0) && stageCheck(0).bits.req_info.s2xlate(1),
      stageCheck(0).bits.req_info.gvpn, stageCheck(0).bits.req_info.vpn)
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l2nWays).map(_.U(log2Up(l2tlbParams.l2nWays).W)))
    val eccError = hitWayEntry.decode()

    ridx.suggestName(s"l2_ridx")
    ramDatas.suggestName(s"l2_ramDatas")
    hitVec.suggestName(s"l2_hitVec")
    hitWayData.suggestName(s"l2_hitWayData")
    hitWay.suggestName(s"l2_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl2replace.access(genPtwL2SetIdx(check_vpn), hitWay) }

    l2AccessPerf.zip(hitVec).map { case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l2nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l2] ramDatas(${i.U}) ${ramDatas(i)} l2v:${vVec(i)} hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL2SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }
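  // An ECC error on a hitting way is not corrected in place: apply() masks the
  // hit (hit && !ecc) so the request is treated as a miss and walks memory
  // again; the stale entry is cleaned up separately (see the ecc flush logic
  // further below).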
  // l3
  val ptwl3replace = ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3nWays, l2tlbParams.l3nSets)
  val (l3Hit, l3HitData, l3Pre, l3eccError) = {
    val ridx = genPtwL3SetIdx(vpn_search)
    l3.io.r.req.valid := stageReq.fire
    l3.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl3vSet(vpn_search)
    val hVec_req = getl3hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = Mux(stageDelay(0).bits.req_info.s2xlate(0) && stageDelay(0).bits.req_info.s2xlate(1),
      stageDelay(0).bits.req_info.gvpn, stageDelay(0).bits.req_info.vpn)
    val delay_h = stageDelay(0).bits.req_info.s2xlate
    val data_resp = DataHoldBypass(l3.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(2).satp.asid, io.csr_dup(2).hgatp.asid, s2xlate = delay_h(0), isGvpn = delay_h(0) && delay_h(1)) && v && (delay_h === h) })

    // check hit and ecc
    val check_vpn = Mux(stageCheck(0).bits.req_info.s2xlate(0) && stageCheck(0).bits.req_info.s2xlate(1),
      stageCheck(0).bits.req_info.gvpn, stageCheck(0).bits.req_info.vpn)
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l3nWays).map(_.U(log2Up(l2tlbParams.l3nWays).W)))
    val eccError = hitWayEntry.decode()

    when (hit && stageCheck_valid_1cycle) { ptwl3replace.access(genPtwL3SetIdx(check_vpn), hitWay) }

    l3AccessPerf.zip(hitVec).map { case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l3nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l3] ramDatas(${i.U}) ${ramDatas(i)} l3v:${vVec(i)} hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l3_ridx")
    ramDatas.suggestName(s"l3_ramDatas")
    hitVec.suggestName(s"l3_hitVec")
    hitWay.suggestName(s"l3_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l3HitPPN = l3HitData.ppns
  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))
  val l3HitValid = l3HitData.vs

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) =>
      e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search(0), isGvpn = h_search(0) && h_search(1)) && spv(i) && (sph(i) === h_search)
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map { case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search(0), isGvpn = h_search(0) && h_search(1))} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN)
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, ecc = l2eccError)
  check_res.l3.apply(l3Hit, l3Pre, l3HitPPN, l3HitPerm, l3eccError, valid = l3HitValid)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitPerm, false.B, spHitLevel, spValid)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )
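  // Response selection: toFsm reports the deepest non-leaf hit (preferring l2
  // over l1) so the walker can resume the walk from there on a miss; toTlb
  // merges the 8-entry leaf sector when l3 or sp hits; toHptw mirrors both
  // views for hypervisor walks (isHptw), indexed by the low gvpn bits.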
  val resp_isGvpn = stageResp.bits.req_info.s2xlate(0) && stageResp.bits.req_info.s2xlate(1)
  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst := stageResp.bits.isFirst
  io.resp.bits.hit := resp_res.l3.hit || resp_res.sp.hit
  io.resp.bits.bypassed := bypassed(2) || (bypassed(1) && !resp_res.l2.hit) || (bypassed(0) && !resp_res.l1.hit)
  io.resp.bits.prefetch := resp_res.l3.pre && resp_res.l3.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit
  io.resp.bits.toFsm.ppn := Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l1.ppn)

  io.resp.bits.isHptw := stageResp.bits.isHptw
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit
  io.resp.bits.toHptw.ppn := Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l1.ppn)
  val idx = stageResp.bits.req_info.gvpn(2, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.gvpn
  io.resp.bits.toHptw.resp.entry.vmid := io.csr_dup(0).hgatp.asid
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l3.hit, 2.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l3.hit, resp_res.l3.ppn(idx), resp_res.sp.ppn)
  io.resp.bits.toHptw.resp.entry.perm := Mux(resp_res.l3.hit, resp_res.l3.perm(idx), resp_res.sp.perm)
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l3.hit, resp_res.l3.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := false.B

  io.resp.bits.toTlb.entry.map(_.tag := Mux(resp_isGvpn, stageResp.bits.req_info.gvpn(gvpnLen - 1, 3), stageResp.bits.req_info.vpn(vpnLen - 1, 3)))
  io.resp.bits.toTlb.entry.map(_.asid := io.csr_dup(0).satp.asid) // DontCare
  io.resp.bits.toTlb.entry.map(_.level.map(_ := Mux(resp_res.l3.hit, 2.U, resp_res.sp.level)))
  io.resp.bits.toTlb.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    io.resp.bits.toTlb.entry(i).ppn := Mux(resp_res.l3.hit, resp_res.l3.ppn(i)(ppnLen - 1, sectortlbwidth), resp_res.sp.ppn(ppnLen - 1, sectortlbwidth))
    io.resp.bits.toTlb.entry(i).ppn_low := Mux(resp_res.l3.hit, resp_res.l3.ppn(i)(sectortlbwidth - 1, 0), resp_res.sp.ppn(sectortlbwidth - 1, 0))
    io.resp.bits.toTlb.entry(i).perm.map(_ := Mux(resp_res.l3.hit, resp_res.l3.perm(i), resp_res.sp.perm))
    io.resp.bits.toTlb.entry(i).v := Mux(resp_res.l3.hit, resp_res.l3.v(i), resp_res.sp.v)
    io.resp.bits.toTlb.entry(i).pf := !io.resp.bits.toTlb.entry(i).v
    io.resp.bits.toTlb.entry(i).af := false.B
  }
  io.resp.bits.toTlb.pteidx := UIntToOH(stageResp.bits.req_info.vpn(2, 0)).asBools
  io.resp.bits.toTlb.not_super := resp_res.l3.hit
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l3.hit && resp_res.sp.hit, "normal page and super page both hit")
  XSError(stageResp.valid && io.resp.bits.hit && bypassed(2), "page cache, bypassed but hit")

  // refill Perf
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3RefillPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1RefillPerf.map(_ := false.B)
  l2RefillPerf.map(_ := false.B)
  l3RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l2.io.w.req <> DontCare
  l3.io.w.req <> DontCare
  l2.io.w.req.valid := false.B
  l3.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes / (XLEN / 8))).map(i => memRdata((i + 1) * XLEN - 1, i * XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
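  // Refill data paths: the whole refilled block (blockBits) is sliced into
  // XLEN-wide PTEs for the sectored l2/l3 writes, while sel_pte_dup carries the
  // single PTE the walker selected for the fully-associative l1/sp entries.
  // Both arrive pre-duplicated (one copy per consumer) purely to cut fanout.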
  // TODO: handle sfenceLatch outside
  when (!flush_dup(0) && refill.levelOH.l1 && !memPte(0).isLeaf() && !memPte(0).isPf(refill.level_dup(0)) && !memPte(0).isAf()) {
    // val refillIdx = LFSR64()(log2Up(l2tlbParams.l1Size)-1,0) // TODO: maybe LRU
    val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
    refillIdx.suggestName(s"PtwL1RefillIdx")
    val rfOH = UIntToOH(refillIdx)
    l1(refillIdx).refill(
      Mux(refill.req_info_dup(0).s2xlate === "b11".U, refill.req_info_dup(0).gvpn, refill.req_info_dup(0).vpn),
      Mux(refill.req_info_dup(0).s2xlate(0).asBool, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.asid,
      memSelData(0),
      0.U,
      refill_prefetch_dup(0)
    )
    ptwl1replace.access(refillIdx)
    l1v := l1v | rfOH
    l1g := (l1g & ~rfOH) | Mux(memPte(0).perm.g, rfOH, 0.U)
    l1h(refillIdx) := refill.req_info_dup(0).s2xlate

    for (i <- 0 until l2tlbParams.l1Size) {
      l1RefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, io.csr_dup(0).satp.asid, memSelData(0), 0.U, prefetch = refill_prefetch_dup(0))}\n")
    XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"l1_refillIdx")
    rfOH.suggestName(s"l1_rfOH")
  }

  when (!flush_dup(1) && refill.levelOH.l2 && !memPte(1).isLeaf() && !memPte(1).isPf(refill.level_dup(1)) && !memPte(1).isAf()) {
    val refillIdx = genPtwL2SetIdx(refill.req_info_dup(1).vpn)
    val victimWay = replaceWrapper(getl2vSet(refill.req_info_dup(1).vpn), ptwl2replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l2EntryType)
    wdata.gen(
      vpn = Mux(refill.req_info_dup(1).s2xlate === "b11".U, refill.req_info_dup(1).gvpn, refill.req_info_dup(1).vpn),
      asid = Mux(refill.req_info_dup(1).s2xlate(0).asBool, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
      vmid = io.csr_dup(1).hgatp.asid,
      data = memRdata,
      levelUInt = 1.U,
      refill_prefetch_dup(1)
    )
    l2.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl2replace.access(refillIdx, victimWay)
    l2v := l2v | rfvOH
    l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l2h(refillIdx)(victimWay) := refill.req_info_dup(1).s2xlate

    for (i <- 0 until l2tlbParams.l2nWays) {
      l2RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l2 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
    XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l2_refillIdx")
    victimWay.suggestName(s"l2_victimWay")
    victimWayOH.suggestName(s"l2_victimWayOH")
    rfvOH.suggestName(s"l2_rfvOH")
  }
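  // Only valid intermediate PTEs enter l1/l2 (the refills above filter on
  // !isLeaf / !isPf / !isAf). The leaf caches differ: l3 keeps per-sector
  // valid bits and filters only access faults, while sp (further below) also
  // admits page-faulting leaves as invalid entries. replaceWrapper is assumed
  // (from HasPtwConst) to prefer an invalid way before falling back to the
  // replacement policy's victim.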
  when (!flush_dup(2) && refill.levelOH.l3 && !memPte(2).isAf()) {
    val refillIdx = genPtwL3SetIdx(refill.req_info_dup(2).vpn)
    val victimWay = replaceWrapper(getl3vSet(refill.req_info_dup(2).vpn), ptwl3replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l3EntryType)
    wdata.gen(
      vpn = Mux(refill.req_info_dup(2).s2xlate === "b11".U, refill.req_info_dup(2).gvpn, refill.req_info_dup(2).vpn),
      asid = Mux(refill.req_info_dup(2).s2xlate(0).asBool, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      vmid = io.csr_dup(2).hgatp.asid,
      data = memRdata,
      levelUInt = 2.U,
      refill_prefetch_dup(2)
    )
    l3.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl3replace.access(refillIdx, victimWay)
    l3v := l3v | rfvOH
    l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l3h(refillIdx)(victimWay) := refill.req_info_dup(2).s2xlate

    for (i <- 0 until l2tlbParams.l3nWays) {
      l3RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l3 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
    XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l3_refillIdx")
    victimWay.suggestName(s"l3_victimWay")
    victimWayOH.suggestName(s"l3_victimWayOH")
    rfvOH.suggestName(s"l3_rfvOH")
  }

  // misc entries: super pages & invalid (faulting) leaves
  when (!flush_dup(0) && refill.levelOH.sp && (memPte(0).isLeaf() || memPte(0).isPf(refill.level_dup(0))) && !memPte(0).isAf()) {
    val refillIdx = spreplace.way // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: maybe LRU
    val rfOH = UIntToOH(refillIdx)
    sp(refillIdx).refill(
      refill.req_info_dup(0).vpn,
      io.csr_dup(0).satp.asid,
      io.csr_dup(0).hgatp.asid,
      memSelData(0),
      refill.level_dup(2),
      refill_prefetch_dup(0),
      !memPte(0).isPf(refill.level_dup(0))
    )
    spreplace.access(refillIdx)
    spv := spv | rfOH
    spg := spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U)
    sph(refillIdx) := refill.req_info_dup(0).s2xlate

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, io.csr_dup(0).satp.asid, memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
    XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"sp_refillIdx")
    rfOH.suggestName(s"sp_rfOH")
  }
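  // ECC flush: when stageResp reports an ECC error, the set index is rebuilt
  // from the vpn still held in stageResp and every way of that set (valid and
  // global bits alike) is invalidated, so the corrupted entry cannot hit again.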
  val l2eccFlush = resp_res.l2.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l2eccError, init = false.B)
  val l3eccFlush = resp_res.l3.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l3eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l2eccFlush, "l2tlb.cache.l2 ecc error, should not happen during simulation")
  XSError(l3eccFlush, "l2tlb.cache.l3 ecc error, should not happen during simulation")
  when (l2eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL2SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l2nWays, a.asUInt) }).asUInt
    l2v := l2v & ~flushMask
    l2g := l2g & ~flushMask
  }

  when (l3eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
    l3v := l3v & ~flushMask
    l3g := l3g & ~flushMask
  }

  // sfence (non-virtualization) for l3
  val sfence_valid_l3 = sfence_dup(3).valid && !sfence_dup(3).bits.hg && !sfence_dup(3).bits.hv
  when (sfence_valid_l3 && !io.csr_dup(3).priv.virt) {
    val sfence_vpn = sfence_dup(3).bits.addr(sfence_dup(3).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(3).bits.rs1/*va*/) {
      when (sfence_dup(3).bits.rs2) {
        // all va && all asid
        l3v := 0.U
      } .otherwise {
        // all va && specific asid except global
        l3v := l3v & l3g
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(3).bits.rs2) {
        // specific leaf of addr && all asid
        l3v := l3v & ~flushMask
      } .otherwise {
        // specific leaf of addr && specific asid
        l3v := l3v & (~flushMask | l3g)
      }
    }
  }

  // sfence under virtualization / hfence.vvma: simple implementation for l3
  val hfencev_valid_l3 = sfence_dup(3).valid && sfence_dup(3).bits.hv
  when (hfencev_valid_l3) {
    val flushMask = VecInit(l3h.map(_.map(_ === "b10".U))).asUInt
    l3v := l3v & ~flushMask // all VS-stage l3 PTEs
  }

  // hfence.gvma: simple implementation for l3
  val hfenceg_valid_l3 = sfence_dup(3).valid && sfence_dup(3).bits.hg
  when (hfenceg_valid_l3) {
    val flushMask = VecInit(l3h.map(_.map(_ === "b11".U))).asUInt
    l3v := l3v & ~flushMask // all G-stage l3 PTEs
  }
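  // sfence/hfence for the remaining structures (l1, sp, and the l2 valid
  // bits). Following the inline comments, bits.rs1 and bits.rs2 appear to flag
  // "register is x0" in the fencing instruction: rs1 set means no specific
  // address (flush all VAs) and rs2 set means no specific ASID. Global (g)
  // entries survive ASID-specific flushes.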
  val l1asidhit = VecInit(l1asids.map(_ === sfence_dup(0).bits.id)).asUInt
  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid) {
    val l1vmidhit = VecInit(l1vmids.map(_ === io.csr_dup(0).hgatp.asid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_ === io.csr_dup(0).hgatp.asid)).asUInt
    val l1hhit = VecInit(l1h.map(_(0) === io.csr_dup(0).priv.virt)).asUInt
    val sphhit = VecInit(sph.map(_(0) === io.csr_dup(0).priv.virt)).asUInt
    val l2hhit = VecInit(l2h.map(_.map(_(0) === io.csr_dup(0).priv.virt))).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    val l2h_set = getl2hSet(sfence_vpn)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l2v := l2v & ~l2hhit
        l1v := l1v & ~(l1hhit & VecInit(l1vmidhit.asBools.map { a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt }).asUInt)
        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map { a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt }).asUInt)
      } .otherwise {
        // all va && specific asid except global
        l2v := l2v & (l2g | ~l2hhit)
        l1v := l1v & ~(~l1g & l1hhit & l1asidhit & VecInit(l1vmidhit.asBools.map { a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt }).asUInt)
        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map { a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt }).asUInt)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & ~VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt, isGvpn = false.B))).asUInt
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & (~VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, s2xlate = io.csr_dup(0).priv.virt, isGvpn = false.B))).asUInt | spg)
      }
    }
  }

  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid) {
    val l1vmidhit = VecInit(l1vmids.map(_ === io.csr_dup(0).hgatp.asid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_ === io.csr_dup(0).hgatp.asid)).asUInt
    val l1hhit = VecInit(l1h.map(_ === "b10".U)).asUInt
    val sphhit = VecInit(sph.map(_ === "b10".U)).asUInt
    val l2hhit = VecInit(l2h.map(_.map(_ === "b10".U))).asUInt
    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l2v := l2v & ~l2hhit
        l1v := l1v & ~(l1hhit & l1vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      } .otherwise {
        l2v := l2v & (l2g | ~l2hhit)
        l1v := l1v & ~(~l1g & l1hhit & l1asidhit & l1vmidhit)
        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, ignoreAsid = true, s2xlate = true.B, isGvpn = false.B))).asUInt
      } .otherwise {
        spv := spv & (~VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, s2xlate = true.B, isGvpn = false.B))).asUInt | spg)
      }
    }
  }

  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when (hfenceg_valid) {
    val l1vmidhit = VecInit(l1vmids.map(_ === sfence_dup(0).bits.id)).asUInt
    val spvmidhit = VecInit(spvmids.map(_ === sfence_dup(0).bits.id)).asUInt
    val l1hhit = VecInit(l1h.map(_ === "b11".U)).asUInt
    val sphhit = VecInit(sph.map(_ === "b11".U)).asUInt
    val l2hhit = VecInit(l2h.map(_.map(_ === "b11".U))).asUInt
    val hfenceg_gvpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l2v := l2v & ~l2hhit
        l1v := l1v & ~l1hhit
        spv := spv & ~sphhit
      } .otherwise {
        l2v := l2v & ~l2hhit
        l1v := l1v & ~(l1hhit & l1vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B, isGvpn = true.B))).asUInt
      } .otherwise {
        spv := spv & ~VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B, isGvpn = true.B))).asUInt
      }
    }
  }
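  // InsideStageConnect forms the second half of a two-slot stage: a purely
  // combinational pass-through that only augments the per-level bypassed
  // flags. A flag is set, and then held until the stage advances, whenever a
  // concurrent refill covers the page this request is looking up.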
  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.foreach { case ((out_bypassed, in_bypassed), i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
      when (inFire) {
        bypassed_reg := bypassed_wire
      } .elsewhen (io.refill.valid) {
        bypassed_reg := bypassed_reg || bypassed_wire
      }

      out_bypassed := in_bypassed || bypassed_wire || (bypassed_reg && !inFire)
    }
  }
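  // The perf counters below come in four families: demand accesses ("access"),
  // prefetch-generated accesses ("pre_access"), and the "first" variants of
  // both, which count only requests flagged isFirst. The *_hit_pre counters
  // additionally require that the hitting entry itself was refilled by a
  // prefetch.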
  // Perf Count
  val resp_l3 = resp_res.l3.hit
  val resp_sp = resp_res.sp.hit
  val resp_l1_pre = resp_res.l1.pre
  val resp_l2_pre = resp_res.l2.pre
  val resp_l3_pre = resp_res.l3.pre
  val resp_sp_pre = resp_res.sp.pre
  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access", base_valid_access_0)
  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit", base_valid_access_0 && resp_l3)
  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)

  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre", base_valid_access_0 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("pre_access", base_valid_access_1)
  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && resp_l3)
  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
  XSPerfAccumulate("pre_pte_hit", base_valid_access_1 && io.resp.bits.hit)

  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access_first", base_valid_access_2)
  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_first", base_valid_access_2 && resp_l3)
  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
  XSPerfAccumulate("pte_hit_first", base_valid_access_2 && io.resp.bits.hit)

  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre_first", base_valid_access_2 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("pre_access_first", base_valid_access_3)
  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && resp_l3)
  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)

  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre && resp_l3)
  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre_first", base_valid_access_3 && (resp_l3_pre && resp_l3 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  XSPerfAccumulate("rwHazard", io.req.valid && !io.req.ready)
  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  l1AccessPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
  l2AccessPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
  l3AccessPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
  l2RefillPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
  l3RefillPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map { case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
  XSPerfAccumulate("l3Refill", Cat(l3RefillPerf).orR)
  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))
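  // Debug: dump the v/g bitmaps in the cycle an sfence arrives and again one
  // cycle later (RegNext), i.e. before and after the flush has been applied.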
  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")

  val perfEvents = Seq(
    ("access      ", base_valid_access_0),
    ("l1_hit      ", l1Hit),
    ("l2_hit      ", l2Hit),
    ("l3_hit      ", l3Hit),
    ("sp_hit      ", spHit),
    ("pte_hit     ", l3Hit || spHit),
    ("rwHazard    ", io.req.valid && !io.req.ready),
    ("out_blocked ", io.resp.valid && !io.resp.ready),
  )
  generatePerfEvent()
}