1/*************************************************************************************** 2* Copyright (c) 2021-2025 Beijing Institute of Open Source Chip (BOSC) 3* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences 4* Copyright (c) 2020-2021 Peng Cheng Laboratory 5* Copyright (c) 2024-2025 Institute of Information Engineering, Chinese Academy of Sciences 6* 7* XiangShan is licensed under Mulan PSL v2. 8* You can use this software according to the terms and conditions of the Mulan PSL v2. 9* You may obtain a copy of Mulan PSL v2 at: 10* http://license.coscl.org.cn/MulanPSL2 11* 12* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 13* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 14* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 15* 16* See the Mulan PSL v2 for more details. 17***************************************************************************************/ 18 19package xiangshan.cache.mmu 20 21import org.chipsalliance.cde.config.Parameters 22import chisel3._ 23import chisel3.util._ 24import xiangshan._ 25import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants} 26import utils._ 27import utility._ 28import coupledL2.utils.SplittedSRAM 29import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp} 30import freechips.rocketchip.tilelink._ 31import utility.mbist.MbistPipeline 32 33/* ptw cache caches the page table of all the three layers 34 * ptw cache resp at next cycle 35 * the cache should not be blocked 36 * when miss queue if full, just block req outside 37 */ 38 39class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle { 40 val hit = Bool() 41 val pre = Bool() 42 val ppn = UInt(gvpnLen.W) 43 val pbmt = UInt(ptePbmtLen.W) 44 val perm = new PtePermBundle() 45 val n = UInt(pteNLen.W) 46 val ecc = Bool() 47 val level = UInt(2.W) 48 val v = Bool() 49 val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle { 50 val jmp_bitmap_check = Bool() 51 val pte = UInt(XLEN.W) // Page Table Entry 52 }) 53 54 def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U, n: UInt = 0.U, 55 perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()), 56 ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B, jmp_bitmap_check: Bool = false.B, 57 pte: UInt = 0.U): Unit = { 58 this.hit := hit && !ecc 59 this.pre := pre 60 this.ppn := ppn 61 this.n := n 62 this.pbmt := pbmt 63 this.perm := perm 64 this.ecc := ecc && hit 65 this.level := level 66 this.v := valid 67 if (HasBitmapCheck) { 68 this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check 69 this.bitmapCheck.get.pte := pte 70 } 71 } 72} 73 74class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle { 75 assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!") 76 val hit = Bool() 77 val pre = Bool() 78 val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W)) 79 val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W)) 80 val perm = Vec(tlbcontiguous, new PtePermBundle()) 81 val ecc = Bool() 82 val level = UInt(2.W) 83 val v = Vec(tlbcontiguous, Bool()) 84 val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle { 85 val jmp_bitmap_check = Bool() 86 val hitway = UInt(l2tlbParams.l0nWays.W) 87 val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector 88 val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector 89 }) 90 91 def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = Vec(tlbcontiguous, 0.U), 92 perm: Vec[PtePermBundle] = Vec(tlbcontiguous, 0.U.asTypeOf(new PtePermBundle())), 93 ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = Vec(tlbcontiguous, true.B), 94 jmp_bitmap_check: Bool = false.B, 95 hitway: UInt = 0.U, ptes: Vec[UInt] , cfs: Vec[Bool]): Unit = { 96 this.hit := hit && !ecc 97 this.pre := pre 98 this.ppn := ppn 99 this.pbmt := pbmt 100 this.perm := perm 101 this.ecc := ecc && hit 102 this.level := level 103 this.v := valid 104 if (HasBitmapCheck) { 105 this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check 106 this.bitmapCheck.get.hitway := hitway 107 this.bitmapCheck.get.ptes := ptes 108 this.bitmapCheck.get.cfs := cfs 109 } 110 } 111} 112 113class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle { 114 val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None 115 val l2 = new PageCachePerPespBundle 116 val l1 = new PageCachePerPespBundle 117 val l0 = new PageCacheMergePespBundle 118 val sp = new PageCachePerPespBundle 119} 120 121class PtwCacheReq(implicit p: Parameters) extends PtwBundle { 122 val req_info = new L2TlbInnerBundle() 123 val isFirst = Bool() 124 val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool()) 125 val isHptwReq = Bool() 126 val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W) 127} 128 129class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst { 130 val req = Flipped(DecoupledIO(new PtwCacheReq())) 131 val resp = DecoupledIO(new Bundle { 132 val req_info = new L2TlbInnerBundle() 133 val isFirst = Bool() 134 val hit = Bool() 135 val prefetch = Bool() // is the entry fetched by prefetch 136 val bypassed = Bool() 137 val toFsm = new Bundle { 138 val l3Hit = if (EnableSv48) Some(Bool()) else None 139 val l2Hit = Bool() 140 val l1Hit = Bool() 141 val ppn = UInt(gvpnLen.W) 142 val stage1Hit = Bool() // find stage 1 pte in cache, but need to search stage 2 pte in cache at PTW 143 val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle { 144 val jmp_bitmap_check = Bool() // find pte in l0 or sp, but need bitmap check 145 val toLLPTW = Bool() 146 val hitway = UInt(l2tlbParams.l0nWays.W) 147 val pte = UInt(XLEN.W) // Page Table Entry 148 val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector 149 val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector 150 val SPlevel = UInt(log2Up(Level).W) 151 }) 152 } 153 val stage1 = new PtwMergeResp() 154 val isHptwReq = Bool() 155 val toHptw = new Bundle { 156 val l3Hit = if (EnableSv48) Some(Bool()) else None 157 val l2Hit = Bool() 158 val l1Hit = Bool() 159 val ppn = UInt(ppnLen.W) 160 val id = UInt(log2Up(l2tlbParams.llptwsize).W) 161 val resp = new HptwResp() // used if hit 162 val bypassed = Bool() 163 val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle { 164 val jmp_bitmap_check = Bool() // find pte in l0 or sp, but need bitmap check 165 val hitway = UInt(l2tlbParams.l0nWays.W) 166 val pte = UInt(XLEN.W) // Page Table Entry 167 val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector 168 val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector 169 val fromSP = Bool() 170 val SPlevel = UInt(log2Up(Level).W) 171 }) 172 } 173 }) 174 val refill = Flipped(ValidIO(new Bundle { 175 val ptes = UInt(blockBits.W) 176 val levelOH = new Bundle { 177 // NOTE: levelOH has (Level+1) bits, each stands for page cache entries 178 val sp = Bool() 179 val l0 = Bool() 180 val l1 = Bool() 181 val l2 = Bool() 182 val l3 = if (EnableSv48) Some(Bool()) else None 183 def apply(levelUInt: UInt, valid: Bool) = { 184 sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B) 185 l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B) 186 l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B) 187 l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B) 188 l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B)) 189 } 190 } 191 // duplicate level and sel_pte for each page caches, for better fanout 192 val req_info_dup = Vec(3, new L2TlbInnerBundle()) 193 val level_dup = Vec(3, UInt(log2Up(Level + 1).W)) 194 val sel_pte_dup = Vec(3, UInt(XLEN.W)) 195 })) 196 // when refill l0,save way info for late bitmap wakeup convenient 197 // valid at same cycle of refill.levelOH.l0 198 val l0_way_info = Option.when(HasBitmapCheck)(Output(UInt(l2tlbParams.l0nWays.W))) 199 val sfence_dup = Vec(4, Input(new SfenceBundle())) 200 val csr_dup = Vec(3, Input(new TlbCsrBundle())) 201 val bitmap_wakeup = Option.when(HasBitmapCheck)(Flipped(ValidIO(new Bundle { 202 val setIndex = Input(UInt(PtwL0SetIdxLen.W)) 203 val tag = Input(UInt(SPTagLen.W)) 204 val isSp = Input(Bool()) 205 val way_info = UInt(l2tlbParams.l0nWays.W) 206 val pte_index = UInt(sectortlbwidth.W) 207 val check_success = Bool() 208 }))) 209} 210 211class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents { 212 val io = IO(new PtwCacheIO) 213 val ecc = Code.fromString(l2tlbParams.ecc) 214 val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits) 215 val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits) 216 217 // use two additional regs to record corresponding cache entry whether via bitmap check 218 // 32(l0nSets)* 8 (l0nWays) * 8 (tlbcontiguous) 219 val l0BitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.l0nSets)(VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W)))))))) 220 val spBitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.spSize)(0.U(1.W)))) 221 222 val bitmapEnable = io.csr_dup(0).mbmc.BME === 1.U && io.csr_dup(0).mbmc.CMODE === 0.U 223 // TODO: four caches make the codes dirty, think about how to deal with it 224 225 val sfence_dup = io.sfence_dup 226 val refill = io.refill.bits 227 val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source)) 228 val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate)) 229 val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed) 230 val flush = flush_dup(0) 231 232 // when refill, refuce to accept new req 233 val rwHarzad = if (sramSinglePort) io.refill.valid else false.B 234 235 // handle hand signal and req_info 236 // TODO: replace with FlushableQueue 237 val stageReq = Wire(Decoupled(new PtwCacheReq())) // enq stage & read page cache valid 238 val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp 239 val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc 240 val stageResp = Wire(Decoupled(new PtwCacheReq())) // deq stage 241 242 val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush) // catch ram data 243 val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter 244 val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool())) 245 stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush)) // ecc flush 246 247 stageReq <> io.req 248 PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHarzad) 249 InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle) 250 PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush) 251 InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle) 252 PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush) 253 stageResp.ready := !stageResp.valid || io.resp.ready 254 255 // l3: level 3 non-leaf pte 256 val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None 257 val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None 258 val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None 259 val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None 260 val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None 261 val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None 262 263 // l2: level 2 non-leaf pte 264 val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen))) 265 val l2v = RegInit(0.U(l2tlbParams.l2Size.W)) 266 val l2g = Reg(UInt(l2tlbParams.l2Size.W)) 267 val l2asids = l2.map(_.asid) 268 val l2vmids = l2.map(_.vmid) 269 val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W))) 270 271 // l1: level 1 non-leaf pte 272 val l1 = Module(new SplittedSRAM( 273 l1EntryType, 274 set = l2tlbParams.l1nSets, 275 way = l2tlbParams.l1nWays, 276 waySplit = 1, 277 dataSplit = 4, 278 singlePort = sramSinglePort, 279 readMCP2 = false, 280 hasMbist = hasMbist 281 )) 282 val mbistPlL1 = MbistPipeline.PlaceMbistPipeline(1, s"MbistPipePtwL1", hasMbist) 283 val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W)) 284 val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W)) 285 val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W)))) 286 val l1asids = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(l2tlbParams.hashAsidWidth.W)))) 287 val l1vmids = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(l2tlbParams.hashAsidWidth.W)))) 288 def getl1vSet(vpn: UInt) = { 289 require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays)) 290 val set = genPtwL1SetIdx(vpn) 291 require(set.getWidth == log2Up(l2tlbParams.l1nSets)) 292 val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W))) 293 l1vVec(set) 294 } 295 def getl1hSet(vpn: UInt) = { 296 require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays)) 297 val set = genPtwL1SetIdx(vpn) 298 require(set.getWidth == log2Up(l2tlbParams.l1nSets)) 299 l1h(set) 300 } 301 302 // l0: level 0 leaf pte of 4KB pages 303 val l0 = Module(new SplittedSRAM( 304 l0EntryType, 305 set = l2tlbParams.l0nSets, 306 way = l2tlbParams.l0nWays, 307 waySplit = 2, 308 dataSplit = 4, 309 singlePort = sramSinglePort, 310 readMCP2 = false, 311 hasMbist = hasMbist 312 )) 313 val mbistPlL0 = MbistPipeline.PlaceMbistPipeline(1, s"MbistPipePtwL0", hasMbist) 314 val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W)) 315 val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W)) 316 val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W)))) 317 val l0asids = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashAsidWidth.W)))) 318 val l0vmids = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashAsidWidth.W)))) 319 val l0vpns = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(l2tlbParams.hashVpnWidth.W)))) 320 def getl0vSet(vpn: UInt) = { 321 require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays)) 322 val set = genPtwL0SetIdx(vpn) 323 require(set.getWidth == log2Up(l2tlbParams.l0nSets)) 324 val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W))) 325 l0vVec(set) 326 } 327 def getl0hSet(vpn: UInt) = { 328 require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays)) 329 val set = genPtwL0SetIdx(vpn) 330 require(set.getWidth == log2Up(l2tlbParams.l0nSets)) 331 l0h(set) 332 } 333 334 // sp: level 1/2/3 leaf pte of 512GB/1GB/2MB super pages 335 val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true, hasNapot = true))) 336 val spv = RegInit(0.U(l2tlbParams.spSize.W)) 337 val spg = Reg(UInt(l2tlbParams.spSize.W)) 338 val spasids = sp.map(_.asid) 339 val spvmids = sp.map(_.vmid) 340 val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W))) 341 342 if (HasBitmapCheck) { 343 // wakeup corresponding entry 344 when (io.bitmap_wakeup.get.valid) { 345 when (io.bitmap_wakeup.get.bits.isSp) { 346 for (i <- 0 until l2tlbParams.spSize) { 347 when (sp(i).tag === io.bitmap_wakeup.get.bits.tag && spv(i) === 1.U) { 348 spBitmapReg(i) := io.bitmap_wakeup.get.bits.check_success 349 } 350 } 351 } .otherwise { 352 val wakeup_setindex = io.bitmap_wakeup.get.bits.setIndex 353 l0BitmapReg(wakeup_setindex)(OHToUInt(io.bitmap_wakeup.get.bits.way_info))(io.bitmap_wakeup.get.bits.pte_index) := io.bitmap_wakeup.get.bits.check_success 354 assert(l0v(wakeup_setindex * l2tlbParams.l0nWays.U + OHToUInt(io.bitmap_wakeup.get.bits.way_info)) === 1.U, 355 "Wakeuped entry must be valid!") 356 } 357 } 358 } 359 360 // Access Perf 361 val l3AccessPerf = if(EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None 362 val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool())) 363 val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool())) 364 val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool())) 365 val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool())) 366 if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B)) 367 l2AccessPerf.map(_ := false.B) 368 l1AccessPerf.map(_ := false.B) 369 l0AccessPerf.map(_ := false.B) 370 spAccessPerf.map(_ := false.B) 371 372 373 374 def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = { 375 (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3)) 376 } 377 // NOTE: not actually bypassed, just check if hit, re-access the page cache 378 def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = { 379 val change_h = MuxLookup(h_search, noS2xlate)(Seq( 380 allStage -> onlyStage1, 381 onlyStage1 -> onlyStage1, 382 onlyStage2 -> onlyStage2 383 )) 384 val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq( 385 allStage -> onlyStage1, 386 onlyStage1 -> onlyStage1, 387 onlyStage2 -> onlyStage2 388 )) 389 val refill_vpn = io.refill.bits.req_info_dup(0).vpn 390 io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h 391 } 392 393 val vpn_search = stageReq.bits.req_info.vpn 394 val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq( 395 allStage -> onlyStage1, 396 onlyStage1 -> onlyStage1, 397 onlyStage2 -> onlyStage2 398 )) 399 400 // l3 401 val l3Hit = if(EnableSv48) Some(Wire(Bool())) else None 402 val l3HitPPN = if(EnableSv48) Some(Wire(UInt(ppnLen.W))) else None 403 val l3HitPbmt = if(EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None 404 val l3Pre = if(EnableSv48) Some(Wire(Bool())) else None 405 val ptwl3replace = if(EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None 406 if (EnableSv48) { 407 val hitVecT = l3.get.zipWithIndex.map { 408 case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate) 409 && l3v.get(i) && h_search === l3h.get(i)) 410 } 411 val hitVec = hitVecT.map(RegEnable(_, stageReq.fire)) 412 413 // stageDelay, but check for l3 414 val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle) 415 val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle) 416 val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle) 417 val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle) 418 419 when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) } 420 421 l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle} 422 for (i <- 0 until l2tlbParams.l3Size) { 423 XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n") 424 } 425 XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n") 426 XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n") 427 428 VecInit(hitVecT).suggestName(s"l3_hitVecT") 429 VecInit(hitVec).suggestName(s"l3_hitVec") 430 431 // synchronize with other entries with RegEnable 432 l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire)) 433 l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire)) 434 l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire)) 435 l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire)) 436 } 437 438 // l2 439 val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size) 440 val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = { 441 val hitVecT = l2.zipWithIndex.map { 442 case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate) 443 && l2v(i) && h_search === l2h(i)) 444 } 445 val hitVec = hitVecT.map(RegEnable(_, stageReq.fire)) 446 447 // stageDelay, but check for l2 448 val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle) 449 val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle) 450 val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle) 451 val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle) 452 453 when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) } 454 455 l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle} 456 for (i <- 0 until l2tlbParams.l2Size) { 457 XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n") 458 } 459 XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n") 460 XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n") 461 462 VecInit(hitVecT).suggestName(s"l2_hitVecT") 463 VecInit(hitVec).suggestName(s"l2_hitVec") 464 465 // synchronize with other entries with RegEnable 466 (RegEnable(hit, stageDelay(1).fire), 467 RegEnable(hitPPN, stageDelay(1).fire), 468 RegEnable(hitPbmt, stageDelay(1).fire), 469 RegEnable(hitPre, stageDelay(1).fire)) 470 } 471 472 // l1 473 val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer,l2tlbParams.l1nWays,l2tlbParams.l1nSets) 474 val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = { 475 val ridx = genPtwL1SetIdx(vpn_search) 476 l1.io.r.req.valid := stageReq.fire 477 l1.io.r.req.bits.apply(setIdx = ridx) 478 val vVec_req = getl1vSet(vpn_search) 479 val hVec_req = getl1hSet(vpn_search) 480 481 // delay one cycle after sram read 482 val delay_vpn = stageDelay(0).bits.req_info.vpn 483 val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq( 484 allStage -> onlyStage1, 485 onlyStage1 -> onlyStage1, 486 onlyStage2 -> onlyStage2 487 )) 488 val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle) 489 val vVec_delay = RegEnable(vVec_req, stageReq.fire) 490 val hVec_delay = RegEnable(hVec_req, stageReq.fire) 491 val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) => 492 wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)}) 493 494 // check hit and ecc 495 val check_vpn = stageCheck(0).bits.req_info.vpn 496 val ramDatas = RegEnable(data_resp, stageDelay(1).fire) 497 val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools 498 499 val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire) 500 val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas) 501 val hitWayData = hitWayEntry.entries 502 val hit = ParallelOR(hitVec) 503 val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W))) 504 val eccError = WireInit(false.B) 505 if (l2tlbParams.enablePTWECC) { 506 eccError := hitWayEntry.decode() 507 } else { 508 eccError := false.B 509 } 510 511 ridx.suggestName(s"l1_ridx") 512 ramDatas.suggestName(s"l1_ramDatas") 513 hitVec.suggestName(s"l1_hitVec") 514 hitWayData.suggestName(s"l1_hitWayData") 515 hitWay.suggestName(s"l1_hitWay") 516 517 when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) } 518 519 l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle } 520 XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n") 521 for (i <- 0 until l2tlbParams.l1nWays) { 522 XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)} l1v:${vVec(i)} hit:${hit}\n") 523 } 524 XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n") 525 526 (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError) 527 } 528 val te = ClockGate.genTeSink 529 val l0_masked_clock = ClockGate(te.cgen, stageReq.fire | (!flush_dup(0) && refill.levelOH.l0) | mbistPlL0.map(_.mbist.req).getOrElse(false.B), clock) 530 val l1_masked_clock = ClockGate(te.cgen, stageReq.fire | (!flush_dup(1) && refill.levelOH.l1) | mbistPlL1.map(_.mbist.req).getOrElse(false.B), clock) 531 l0.clock := l0_masked_clock 532 l1.clock := l1_masked_clock 533 // l0 534 val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer,l2tlbParams.l0nWays,l2tlbParams.l0nSets) 535 val (l0Hit, l0HitData, l0Pre, l0eccError, l0HitWay, l0BitmapCheckResult, l0JmpBitmapCheck) = { 536 val ridx = genPtwL0SetIdx(vpn_search) 537 l0.io.r.req.valid := stageReq.fire 538 l0.io.r.req.bits.apply(setIdx = ridx) 539 val vVec_req = getl0vSet(vpn_search) 540 val hVec_req = getl0hSet(vpn_search) 541 542 // delay one cycle after sram read 543 val delay_vpn = stageDelay(0).bits.req_info.vpn 544 val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq( 545 allStage -> onlyStage1, 546 onlyStage1 -> onlyStage1, 547 onlyStage2 -> onlyStage2 548 )) 549 val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle) 550 val vVec_delay = RegEnable(vVec_req, stageReq.fire) 551 val hVec_delay = RegEnable(hVec_req, stageReq.fire) 552 val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) => 553 wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)}) 554 555 // check hit and ecc 556 val check_vpn = stageCheck(0).bits.req_info.vpn 557 val ramDatas = RegEnable(data_resp, stageDelay(1).fire) 558 val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools 559 560 val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire) 561 val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas) 562 val hitWayData = hitWayEntry.entries 563 val hitWayEcc = hitWayEntry.ecc 564 val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W))) 565 566 val ishptw = RegEnable(stageDelay(0).bits.isHptwReq,stageDelay(1).fire) 567 val s2x_info = RegEnable(stageDelay(0).bits.req_info.s2xlate,stageDelay(1).fire) 568 val pte_index = RegEnable(stageDelay(0).bits.req_info.vpn(sectortlbwidth - 1, 0),stageDelay(1).fire) 569 val jmp_bitmap_check = WireInit(false.B) 570 val hit = WireInit(false.B) 571 val l0bitmapreg = WireInit((VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W))))))) 572 if (HasBitmapCheck) { 573 l0bitmapreg := RegEnable(RegNext(l0BitmapReg(ridx)), stageDelay(1).fire) 574 // cause llptw will trigger bitmapcheck 575 // add a coniditonal logic 576 // (s2x_info =/= allStage || ishptw) 577 hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 1.U, ParallelOR(hitVec)) 578 when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 0.U) { 579 jmp_bitmap_check := true.B 580 } 581 } else { 582 hit := ParallelOR(hitVec) 583 } 584 val eccError = WireInit(false.B) 585 if (l2tlbParams.enablePTWECC) { 586 eccError := hitWayEntry.decode() 587 } else { 588 eccError := false.B 589 } 590 591 when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) } 592 593 l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle } 594 XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n") 595 for (i <- 0 until l2tlbParams.l0nWays) { 596 XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)} l0v:${vVec(i)} hit:${hitVec(i)}\n") 597 } 598 XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n") 599 600 ridx.suggestName(s"l0_ridx") 601 ramDatas.suggestName(s"l0_ramDatas") 602 hitVec.suggestName(s"l0_hitVec") 603 hitWay.suggestName(s"l0_hitWay") 604 605 (hit, hitWayData, hitWayData.prefetch, eccError, UIntToOH(hitWay), l0bitmapreg(hitWay), jmp_bitmap_check) 606 } 607 val l0HitPPN = l0HitData.ppns 608 val l0HitPbmt = l0HitData.pbmts 609 val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle))) 610 val l0HitValid = VecInit(l0HitData.onlypf.map(!_)) 611 val l0Ptes = WireInit(VecInit(Seq.fill(tlbcontiguous)(0.U(XLEN.W)))) // L0 lavel Page Table Entry Vector 612 val l0cfs = WireInit(VecInit(Seq.fill(tlbcontiguous)(false.B))) // L0 lavel Bitmap Check Failed Vector 613 if (HasBitmapCheck) { 614 for (i <- 0 until tlbcontiguous) { 615 l0Ptes(i) := Cat(l0HitData.pbmts(i).asUInt,l0HitPPN(i), 0.U(2.W),l0HitPerm(i).asUInt,l0HitValid(i).asUInt) 616 l0cfs(i) := !l0BitmapCheckResult(i) 617 } 618 } 619 620 // super page 621 val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize) 622 val (spHit, spHitData, spPre, spValid, spJmpBitmapCheck) = { 623 val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, allType = true, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) } 624 val hitVec = hitVecT.map(RegEnable(_, stageReq.fire)) 625 val hitData = ParallelPriorityMux(hitVec zip sp) 626 val ishptw = RegEnable(stageReq.bits.isHptwReq, stageReq.fire) 627 val s2x_info = RegEnable(stageReq.bits.req_info.s2xlate, stageReq.fire) 628 val jmp_bitmap_check = WireInit(false.B) 629 val hit = WireInit(false.B) 630 if (HasBitmapCheck) { 631 hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 1.U, ParallelOR(hitVec)) 632 when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 0.U) { 633 jmp_bitmap_check := true.B 634 } 635 } else { 636 hit := ParallelOR(hitVec) 637 } 638 639 when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) } 640 641 spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle } 642 for (i <- 0 until l2tlbParams.spSize) { 643 XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n") 644 } 645 XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n") 646 647 VecInit(hitVecT).suggestName(s"sp_hitVecT") 648 VecInit(hitVec).suggestName(s"sp_hitVec") 649 650 (RegEnable(hit, stageDelay(1).fire), 651 RegEnable(hitData, stageDelay(1).fire), 652 RegEnable(hitData.prefetch, stageDelay(1).fire), 653 RegEnable(hitData.v, stageDelay(1).fire), 654 RegEnable(jmp_bitmap_check, stageDelay(1).fire)) 655 } 656 val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle)) 657 val spHitLevel = spHitData.level.getOrElse(0.U) 658 val spPte = Cat(spHitData.pbmt.asUInt,spHitData.ppn, 0.U(2.W), spHitPerm.asUInt,spHitData.v.asUInt) // Super-page Page Table Entry 659 660 val check_res = Wire(new PageCacheRespBundle) 661 check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get, l3HitPbmt.get)) 662 check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt) 663 check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError) 664 check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid, jmp_bitmap_check = l0JmpBitmapCheck, hitway = l0HitWay, ptes = l0Ptes, cfs = l0cfs) 665 check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitData.n.getOrElse(0.U), spHitPerm, false.B, spHitLevel, spValid, spJmpBitmapCheck, spPte) 666 667 val resp_res = Reg(new PageCacheRespBundle) 668 when (stageCheck(1).fire) { resp_res := check_res } 669 670 // stageResp bypass 671 val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool())) 672 bypassed.indices.foreach(i => 673 bypassed(i) := stageResp.bits.bypassed(i) || 674 ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate), 675 OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid) 676 ) 677 678 // stageResp bypass to hptw 679 val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool())) 680 hptw_bypassed.indices.foreach(i => 681 hptw_bypassed(i) := stageResp.bits.bypassed(i) || 682 ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate), 683 io.resp.fire) 684 ) 685 686 val isAllStage = stageResp.bits.req_info.s2xlate === allStage 687 val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2 688 val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage 689 val idx = stageResp.bits.req_info.vpn(2, 0) 690 val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v) 691 io.resp.bits.req_info := stageResp.bits.req_info 692 io.resp.bits.isFirst := stageResp.bits.isFirst 693 io.resp.bits.hit := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf) 694 if (EnableSv48) { 695 io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage 696 } else { 697 io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage 698 } 699 io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit 700 io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq) 701 io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq 702 io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq 703 io.resp.bits.toFsm.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn)) 704 io.resp.bits.toFsm.stage1Hit := stage1Hit 705 if (HasBitmapCheck) { 706 io.resp.bits.toFsm.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check 707 io.resp.bits.toFsm.bitmapCheck.get.toLLPTW := resp_res.l0.bitmapCheck.get.jmp_bitmap_check && (stageResp.bits.req_info.s2xlate === noS2xlate || stageResp.bits.req_info.s2xlate === onlyStage1) 708 io.resp.bits.toFsm.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway 709 io.resp.bits.toFsm.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte 710 io.resp.bits.toFsm.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes 711 io.resp.bits.toFsm.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs 712 io.resp.bits.toFsm.bitmapCheck.get.SPlevel := resp_res.sp.level 713 } 714 715 io.resp.bits.isHptwReq := stageResp.bits.isHptwReq 716 if (EnableSv48) { 717 io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq 718 } else { 719 io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq 720 } 721 io.resp.bits.toHptw.id := stageResp.bits.hptwId 722 io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq) 723 io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq 724 io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq 725 io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0) 726 io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn 727 io.resp.bits.toHptw.resp.entry.asid := DontCare 728 io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid) 729 io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level)) 730 io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source) 731 io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0) 732 io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt) 733 io.resp.bits.toHptw.resp.entry.n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U)) 734 io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm)) 735 io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v) 736 io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v 737 io.resp.bits.toHptw.resp.gaf := false.B 738 if (HasBitmapCheck) { 739 io.resp.bits.toHptw.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check 740 io.resp.bits.toHptw.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway 741 io.resp.bits.toHptw.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte 742 io.resp.bits.toHptw.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes 743 io.resp.bits.toHptw.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs 744 io.resp.bits.toHptw.bitmapCheck.get.fromSP := resp_res.sp.bitmapCheck.get.jmp_bitmap_check 745 io.resp.bits.toHptw.bitmapCheck.get.SPlevel := resp_res.sp.level 746 } 747 748 io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3)) 749 io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare 750 io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid)) 751 if (EnableSv48) { 752 io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U, 753 Mux(resp_res.sp.hit, resp_res.sp.level, 754 Mux(resp_res.l1.hit, 1.U, 755 Mux(resp_res.l2.hit, 2.U, 3.U)))))) 756 } else { 757 io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U, 758 Mux(resp_res.sp.hit, resp_res.sp.level, 759 Mux(resp_res.l1.hit, 1.U, 2.U))))) 760 } 761 io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source)) 762 for (i <- 0 until tlbcontiguous) { 763 if (EnableSv48) { 764 io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth), 765 Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth), 766 Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth), 767 Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth), 768 resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth))))) 769 io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0), 770 Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0), 771 Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0), 772 Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0), 773 resp_res.l3.get.ppn(sectortlbwidth - 1, 0))))) 774 io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i), 775 Mux(resp_res.sp.hit, resp_res.sp.v, 776 Mux(resp_res.l1.hit, resp_res.l1.v, 777 Mux(resp_res.l2.hit, resp_res.l2.v, 778 resp_res.l3.get.v)))) 779 } else { 780 io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth), 781 Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth), 782 Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth), 783 resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth)))) 784 io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0), 785 Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0), 786 Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0), 787 resp_res.l2.ppn(sectortlbwidth - 1, 0)))) 788 io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i), 789 Mux(resp_res.sp.hit, resp_res.sp.v, 790 Mux(resp_res.l1.hit, resp_res.l1.v, 791 resp_res.l2.v))) 792 } 793 io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i), 794 Mux(resp_res.sp.hit, resp_res.sp.pbmt, 795 Mux(resp_res.l1.hit, resp_res.l1.pbmt, 796 resp_res.l2.pbmt))) 797 io.resp.bits.stage1.entry(i).n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U)) 798 io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle)))) 799 io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v 800 io.resp.bits.stage1.entry(i).af := false.B 801 io.resp.bits.stage1.entry(i).cf := l0cfs(i) // L0 lavel Bitmap Check Failed Vector 802 } 803 io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools 804 io.resp.bits.stage1.not_super := Mux(resp_res.l0.hit, true.B, false.B) 805 io.resp.bits.stage1.not_merge := false.B 806 io.resp.valid := stageResp.valid 807 XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit") 808 809 // refill Perf 810 val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None 811 val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool())) 812 val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool())) 813 val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool())) 814 val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool())) 815 l3RefillPerf.map(_.map(_ := false.B)) 816 l2RefillPerf.map(_ := false.B) 817 l1RefillPerf.map(_ := false.B) 818 l0RefillPerf.map(_ := false.B) 819 spRefillPerf.map(_ := false.B) 820 821 // refill 822 l1.io.w.req <> DontCare 823 l0.io.w.req <> DontCare 824 l1.io.w.req.valid := false.B 825 l0.io.w.req.valid := false.B 826 827 val memRdata = refill.ptes 828 val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle)) 829 val memSelData = io.refill.bits.sel_pte_dup 830 val memPte = memSelData.map(a => a.asTypeOf(new PteBundle)) 831 val mPBMTE = io.csr.mPBMTE 832 val hPBMTE = io.csr.hPBMTE 833 val pbmte = Mux(refill.req_info_dup(0).s2xlate === onlyStage1 || refill.req_info_dup(0).s2xlate === allStage, hPBMTE, mPBMTE) 834 835 def Tran2D(flushMask: UInt): Vec[UInt] = { 836 val tran2D = Wire(Vec(l2tlbParams.l0nSets,UInt(l2tlbParams.l0nWays.W))) 837 for (i <- 0 until l2tlbParams.l0nSets) { 838 tran2D(i) := flushMask((i + 1) * l2tlbParams.l0nWays - 1, i * l2tlbParams.l0nWays) 839 } 840 tran2D 841 } 842 def updateL0BitmapReg(l0BitmapReg: Vec[Vec[Vec[UInt]]], tran2D: Vec[UInt]) = { 843 for (i <- 0 until l2tlbParams.l0nSets) { 844 for (j <- 0 until l2tlbParams.l0nWays) { 845 when (tran2D(i)(j) === 0.U) { 846 for (k <- 0 until tlbcontiguous) { 847 l0BitmapReg(i)(j)(k) := 0.U 848 } 849 } 850 } 851 } 852 } 853 def TranVec(flushMask: UInt): Vec[UInt] = { 854 val vec = Wire(Vec(l2tlbParams.spSize,UInt(1.W))) 855 for (i <- 0 until l2tlbParams.spSize) { 856 vec(i) := flushMask(i) 857 } 858 vec 859 } 860 def updateSpBitmapReg(spBitmapReg: Vec[UInt], vec : Vec[UInt]) = { 861 for (i <- 0 until l2tlbParams.spSize) { 862 spBitmapReg(i) := spBitmapReg(i) & vec(i) 863 } 864 } 865 866 // TODO: handle sfenceLatch outsize 867 if (EnableSv48) { 868 val l3Refill = 869 !flush_dup(2) && 870 refill.levelOH.l3.get && 871 !memPte(2).isLeaf() && 872 memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode) 873 val l3RefillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way).suggestName(s"l3_refillIdx") 874 val l3RfOH = UIntToOH(l3RefillIdx).asUInt.suggestName(s"l3_rfOH") 875 when (l3Refill) { 876 l3.get(l3RefillIdx).refill( 877 refill.req_info_dup(2).vpn, 878 Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), 879 io.csr_dup(2).hgatp.vmid, 880 memSelData(2), 881 3.U, 882 refill_prefetch_dup(2) 883 ) 884 ptwl2replace.access(l3RefillIdx) 885 l3v.get := l3v.get | l3RfOH 886 l3g.get := (l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U) 887 l3h.get(l3RefillIdx) := refill_h(2) 888 889 for (i <- 0 until l2tlbParams.l3Size) { 890 l3RefillPerf.get(i) := i.U === l3RefillIdx 891 } 892 } 893 XSDebug(l3Refill, p"[l3 refill] refillIdx:${l3RefillIdx} refillEntry:${l3.get(l3RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n") 894 XSDebug(l3Refill, p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | l3RfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U))}\n") 895 } 896 897 // L2 refill 898 val l2Refill = 899 !flush_dup(2) && 900 refill.levelOH.l2 && 901 !memPte(2).isLeaf() && 902 memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode) 903 val l2RefillIdx = replaceWrapper(l2v, ptwl2replace.way).suggestName(s"l2_refillIdx") 904 val l2RfOH = UIntToOH(l2RefillIdx).asUInt.suggestName(s"l2_rfOH") 905 when ( 906 l2Refill 907 ) { 908 l2(l2RefillIdx).refill( 909 refill.req_info_dup(2).vpn, 910 Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), 911 io.csr_dup(2).hgatp.vmid, 912 memSelData(2), 913 2.U, 914 refill_prefetch_dup(2) 915 ) 916 ptwl2replace.access(l2RefillIdx) 917 l2v := l2v | l2RfOH 918 l2g := (l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U) 919 l2h(l2RefillIdx) := refill_h(2) 920 921 for (i <- 0 until l2tlbParams.l2Size) { 922 l2RefillPerf(i) := i.U === l2RefillIdx 923 } 924 } 925 XSDebug(l2Refill, p"[l2 refill] refillIdx:${l2RefillIdx} refillEntry:${l2(l2RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n") 926 XSDebug(l2Refill, p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | l2RfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U))}\n") 927 928 // L1 refill 929 val l1Refill = !flush_dup(1) && refill.levelOH.l1 930 val l1RefillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn).suggestName(s"l1_refillIdx") 931 val l1VictimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(l1RefillIdx)).suggestName(s"l1_victimWay") 932 val l1VictimWayOH = UIntToOH(l1VictimWay).suggestName(s"l1_victimWayOH") 933 val l1RfvOH = UIntToOH(Cat(l1RefillIdx, l1VictimWay)).asUInt.suggestName(s"l1_rfvOH") 934 val l1Wdata = Wire(l1EntryType) 935 val l1Wvpn = refill.req_info_dup(1).vpn 936 val l1Wasid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid) 937 l1Wdata.gen( 938 vpn = l1Wvpn, 939 asid = l1Wasid, 940 vmid = io.csr_dup(1).hgatp.vmid, 941 data = memRdata, 942 levelUInt = 1.U, 943 refill_prefetch_dup(1), 944 refill.req_info_dup(1).s2xlate, 945 pbmte, 946 io.csr_dup(1).vsatp.mode 947 ) 948 when (l1Refill) { 949 l1.io.w.apply( 950 valid = true.B, 951 setIdx = l1RefillIdx, 952 data = l1Wdata, 953 waymask = l1VictimWayOH 954 ) 955 ptwl1replace.access(l1RefillIdx, l1VictimWay) 956 l1v := l1v | l1RfvOH 957 l1g := l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U) 958 l1h(l1RefillIdx)(l1VictimWay) := refill_h(1) 959 l1asids(l1RefillIdx)(l1VictimWay) := XORFold(l1Wasid, l2tlbParams.hashAsidWidth) 960 l1vmids(l1RefillIdx)(l1VictimWay) := XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth) 961 962 for (i <- 0 until l2tlbParams.l1nWays) { 963 l1RefillPerf(i) := i.U === l1VictimWay 964 } 965 } 966 XSDebug(l1Refill, p"[l1 refill] refillIdx:0x${Hexadecimal(l1RefillIdx)} victimWay:${l1VictimWay} victimWayOH:${Binary(l1VictimWayOH)} rfvOH(in UInt):${Cat(l1RefillIdx, l1VictimWay)}\n") 967 XSDebug(l1Refill, p"[l1 refill] refilldata:0x${l1Wdata}\n") 968 XSDebug(l1Refill, p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | l1RfvOH)}\n") 969 XSDebug(l1Refill, p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U))}\n") 970 971 // L0 refill 972 val l0Refill = !flush_dup(0) && refill.levelOH.l0 && !memPte(0).isNapot(refill.level_dup(0)) 973 val l0RefillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn).suggestName(s"l0_refillIdx") 974 val l0VictimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(l0RefillIdx)).suggestName(s"l0_victimWay") 975 val l0VictimWayOH = UIntToOH(l0VictimWay).asUInt.suggestName(s"l0_victimWayOH") 976 val l0RfvOH = UIntToOH(Cat(l0RefillIdx, l0VictimWay)).suggestName(s"l0_rfvOH") 977 val l0Wdata = Wire(l0EntryType) 978 // trans the l0 way info, for late wakeup logic 979 if (HasBitmapCheck) { 980 io.l0_way_info.get := l0VictimWayOH 981 } 982 val l0Wvpn = refill.req_info_dup(0).vpn 983 val l0Wasid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid) 984 l0Wdata.gen( 985 vpn = l0Wvpn, 986 asid = l0Wasid, 987 vmid = io.csr_dup(0).hgatp.vmid, 988 data = memRdata, 989 levelUInt = 0.U, 990 refill_prefetch_dup(0), 991 refill.req_info_dup(0).s2xlate, 992 pbmte, 993 io.csr_dup(0).vsatp.mode 994 ) 995 when (l0Refill) { 996 l0.io.w.apply( 997 valid = true.B, 998 setIdx = l0RefillIdx, 999 data = l0Wdata, 1000 waymask = l0VictimWayOH 1001 ) 1002 ptwl0replace.access(l0RefillIdx, l0VictimWay) 1003 l0v := l0v | l0RfvOH 1004 l0g := l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U) 1005 l0h(l0RefillIdx)(l0VictimWay) := refill_h(0) 1006 if (HasBitmapCheck) {updateL0BitmapReg(l0BitmapReg, Tran2D(~l0RfvOH))} 1007 l0asids(l0RefillIdx)(l0VictimWay) := XORFold(l0Wasid, l2tlbParams.hashAsidWidth) 1008 l0vmids(l0RefillIdx)(l0VictimWay) := XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth) 1009 l0vpns(l0RefillIdx)(l0VictimWay) := XORFold(l0Wvpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth) 1010 1011 for (i <- 0 until l2tlbParams.l0nWays) { 1012 l0RefillPerf(i) := i.U === l0VictimWay 1013 } 1014 } 1015 XSDebug(l0Refill, p"[l0 refill] refillIdx:0x${Hexadecimal(l0RefillIdx)} victimWay:${l0VictimWay} victimWayOH:${Binary(l0VictimWayOH)} rfvOH(in UInt):${Cat(l0RefillIdx, l0VictimWay)}\n") 1016 XSDebug(l0Refill, p"[l0 refill] refilldata:0x${l0Wdata}\n") 1017 XSDebug(l0Refill, p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | l0RfvOH)}\n") 1018 XSDebug(l0Refill, p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U))}\n") 1019 1020 1021 // misc entries: super & invalid 1022 val spRefill = 1023 !flush_dup(0) && 1024 (refill.levelOH.sp || (refill.levelOH.l0 && memPte(0).isNapot(refill.level_dup(0)))) && 1025 ((memPte(0).isLeaf() && memPte(0).canRefill(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte, io.csr_dup(0).vsatp.mode)) || 1026 memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte)) 1027 val spRefillIdx = spreplace.way.suggestName(s"sp_refillIdx") // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU 1028 val spRfOH = UIntToOH(spRefillIdx).asUInt.suggestName(s"sp_rfOH") 1029 when (spRefill) { 1030 sp(spRefillIdx).refill( 1031 refill.req_info_dup(0).vpn, 1032 Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), 1033 io.csr_dup(0).hgatp.vmid, 1034 memSelData(0), 1035 refill.level_dup(0), 1036 refill_prefetch_dup(0), 1037 !memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte) 1038 ) 1039 spreplace.access(spRefillIdx) 1040 spv := spv | spRfOH 1041 spg := spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U) 1042 sph(spRefillIdx) := refill_h(0) 1043 if (HasBitmapCheck) {updateSpBitmapReg(spBitmapReg, TranVec(~spRfOH))} 1044 1045 for (i <- 0 until l2tlbParams.spSize) { 1046 spRefillPerf(i) := i.U === spRefillIdx 1047 } 1048 } 1049 XSDebug(spRefill, p"[sp refill] refillIdx:${spRefillIdx} refillEntry:${sp(spRefillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n") 1050 XSDebug(spRefill, p"[sp refill] spv:${Binary(spv)}->${Binary(spv | spRfOH)} spg:${Binary(spg)}->${Binary(spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U))}\n") 1051 1052 val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B) 1053 val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B) 1054 val eccVpn = stageResp.bits.req_info.vpn 1055 1056 XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage") 1057 XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage") 1058 when (l1eccFlush) { 1059 val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn)) 1060 val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt 1061 l1v := l1v & ~flushMask 1062 l1g := l1g & ~flushMask 1063 } 1064 1065 when (l0eccFlush) { 1066 val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn)) 1067 val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt 1068 l0v := l0v & ~flushMask 1069 l0g := l0g & ~flushMask 1070 } 1071 1072 // sfence logic 1073 val l0hashAsid = XORFold(sfence_dup(0).bits.id, l2tlbParams.hashAsidWidth) 1074 val l1hashAsid = XORFold(sfence_dup(1).bits.id, l2tlbParams.hashAsidWidth) 1075 val l0asidhit = VecInit(l0asids.flatMap(_.map(_ === l0hashAsid))).asUInt 1076 val l1asidhit = VecInit(l1asids.flatMap(_.map(_ === l1hashAsid))).asUInt 1077 val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt 1078 val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt 1079 1080 val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv 1081 when (sfence_valid) { 1082 val l0hashVmid = XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth) 1083 val l1hashVmid = XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth) 1084 val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt 1085 val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt 1086 val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt 1087 val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt 1088 1089 val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt 1090 val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt 1091 val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt 1092 val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt 1093 val l0virthit = l0hhit & VecInit(l0vmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt 1094 val l1virthit = l1hhit & VecInit(l1vmidhit.asBools.map{a => io.csr_dup(1).priv.virt && a || !io.csr_dup(1).priv.virt}).asUInt 1095 val l2virthit = l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt 1096 val spvirthit = sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt 1097 1098 val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen) 1099 val l0hashVpn = XORFold(sfence_vpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth) 1100 val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt 1101 val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(sfence_vpn)) 1102 val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt 1103 1104 when (sfence_dup(0).bits.rs1/*va*/) { 1105 when (sfence_dup(0).bits.rs2) { 1106 // all va && all asid 1107 l0v := l0v & ~l0virthit 1108 l1v := l1v & ~l1virthit 1109 l2v := l2v & ~l2virthit 1110 spv := spv & ~spvirthit 1111 } .otherwise { 1112 // all va && specific asid except global 1113 l0v := l0v & ~(l0virthit & ~l0g & l0asidhit) 1114 l1v := l1v & ~(l1virthit & ~l1g & l1asidhit) 1115 l2v := l2v & ~(l2virthit & ~l2g & l2asidhit) 1116 spv := spv & ~(spvirthit & ~spg & spasidhit) 1117 } 1118 } .otherwise { 1119 when (sfence_dup(0).bits.rs2) { 1120 // specific leaf of addr && all asid 1121 l0v := l0v & ~(l0virthit & l0vpnhit & l0flushMask) 1122 spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt) 1123 } .otherwise { 1124 // specific leaf of addr && specific asid 1125 l0v := l0v & ~(l0virthit & ~l0g & l0asidhit & l0vpnhit & l0flushMask) 1126 spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt) 1127 } 1128 } 1129 } 1130 1131 val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv 1132 when (hfencev_valid) { 1133 val l0hashVmid = XORFold(io.csr_dup(0).hgatp.vmid, l2tlbParams.hashAsidWidth) 1134 val l1hashVmid = XORFold(io.csr_dup(1).hgatp.vmid, l2tlbParams.hashAsidWidth) 1135 val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt 1136 val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt 1137 val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt 1138 val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt 1139 1140 val l0hhit = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt 1141 val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt 1142 val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt 1143 val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt 1144 1145 val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen) 1146 val l0hashVpn = XORFold(hfencev_vpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth) 1147 val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt 1148 val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(hfencev_vpn)) 1149 val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt 1150 1151 when(sfence_dup(0).bits.rs1) { 1152 when(sfence_dup(0).bits.rs2) { 1153 l0v := l0v & ~(l0hhit & l0vmidhit) 1154 l1v := l1v & ~(l1hhit & l1vmidhit) 1155 l2v := l2v & ~(l2hhit & l2vmidhit) 1156 spv := spv & ~(sphhit & spvmidhit) 1157 }.otherwise { 1158 l0v := l0v & ~(l0hhit & l0vmidhit & ~l0g & l0asidhit) 1159 l1v := l1v & ~(l1hhit & l1vmidhit & ~l1g & l1asidhit) 1160 l2v := l2v & ~(l2hhit & l2vmidhit & ~l2g & l2asidhit) 1161 spv := spv & ~(sphhit & spvmidhit & ~spg & spasidhit) 1162 } 1163 }.otherwise { 1164 when(sfence_dup(0).bits.rs2) { 1165 l0v := l0v & ~(l0hhit & l0vmidhit & l0vpnhit & l0flushMask) 1166 spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt) 1167 }.otherwise { 1168 l0v := l0v & ~(l0hhit & l0vmidhit & ~l0g & l0asidhit & l0vpnhit & l0flushMask) 1169 spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt) 1170 } 1171 } 1172 } 1173 1174 1175 val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg 1176 when(hfenceg_valid) { 1177 val l0hashVmid = XORFold(sfence_dup(0).bits.id, l2tlbParams.hashAsidWidth) 1178 val l1hashVmid = XORFold(sfence_dup(1).bits.id, l2tlbParams.hashAsidWidth) 1179 val l0vmidhit = VecInit(l0vmids.flatMap(_.map(_ === l0hashVmid))).asUInt 1180 val l1vmidhit = VecInit(l1vmids.flatMap(_.map(_ === l1hashVmid))).asUInt 1181 val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt 1182 val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt 1183 1184 val l0hhit = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt 1185 val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt 1186 val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt 1187 val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt 1188 1189 val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen) 1190 val l0hashVpn = XORFold(hfenceg_gvpn(vpnLen - 1, vpnLen - PtwL0TagLen), l2tlbParams.hashVpnWidth) 1191 val l0vpnhit = VecInit(l0vpns.flatMap(_.map(_ === l0hashVpn))).asUInt 1192 val l0flushSetIdx = UIntToOH(genPtwL0SetIdx(hfenceg_gvpn)) 1193 val l0flushMask = VecInit(l0flushSetIdx.asBools.map{a => Fill(l2tlbParams.l0nWays, a.asUInt)}).asUInt 1194 1195 when(sfence_dup(0).bits.rs1) { 1196 when(sfence_dup(0).bits.rs2) { 1197 l0v := l0v & ~l0hhit 1198 l1v := l1v & ~l1hhit 1199 l2v := l2v & ~l2hhit 1200 spv := spv & ~sphhit 1201 }.otherwise { 1202 l0v := l0v & ~(l0hhit & l0vmidhit) 1203 l1v := l1v & ~(l1hhit & l1vmidhit) 1204 l2v := l2v & ~(l2hhit & l2vmidhit) 1205 spv := spv & ~(sphhit & spvmidhit) 1206 } 1207 }.otherwise { 1208 when(sfence_dup(0).bits.rs2) { 1209 l0v := l0v & ~(l0hhit & l0vpnhit & l0flushMask) 1210 spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt) 1211 }.otherwise { 1212 l0v := l0v & ~(l0hhit & l0vmidhit & l0vpnhit & l0flushMask) 1213 spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt) 1214 } 1215 } 1216 } 1217 1218 if (EnableSv48) { 1219 val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt 1220 val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt 1221 val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt 1222 1223 when (sfence_valid) { 1224 val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt 1225 val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt 1226 val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen) 1227 1228 when (sfence_dup(2).bits.rs1/*va*/) { 1229 when (sfence_dup(2).bits.rs2) { 1230 // all va && all asid 1231 l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)) 1232 } .otherwise { 1233 // all va && specific asid except global 1234 l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)) 1235 } 1236 } 1237 } 1238 1239 when (hfencev_valid) { 1240 val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt 1241 val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt 1242 val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen) 1243 when(sfence_dup(2).bits.rs1) { 1244 when(sfence_dup(2).bits.rs2) { 1245 l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit)) 1246 }.otherwise { 1247 l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit)) 1248 } 1249 } 1250 } 1251 1252 when (hfenceg_valid) { 1253 val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt 1254 val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt 1255 val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen) 1256 when(sfence_dup(2).bits.rs1) { 1257 when(sfence_dup(2).bits.rs2) { 1258 l3v.map(_ := l3v.get & ~l3hhit) 1259 }.otherwise { 1260 l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit)) 1261 } 1262 } 1263 } 1264 } 1265 1266 def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = { 1267 in.ready := !in.valid || out.ready 1268 out.valid := in.valid 1269 out.bits := in.bits 1270 out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) => 1271 val bypassed_reg = Reg(Bool()) 1272 val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid 1273 when (inFire) { bypassed_reg := bypassed_wire } 1274 .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire } 1275 1276 b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire)) 1277 } 1278 } 1279 1280 // Perf Count 1281 val resp_l0 = resp_res.l0.hit 1282 val resp_sp = resp_res.sp.hit 1283 val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None 1284 val resp_l2_pre = resp_res.l2.pre 1285 val resp_l1_pre = resp_res.l1.pre 1286 val resp_l0_pre = resp_res.l0.pre 1287 val resp_sp_pre = resp_res.sp.pre 1288 val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire 1289 XSPerfAccumulate("access", base_valid_access_0) 1290 if (EnableSv48) { 1291 XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1292 } 1293 XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1294 XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1295 XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0) 1296 XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp) 1297 XSPerfAccumulate("pte_hit",base_valid_access_0 && io.resp.bits.hit) 1298 1299 if (EnableSv48) { 1300 XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1301 } 1302 XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1303 XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1304 XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0) 1305 XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp) 1306 XSPerfAccumulate("pte_hit_pre",base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1307 1308 val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire 1309 XSPerfAccumulate("pre_access", base_valid_access_1) 1310 if (EnableSv48) { 1311 XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1312 } 1313 XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1314 XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1315 XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0) 1316 XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp) 1317 XSPerfAccumulate("pre_pte_hit",base_valid_access_1 && io.resp.bits.hit) 1318 1319 if (EnableSv48) { 1320 XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1321 } 1322 XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1323 XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1324 XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0) 1325 XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp) 1326 XSPerfAccumulate("pre_pte_hit_pre",base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1327 1328 val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire 1329 XSPerfAccumulate("access_first", base_valid_access_2) 1330 if (EnableSv48) { 1331 XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1332 } 1333 XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1334 XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1335 XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0) 1336 XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp) 1337 XSPerfAccumulate("pte_hit_first",base_valid_access_2 && io.resp.bits.hit) 1338 1339 if (EnableSv48) { 1340 XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1341 } 1342 XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1343 XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1344 XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0) 1345 XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp) 1346 XSPerfAccumulate("pte_hit_pre_first",base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1347 1348 val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire 1349 XSPerfAccumulate("pre_access_first", base_valid_access_3) 1350 if (EnableSv48) { 1351 XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1352 } 1353 XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1354 XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1355 XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0) 1356 XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp) 1357 XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit) 1358 1359 if (EnableSv48) { 1360 XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1361 } 1362 XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1363 XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) 1364 XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0) 1365 XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp) 1366 XSPerfAccumulate("pre_pte_hit_pre_first",base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit) 1367 1368 XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready) 1369 XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready) 1370 if (EnableSv48) { 1371 l3AccessPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) } 1372 } 1373 l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) } 1374 l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) } 1375 l0AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) } 1376 spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) } 1377 if (EnableSv48) { 1378 l3RefillPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) } 1379 } 1380 l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) } 1381 l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) } 1382 l0RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) } 1383 spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) } 1384 1385 if (EnableSv48) { 1386 XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR) 1387 } 1388 XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR) 1389 XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR) 1390 XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR) 1391 XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR) 1392 if (EnableSv48) { 1393 XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0)) 1394 } 1395 XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0)) 1396 XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0)) 1397 XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0)) 1398 XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0)) 1399 1400 // debug 1401 XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n") 1402 if (EnableSv48) { 1403 XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n") 1404 } 1405 XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n") 1406 XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n") 1407 XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n") 1408 XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n") 1409 XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n") 1410 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n") 1411 if (EnableSv48) { 1412 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n") 1413 } 1414 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n") 1415 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n") 1416 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n") 1417 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n") 1418 XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n") 1419 1420 val perfEvents = Seq( 1421 ("access ", base_valid_access_0 ), 1422 ("l2_hit ", l2Hit ), 1423 ("l1_hit ", l1Hit ), 1424 ("l0_hit ", l0Hit ), 1425 ("sp_hit ", spHit ), 1426 ("pte_hit ", l0Hit || spHit ), 1427 ("rwHarzad ", io.req.valid && !io.req.ready ), 1428 ("out_blocked ", io.resp.valid && !io.resp.ready ), 1429 ) 1430 generatePerfEvent() 1431} 1432