/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.internal.naming.chiselName
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache caches page-table entries of all three Sv39 levels.
 * The cache responds one cycle after the request (two-stage: lookup issue, then hit check).
 * The cache itself is never blocked internally:
 * when the miss queue is full, the requester must block requests from outside.
 */
class PtwCacheIO()(implicit p: Parameters) extends PtwBundle {
  // Lookup request: virtual page number, requester id, and whether this is a
  // replayed request from the miss queue (replays bypass the normal ready gating).
  val req = Flipped(DecoupledIO(new Bundle {
    val vpn = UInt(vpnLen.W)
    val source = UInt(bPtwWidth.W)
    val isReplay = Bool()
  }))
  // Lookup response, valid one cycle after the request fires.
  // `hit` means a leaf entry (l3 or super-page) was found; `toTlb` then carries it.
  // On a miss, `toFsm` tells the PTW state machine which non-leaf levels hit
  // and the PPN to continue the walk from.
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val vpn = UInt(vpnLen.W)
    val isReplay = Bool()
    val hit = Bool()
    val toFsm = new Bundle {
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = UInt(ppnLen.W)
    }
    val toTlb = new PtwEntry(tagLen = vpnLen, hasPerm = true, hasLevel = true)
  })
  // Refill from memory: a full memory block of PTEs plus the vpn/level of the walk,
  // and the low address bits selecting which XLEN-wide PTE in the block was requested.
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val addr_low = UInt((log2Up(l2tlbParams.blockBytes) - log2Up(XLEN/8)).W)
  }))
  val sfence = Input(new SfenceBundle)
}


@chiselName
class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwCacheIO)

  // TODO: four caches make the codes dirty, think about how to deal with it

  val sfence = io.sfence
  val refill = io.refill.bits

  // Two-stage pipeline:
  //   first_*  — cycle 0: accept the request and issue SRAM reads.
  //   second_* — cycle 1: compare tags / check valid bits and produce the response.
  val first_valid = io.req.valid
  val first_fire = first_valid && io.req.ready
  val first_req = io.req.bits
  val second_ready = Wire(Bool())
  // second stage holds valid from first_fire until the response fires (or sfence flushes it)
  val second_valid = ValidHold(first_fire, io.resp.fire(), sfence.valid)
  val second_req = RegEnable(first_req, first_fire)
  // NOTE: if the ptw cache resp may be blocked, it is hard to handle refill;
  // when the miss queue is full, please block the itlb and dtlb inputs instead.

  // When refilling a single-port SRAM, the read port is busy: refuse new requests.
  val rwHarzad = if (sramSinglePort) io.refill.valid else false.B
  io.req.ready := !rwHarzad && (second_ready || io.req.bits.isReplay)
  // NOTE: while the SRAM write (refill) is in flight, req is not ready.
  // A replayed request is always let in; the miss queue guarantees resp.fire()
  // for it, so it cannot deadlock the second stage.

  // l1: level 0 non-leaf pte, fully-associative register file
  val l1 = Reg(Vec(l2tlbParams.l1Size, new PtwEntry(tagLen = PtwL1TagLen)))
  val l1v = RegInit(0.U(l2tlbParams.l1Size.W)) // per-entry valid bits
  val l1g = Reg(UInt(l2tlbParams.l1Size.W))    // per-entry global (G) bits, kept on asid-specific sfence

  // l2: level 1 non-leaf pte, set-associative SRAM
  val l2 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false),
    set = l2tlbParams.l2nSets,
    way = l2tlbParams.l2nWays,
    singlePort = sramSinglePort
  ))
  val l2v = RegInit(0.U((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2g = Reg(UInt((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  // Slice the flat l2 valid vector down to the ways of the set indexed by vpn.
  def getl2vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    val l2vVec = l2v.asTypeOf(Vec(l2tlbParams.l2nSets, UInt(l2tlbParams.l2nWays.W)))
    l2vVec(set)
  }

  // l3: level 2 leaf pte of 4KB pages, set-associative SRAM
  val l3 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true),
    set = l2tlbParams.l3nSets,
    way = l2tlbParams.l3nWays,
    singlePort = sramSinglePort
  ))
  val l3v = RegInit(0.U((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3g = Reg(UInt((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  // Slice the flat l3 valid vector down to the ways of the set indexed by vpn.
  def getl3vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    val l3vVec = l3v.asTypeOf(Vec(l2tlbParams.l3nSets, UInt(l2tlbParams.l3nWays.W)))
    l3vVec(set)
  }

  // sp: level 0/1 leaf pte of 1GB/2MB super pages, fully-associative register file
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))

  // Access Perf: per-entry/per-way access counters, set in the hit blocks below
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3AccessPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1AccessPerf.map(_ := false.B)
  l2AccessPerf.map(_ := false.B)
  l3AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  // l1 lookup: tag compare in the first cycle, hit vector registered into the second
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1Size)
  val (l1Hit, l1HitPPN) = {
    val hitVecT = l1.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && l1v(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitPPN = ParallelPriorityMux(hitVec zip l1.map(_.ppn))
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { ptwl1replace.access(OHToUInt(hitVec)) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire)}
    for (i <- 0 until l2tlbParams.l1Size) {
      XSDebug(first_fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(first_req.vpn)}\n")
    }
    XSDebug(first_fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(second_valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l1_hitVecT")
    VecInit(hitVec).suggestName(s"l1_hitVec")

    (hit, hitPPN)
  }

  // l2 lookup: SRAM read issued in the first cycle; valid bits are sampled in the
  // first cycle too (vidx) so tag compare in the second cycle uses consistent state.
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer,l2tlbParams.l2nWays,l2tlbParams.l2nSets)
  val (l2Hit, l2HitPPN) = {
    val ridx = genPtwL2SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl2vSet(first_req.vpn).asBools), first_fire)
    l2.io.r.req.valid := first_fire
    l2.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l2.io.r.resp.data
    // val hitVec = VecInit(ramDatas.map{wayData => wayData.hit(first_req.vpn) })
    val hitVec = VecInit(ramDatas.zip(vidx).map { case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l2nWays).map(_.U))

    ridx.suggestName(s"l2_ridx")
    vidx.suggestName(s"l2_vidx")
    ramDatas.suggestName(s"l2_ramDatas")
    hitVec.suggestName(s"l2_hitVec")
    hitWayData.suggestName(s"l2_hitWayData")
    hitWay.suggestName(s"l2_hitWay")

    when (hit) { ptwl2replace.access(genPtwL2SetIdx(second_req.vpn), hitWay) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l2nWays) {
      XSDebug(RegNext(first_fire), p"[l2] ramDatas(${i.U}) ${ramDatas(i)} l2v:${vidx(i)} hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    // The hit way stores a sector of PPNs; select the one for this vpn.
    (hit, hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))
  }

  // l3 lookup: same structure as l2, but returns the whole hit entry
  // (PPN sector plus permissions are extracted below).
  val ptwl3replace = ReplacementPolicy.fromString(l2tlbParams.l3Replacer,l2tlbParams.l3nWays,l2tlbParams.l3nSets)
  val (l3Hit, l3HitData) = {
    val ridx = genPtwL3SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl3vSet(first_req.vpn).asBools), first_fire)
    l3.io.r.req.valid := first_fire
    l3.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l3.io.r.resp.data
    val hitVec = VecInit(ramDatas.zip(vidx).map{ case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l3nWays).map(_.U))

    when (hit) { ptwl3replace.access(genPtwL3SetIdx(second_req.vpn), hitWay) }

    l3AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l3nWays) {
      XSDebug(RegNext(first_fire), p"[l3] ramDatas(${i.U}) ${ramDatas(i)} l3v:${vidx(i)} hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    ridx.suggestName(s"l3_ridx")
    vidx.suggestName(s"l3_vidx")
    ramDatas.suggestName(s"l3_ramDatas")
    hitVec.suggestName(s"l3_hitVec")
    hitWay.suggestName(s"l3_hitWay")

    (hit, hitWayData)
  }
  val l3HitPPN = l3HitData.ppns(genPtwL3SectorIdx(second_req.vpn))
  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))(genPtwL3SectorIdx(second_req.vpn))

  // super page lookup: fully-associative, like l1, but returns the whole entry
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && spv(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && RegNext(first_fire) }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(first_fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(first_req.vpn)} spv:${spv(i)}\n")
    }
    XSDebug(second_valid, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (hit, hitData)
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  // Response assembly. If resp is not accepted this cycle, latch it
  // (resp_latch) so the second stage can be reused; second_ready reflects
  // whether both the stage and the latch are free (or the resp is firing now).
  val resp = Wire(io.resp.bits.cloneType)
  val resp_latch = RegEnable(resp, io.resp.valid && !io.resp.ready)
  val resp_latch_valid = ValidHold(io.resp.valid && !io.resp.ready, io.resp.ready, sfence.valid)
  second_ready := !(second_valid || resp_latch_valid) || io.resp.fire()
  resp.source := second_req.source
  resp.vpn := second_req.vpn
  resp.isReplay := second_req.isReplay
  resp.hit := l3Hit || spHit
  resp.toFsm.l1Hit := l1Hit
  resp.toFsm.l2Hit := l2Hit
  // On a miss, hand the deepest non-leaf hit to the FSM to shorten the walk.
  resp.toFsm.ppn := Mux(l2Hit, l2HitPPN, l1HitPPN)
  resp.toTlb.tag := second_req.vpn
  resp.toTlb.ppn := Mux(l3Hit, l3HitPPN, spHitData.ppn)
  resp.toTlb.perm.map(_ := Mux(l3Hit, l3HitPerm, spHitPerm))
  resp.toTlb.level.map(_ := Mux(l3Hit, 2.U, spHitLevel))

  io.resp.valid := second_valid
  io.resp.bits := Mux(resp_latch_valid, resp_latch, resp)
  assert(!(l3Hit && spHit), "normal page and super page both hit")

  // refill Perf: per-entry/per-way refill counters, set in the refill blocks below
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3RefillPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1RefillPerf.map(_ := false.B)
  l2RefillPerf.map(_ := false.B)
  l3RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill: default the SRAM write ports off; the when-blocks below override them
  l2.io.w.req <> DontCare
  l3.io.w.req <> DontCare
  l2.io.w.req.valid := false.B
  l3.io.w.req.valid := false.B

  // Select the `index`-th XLEN-wide word out of a wide data beat.
  def get_part(data: UInt, index: UInt): UInt = {
    val inner_data = data.asTypeOf(Vec(data.getWidth / XLEN, UInt(XLEN.W)))
    inner_data(index)
  }

  val memRdata = refill.ptes                          // whole memory block of PTEs
  val memSelData = get_part(memRdata, refill.addr_low) // the PTE the walk actually asked for
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memPte = memSelData.asTypeOf(new PteBundle)

  memPte.suggestName("memPte")

  // Refill on a valid memory response, unless the PTE faults or an sfence is flushing.
  // Routing: level 0/1 non-leaf -> l1/l2; level 2 leaf -> l3; level 0/1 leaf -> sp.
  // TODO: handle sfenceLatch outsize
  when (io.refill.valid && !memPte.isPf(refill.level) && !sfence.valid) {
    when (refill.level === 0.U && !memPte.isLeaf()) {
      // val refillIdx = LFSR64()(log2Up(l2tlbParams.l1Size)-1,0) // TODO: may be LRU
      val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
      refillIdx.suggestName(s"PtwL1RefillIdx")
      val rfOH = UIntToOH(refillIdx)
      l1(refillIdx).refill(refill.vpn, memSelData)
      ptwl1replace.access(refillIdx)
      l1v := l1v | rfOH
      l1g := (l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until l2tlbParams.l1Size) {
        l1RefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.vpn, memSelData)}\n")
      XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"l1_refillIdx")
      rfOH.suggestName(s"l1_rfOH")
    }

    when (refill.level === 1.U && !memPte.isLeaf()) {
      val refillIdx = genPtwL2SetIdx(refill.vpn)
      // NOTE(review): the valid bits used for victim selection are registered on
      // first_fire, not on the refill itself — this appears to rely on the refill
      // arriving for the set read by the missing request; confirm this invariant.
      val victimWay = replaceWrapper(RegEnable(VecInit(getl2vSet(refill.vpn).asBools).asUInt, first_fire), ptwl2replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
      l2.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U
        ),
        waymask = victimWayOH
      )
      ptwl2replace.access(refillIdx, victimWay)
      l2v := l2v | rfvOH
      // The sector's G bit is the AND of all PTEs' G bits in the refilled block.
      l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until l2tlbParams.l2nWays) {
        l2RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l2 refill] refilldata:0x${
        (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U)
      }\n")
      XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
      XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l2_refillIdx")
      victimWay.suggestName(s"l2_victimWay")
      victimWayOH.suggestName(s"l2_victimWayOH")
      rfvOH.suggestName(s"l2_rfvOH")
    }

    when (refill.level === 2.U && memPte.isLeaf()) {
      val refillIdx = genPtwL3SetIdx(refill.vpn)
      // NOTE(review): same first_fire-enabled valid sampling as the l2 path above.
      val victimWay = replaceWrapper(RegEnable(VecInit(getl3vSet(refill.vpn).asBools).asUInt, first_fire), ptwl3replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
      l3.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U
        ),
        waymask = victimWayOH
      )
      ptwl3replace.access(refillIdx, victimWay)
      l3v := l3v | rfvOH
      l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until l2tlbParams.l3nWays) {
        l3RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l3 refill] refilldata:0x${
        (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U)
      }\n")
      XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
      XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l3_refillIdx")
      victimWay.suggestName(s"l3_victimWay")
      victimWayOH.suggestName(s"l3_victimWayOH")
      rfvOH.suggestName(s"l3_rfvOH")
    }

    // A leaf at level 0 or 1 is a 1GB/2MB super page: refill the sp array.
    when ((refill.level === 0.U || refill.level === 1.U) && memPte.isLeaf()) {
      val refillIdx = spreplace.way// LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
      val rfOH = UIntToOH(refillIdx)
      sp(refillIdx).refill(refill.vpn, memSelData, refill.level)
      spreplace.access(refillIdx)
      spv := spv | rfOH
      spg := spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until l2tlbParams.spSize) {
        spRefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.vpn, memSelData, refill.level)}\n")
      XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"sp_refillIdx")
      rfOH.suggestName(s"sp_rfOH")
    }
  }

  // sfence: placed after refill so its valid-bit clears win last-connect priority.
  when (sfence.valid) {
    when (sfence.bits.rs1/*va*/) {
      when (sfence.bits.rs2) {
        // all va && all asid: flush everything
        l1v := 0.U
        l2v := 0.U
        l3v := 0.U
        spv := 0.U
      } .otherwise {
        // all va && specific asid: flush all except global entries
        l1v := l1v & l1g
        l2v := l2v & l2g
        l3v := l3v & l3g
        spv := spv & spg
      }
    } .otherwise {
      // specific va: flush the matching l3 set only; non-leaf l1/l2 are kept,
      // but super pages are conservatively flushed entirely.
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")
      when (sfence.bits.rs2) {
        // specific leaf of addr && all asid
        l3v := l3v & ~flushMask
        l3g := l3g & ~flushMask
      } .otherwise {
        // specific leaf of addr && specific asid: keep global entries in the set
        l3v := l3v & (~flushMask | l3g)
      }
      spv := 0.U
    }
  }

  // Perf Count
  XSPerfAccumulate("access", second_valid)
  XSPerfAccumulate("l1_hit", l1Hit)
  XSPerfAccumulate("l2_hit", l2Hit)
  XSPerfAccumulate("l3_hit", l3Hit)
  XSPerfAccumulate("sp_hit", spHit)
  XSPerfAccumulate("pte_hit", l3Hit || spHit)
  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
  l3AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
  l3RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  // debug: dump valid/global vectors around an sfence
  XSDebug(sfence.valid, p"[sfence] original v and g vector:\n")
  XSDebug(sfence.valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence.valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(sfence.valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] new v and g vector:\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] spv:${Binary(spv)}\n")
}