// xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/TLB.scala (revision 6d5ddbce72c9c67dcf0ec08cc682a9545ceb5f6c)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import xiangshan.backend.roq.RoqPtr
import xiangshan.backend.fu.util.HasCSRConst



class TLB(Width: Int, isDtlb: Boolean)(implicit p: Parameters) extends TlbModule with HasCSRConst {
  val io = IO(new TlbIO(Width))

  val req    = io.requestor.map(_.req)
  val resp   = io.requestor.map(_.resp)
  val ptw    = io.ptw

  val sfence = io.sfence
  val csr    = io.csr
  val satp   = csr.satp
  val priv   = csr.priv
  val ifetch = if (isDtlb) false.B else true.B
  val mode   = if (isDtlb) priv.dmode else priv.imode
  // val vmEnable = satp.mode === 8.U // && (mode < ModeM) // FIXME: fix me when boot xv6/linux...
  val vmEnable = if(EnbaleTlbDebug) (satp.mode === 8.U)
                 else               (satp.mode === 8.U && (mode < ModeM))
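  // NOTE: translation is active only under Sv39 (satp.mode === 8) and when running below M-mode;
  // with EnbaleTlbDebug set, the privilege check is dropped so the TLB can be exercised for debug.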

  val reqAddr = req.map(_.bits.vaddr.asTypeOf(vaBundle))
  val cmd     = req.map(_.bits.cmd)
  val valid   = req.map(_.valid)

  def widthMapSeq[T <: Seq[Data]](f: Int => T) = (0 until Width).map(f)
  def widthMap[T <: Data](f: Int => T) = (0 until Width).map(f)

  // Normal page && Super page
  val nv = RegInit(VecInit(Seq.fill(TlbEntrySize)(false.B)))
  val nMeta = Module(new CAMTemplate(UInt(vpnLen.W), TlbEntrySize, Width + 1)).io
  val nData = Reg(Vec(TlbEntrySize, new TlbData(false)))
  val sv = RegInit(VecInit(Seq.fill(TlbSPEntrySize)(false.B)))
  val sMeta = Reg(Vec(TlbSPEntrySize, new TlbSPMeta))
  val sData = Reg(Vec(TlbSPEntrySize, new TlbData(true)))
  val v = nv ++ sv
  val data = nData ++ sData
  val g = VecInit(data.map(_.perm.g))
  val pf = VecInit(data.zip(v).map{ case(e, vi) => e.perm.pf & vi })
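  // NOTE: 4KB ("normal") entries keep their tags in a CAM with Width + 1 search ports
  // (one per requestor plus one for sfence) and their data in registers; superpage entries
  // keep both meta and data in plain registers and are compared fully associatively.
  // v / data / g / pf index normal entries first (0 until TlbEntrySize), then superpage entries.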

  /**
    * PTW refill
    */
  val refill = ptw.resp.fire() && !sfence.valid
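  // NOTE: a PTW response that arrives in the same cycle as an sfence is dropped,
  // so a translation that is being flushed is never written into the TLB.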

  val normalReplacer = if (isDtlb) Some("random") else Some("plru")
  val superReplacer = if (isDtlb) Some("random") else Some("plru")
  val nReplace = ReplacementPolicy.fromString(normalReplacer, TlbEntrySize)
  val sReplace = ReplacementPolicy.fromString(superReplacer, TlbSPEntrySize)
  val nRefillIdx = replaceWrapper(nv, nReplace.way)
  val sRefillIdx = replaceWrapper(sv, sReplace.way)
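  // NOTE: the DTLB uses random replacement, the ITLB uses PLRU; replaceWrapper is expected
  // to prefer an invalid (free) way over the replacer's victim when one exists.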

  nMeta.w := DontCare
  nMeta.w.valid := false.B
  when (refill) {
    val resp = ptw.resp.bits
    when (resp.entry.level.getOrElse(0.U) === 2.U) {
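      // level 2 means a 4KB leaf page: refill the normal (small-page) array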
      val refillIdx = nRefillIdx
      refillIdx.suggestName(s"NormalRefillIdx")

      nv(refillIdx) := true.B
      nMeta.w.bits.index := nRefillIdx
      nMeta.w.bits.data  := resp.entry.tag
      nMeta.w.valid := true.B
      nData(refillIdx).apply(
        ppn   = resp.entry.ppn,
        level = resp.entry.level.getOrElse(0.U),
        perm  = VecInit(resp.entry.perm.getOrElse(0.U)).asUInt,
        pf    = resp.pf
      )
      nReplace.access(nRefillIdx)
      XSDebug(p"Refill normal: idx:${refillIdx} entry:${resp.entry} pf:${resp.pf}\n")
    }.otherwise {
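      // level 0/1 means a superpage leaf (1GB / 2MB in Sv39): refill the superpage array,
      // but only if an entry for this tag is not already present (avoid duplicate hits)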
      val refillIdx = sRefillIdx
      refillIdx.suggestName(s"SuperRefillIdx")

      val dup = Cat(sv.zip(sMeta).map{ case (v, m) =>
        v && m.hit(resp.entry.tag)
      }).orR // NOTE: may have long latency, RegNext it

      when (!dup) {
        sv(refillIdx) := true.B
        sMeta(refillIdx).apply(
          vpn = resp.entry.tag,
          level = resp.entry.level.getOrElse(0.U)
        )
        sData(refillIdx).apply(
          ppn   = resp.entry.ppn,
          level = resp.entry.level.getOrElse(0.U),
          perm  = VecInit(resp.entry.perm.getOrElse(0.U)).asUInt,
          pf    = resp.pf
        )
        sReplace.access(sRefillIdx)
        XSDebug(p"Refill superpage: idx:${refillIdx} entry:${resp.entry} pf:${resp.pf}\n")
      }
    }
  }

  /**
    * L1 TLB read
    */
  val sfenceVpn = sfence.bits.addr.asTypeOf(vaBundle).vpn
  for (i <- 0 until Width) {
    nMeta.r.req(i) := io.requestor(i).req.bits.vaddr.asTypeOf(vaBundle).vpn
  }
  nMeta.r.req(Width) := sfenceVpn

  val nRefillMask = Mux(refill, UIntToOH(nRefillIdx)(TlbEntrySize-1, 0), 0.U).asBools
  val sRefillMask = Mux(refill, UIntToOH(sRefillIdx)(TlbSPEntrySize-1, 0), 0.U).asBools
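  // NOTE: the DTLB registers its request and hit vector for one cycle (see the RegNexts in
  // TLBNormalRead), so an entry that is being refilled in the current cycle is masked out of
  // the hit vector to avoid matching against half-updated meta/data; the ITLB path is
  // combinational and does not need the mask.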
  def TLBNormalRead(i: Int) = {
    val entryHitVec = (
      if (isDtlb)
        VecInit(nMeta.r.resp(i).zip(nRefillMask).map{ case (e, m) => ~m && e } ++
                sMeta.zip(sRefillMask).map{ case (e,m) => ~m && e.hit(reqAddr(i).vpn) })
      else
        VecInit(nMeta.r.resp(i) ++ sMeta.map(_.hit(reqAddr(i).vpn/*, satp.asid*/)))
    )

    val reqAddrReg = if (isDtlb) RegNext(reqAddr(i)) else reqAddr(i)
    val cmdReg = if (isDtlb) RegNext(cmd(i)) else cmd(i)
    val validReg = if (isDtlb) RegNext(valid(i)) else valid(i)
    val entryHitVecReg = if (isDtlb) RegNext(entryHitVec) else entryHitVec
    entryHitVecReg.suggestName(s"entryHitVecReg_${i}")

    val hitVec  = VecInit((v zip entryHitVecReg).map{ case (a,b) => a&b })
    val pfHitVec   = VecInit((pf zip entryHitVecReg).map{ case (a,b) => a&b })
    val pfArray = ParallelOR(pfHitVec).asBool && validReg && vmEnable
    val hit     = ParallelOR(hitVec).asBool && validReg && vmEnable && ~pfArray
    val miss    = !hit && validReg && vmEnable && ~pfArray
    val hitppn  = ParallelMux(hitVec zip data.map(_.genPPN(reqAddrReg.vpn)))
    val hitPerm = ParallelMux(hitVec zip data.map(_.perm))

    hitVec.suggestName(s"hitVec_${i}")
    pfHitVec.suggestName(s"pfHitVec_${i}")
    hit.suggestName(s"hit_${i}")
    miss.suggestName(s"miss_${i}")
    hitppn.suggestName(s"hitppn_${i}")
    hitPerm.suggestName(s"hitPerm_${i}")

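    // NOTE: on an ITLB hit, touch the PLRU state of whichever array (normal or superpage)
    // produced the hit; the DTLB uses random replacement, so it needs no hit-time update.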
    if (!isDtlb) { // NOTE: only support one access
      val hitVecUInt = hitVec.asUInt
      XSDebug(hitVecUInt.orR, p"HitVecUInt:${Hexadecimal(hitVecUInt)}\n")
      when (Cat(hitVecUInt(TlbEntrySize-1, 0)).orR && validReg && vmEnable) {
        nReplace.access(OHToUInt(hitVecUInt(TlbEntrySize-1, 0)))
        XSDebug(p"Normal Page Access: ${Hexadecimal(OHToUInt(hitVecUInt(TlbEntrySize-1, 0)))}\n")
      }
      when (Cat(hitVecUInt(TlbEntrySize + TlbSPEntrySize - 1, TlbEntrySize)).orR && validReg && vmEnable) {
        sReplace.access(OHToUInt(hitVecUInt(TlbEntrySize + TlbSPEntrySize - 1, TlbEntrySize)))
        XSDebug(p"Super Page Access: ${Hexadecimal(OHToUInt(hitVecUInt(TlbEntrySize + TlbSPEntrySize - 1, TlbEntrySize)))}\n")
      }
    }

    XSDebug(valid(i), p"(${i.U}) entryHit:${Hexadecimal(entryHitVec.asUInt)}\n")
    XSDebug(validReg, p"(${i.U}) entryHitReg:${Hexadecimal(entryHitVecReg.asUInt)} hitVec:${Hexadecimal(hitVec.asUInt)} pfHitVec:${Hexadecimal(pfHitVec.asUInt)} pfArray:${Hexadecimal(pfArray.asUInt)} hit:${hit} miss:${miss} hitppn:${Hexadecimal(hitppn)} hitPerm:${hitPerm}\n")

    // resp  // TODO: A/D bits are not handled yet
    val paddr = Cat(hitppn, reqAddrReg.off)
    val vaddr = SignExt(req(i).bits.vaddr, PAddrBits)

    req(i).ready := resp(i).ready
    resp(i).valid := validReg
    resp(i).bits.paddr := Mux(vmEnable, paddr, if (isDtlb) RegNext(vaddr) else vaddr)
    resp(i).bits.miss := miss
    resp(i).bits.ptwBack := io.ptw.resp.fire()

    val perm = hitPerm // NOTE: given the exception, the outer module chooses which result to use
    val update = false.B && hit && (!hitPerm.a || !hitPerm.d && TlbCmd.isWrite(cmdReg)) // update A/D through exception (currently disabled by the false.B)
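    // Sv39 permission check: U-mode may only use pages with perm.u set; S-mode may use
    // U pages only when SUM is set and only for data accesses (never for instruction fetch);
    // MXR additionally lets loads read execute-only pages. A hit that fails these checks,
    // or a cached page-fault entry (pfArray), raises the corresponding page fault below.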
    val modeCheck = !(mode === ModeU && !perm.u || mode === ModeS && perm.u && (!priv.sum || ifetch))
    val ldPf = (pfArray && TlbCmd.isRead(cmdReg) && true.B /*!isAMO*/) || hit && !(modeCheck && (perm.r || priv.mxr && perm.x)) && (TlbCmd.isRead(cmdReg) && true.B/*!isAMO*/) // TODO: handle isAMO
    val stPf = (pfArray && TlbCmd.isWrite(cmdReg) || false.B /*isAMO*/ ) || hit && !(modeCheck && perm.w) && (TlbCmd.isWrite(cmdReg) || false.B/*TODO isAMO. */)
    val instrPf = (pfArray && TlbCmd.isExec(cmdReg)) || hit && !(modeCheck && perm.x) && TlbCmd.isExec(cmdReg)
    resp(i).bits.excp.pf.ld    := ldPf || update
    resp(i).bits.excp.pf.st    := stPf || update
    resp(i).bits.excp.pf.instr := instrPf || update

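    // The perm.pi/pd/pr/pw/pe/pa bits are PMA attributes pre-computed for the translated
    // address (presumably when the entry is refilled), so the vmEnable path below needs no
    // address decode; the !vmEnable path decodes the PMA map directly.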
    // if vmEnable, use the pre-calculated PMA check result
    resp(i).bits.mmio := Mux(TlbCmd.isExec(cmdReg), !perm.pi, !perm.pd)
    resp(i).bits.excp.af.ld    := Mux(TlbCmd.isAtom(cmdReg), !perm.pa, !perm.pr) && TlbCmd.isRead(cmdReg)
    resp(i).bits.excp.af.st    := Mux(TlbCmd.isAtom(cmdReg), !perm.pa, !perm.pw) && TlbCmd.isWrite(cmdReg)
    resp(i).bits.excp.af.instr := Mux(TlbCmd.isAtom(cmdReg), false.B, !perm.pe)

    // if !vmEnable, check the PMA directly
    val (pmaMode, accessWidth) = AddressSpace.memmapAddrMatch(resp(i).bits.paddr)
    when (!vmEnable) {
      resp(i).bits.mmio := Mux(TlbCmd.isExec(cmdReg), !PMAMode.icache(pmaMode), !PMAMode.dcache(pmaMode))
      resp(i).bits.excp.af.ld    := Mux(TlbCmd.isAtom(cmdReg), !PMAMode.atomic(pmaMode), !PMAMode.read(pmaMode)) && TlbCmd.isRead(cmdReg)
      resp(i).bits.excp.af.st    := Mux(TlbCmd.isAtom(cmdReg), !PMAMode.atomic(pmaMode), !PMAMode.write(pmaMode)) && TlbCmd.isWrite(cmdReg)
      resp(i).bits.excp.af.instr := Mux(TlbCmd.isAtom(cmdReg), false.B, !PMAMode.execute(pmaMode))
    }

    // TODO: MMIO check

    (hit, miss, hitVec, validReg)
  }

  val readResult = (0 until Width).map(TLBNormalRead(_))
  val hitVec = readResult.map(res => res._1)
  val missVec = readResult.map(res => res._2)
  val hitVecVec = readResult.map(res => res._3)
  val validRegVec = readResult.map(res => res._4)

  for (i <- 0 until Width) {
    io.ptw.req(i).valid := validRegVec(i) && missVec(i) && !RegNext(refill)
    io.ptw.req(i).bits.vpn := RegNext(reqAddr(i).vpn)
  }
  io.ptw.resp.ready := true.B
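  // NOTE: a PTW request is suppressed in the cycle right after a refill, because the
  // registered miss result was computed before the refill and may already be satisfied;
  // PTW responses are always accepted (resp.ready tied high), so refills never back-pressure the PTW.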

  // val tooManyPf = PopCount(pf) > 5.U
  // when (tooManyPf) { // when there are too many page-fault entries, just clear them all
  //   XSDebug(p"Too many pf just flush all the pf v:${Hexadecimal(VecInit(v).asUInt)} pf:${Hexadecimal(pf.asUInt)}\n")
  //   v.zipWithIndex.map{ case (a, i) => a := a & !pf(i) }
  // }

  // sfence (flush)
  val sfenceHit = nMeta.r.resp(Width) ++ sMeta.map(_.hit(sfenceVpn))
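  // NOTE: the sfence lookup reuses the extra CAM read port (port index Width) that was
  // hooked up to sfenceVpn above, plus a fully associative compare against the superpage meta.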
  when (sfence.valid) {
    when (sfence.bits.rs1) { // sfence.bits.rs1 is set when rs1 === x0: no specific virtual address
      when (sfence.bits.rs2) { // sfence.bits.rs2 is set when rs2 === x0: no specific ASID (ASID is not supported here)
        // all addr and all asid
        v.map(_ := false.B)
      }.otherwise {
        // all addr but specific asid
        v.zipWithIndex.map{ case (a,i) => a := a & g(i) }
      }
    }.otherwise {
      when (sfence.bits.rs2) {
        // specific addr but all asid
        v.zipWithIndex.map{ case (a,i) => a := a & !sfenceHit(i) }
      }.otherwise {
        // specific addr and specific asid
        v.zipWithIndex.map{ case (a,i) => a := a & !sfenceHit(i) && !g(i) }
      }
    }
  }

  if (isDtlb) {
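    // NOTE: the first_access/first_miss counters only count first-issue requests
    // (debug.isFirstIssue), so replayed memory accesses do not inflate the statistics.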
    for (i <- 0 until Width) {
      XSPerfAccumulate("first_access" + Integer.toString(i, 10), validRegVec(i) && vmEnable && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate("access" + Integer.toString(i, 10), validRegVec(i) && vmEnable)
    }
    for (i <- 0 until Width) {
      XSPerfAccumulate("first_miss" + Integer.toString(i, 10), validRegVec(i) && vmEnable && missVec(i) && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate("miss" + Integer.toString(i, 10), validRegVec(i) && vmEnable && missVec(i))
    }
  } else {
    // NOTE: the ITLB is blocking, so a resp is valid only on a hit
    // and a req is ready only on a hit
    XSPerfAccumulate("access", io.requestor(0).req.fire() && vmEnable)
    XSPerfAccumulate("miss", ptw.req(0).fire())
  }
  //val reqCycleCnt = Reg(UInt(16.W))
  //reqCycleCnt := reqCycleCnt + BoolStopWatch(ptw.req(0).fire(), ptw.resp.fire || sfence.valid)
  //XSPerfAccumulate("ptw_req_count", ptw.req.fire())
  //XSPerfAccumulate("ptw_req_cycle", Mux(ptw.resp.fire(), reqCycleCnt, 0.U))
  XSPerfAccumulate("ptw_resp_count", ptw.resp.fire())
  XSPerfAccumulate("ptw_resp_pf_count", ptw.resp.fire() && ptw.resp.bits.pf)
  for (i <- 0 until TlbEntrySize) {
    val indexHitVec = hitVecVec.zip(validRegVec).map{ case (h, v) => h(i) && v }
    XSPerfAccumulate(s"NormalAccessIndex${i}", Mux(vmEnable, PopCount(indexHitVec), 0.U))
  }
  for (i <- 0 until TlbSPEntrySize) {
    val indexHitVec = hitVecVec.zip(validRegVec).map{ case (h, v) => h(i + TlbEntrySize) && v }
    XSPerfAccumulate(s"SuperAccessIndex${i}", Mux(vmEnable, PopCount(indexHitVec), 0.U))
  }
  for (i <- 0 until TlbEntrySize) {
    XSPerfAccumulate(s"NormalRefillIndex${i}", refill && ptw.resp.bits.entry.level.getOrElse(0.U) === 2.U && i.U === nRefillIdx)
  }
  for (i <- 0 until TlbSPEntrySize) {
    XSPerfAccumulate(s"SuperRefillIndex${i}", refill && ptw.resp.bits.entry.level.getOrElse(0.U) =/= 2.U && i.U === sRefillIdx)
  }

  // Log
  for (i <- 0 until Width) {
    XSDebug(req(i).valid, p"req(${i.U}): (${req(i).valid} ${req(i).ready}) ${req(i).bits}\n")
    XSDebug(resp(i).valid, p"resp(${i.U}): (${resp(i).valid} ${resp(i).ready}) ${resp(i).bits}\n")
  }

  XSDebug(sfence.valid, p"Sfence: ${sfence}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"CSR: ${csr}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"vmEnable:${vmEnable} hit:${Binary(VecInit(hitVec).asUInt)} miss:${Binary(VecInit(missVec).asUInt)} v:${Hexadecimal(VecInit(v).asUInt)} pf:${Hexadecimal(pf.asUInt)}\n")
  for (i <- ptw.req.indices) {
    XSDebug(ptw.req(i).fire(), p"PTW req:${ptw.req(i).bits}\n")
  }
  XSDebug(ptw.resp.valid, p"PTW resp:${ptw.resp.bits} (v:${ptw.resp.valid} r:${ptw.resp.ready})\n")

  // NOTE: only for simple-TLB debugging; keep it commented out otherwise
  // assert(!io.ptw.resp.valid || io.ptw.resp.bits.entry.tag === io.ptw.resp.bits.entry.ppn, "Simple tlb debug requires vpn === ppn")
}

object TLB {
  def apply
  (
    in: Seq[BlockTlbRequestIO],
    sfence: SfenceBundle,
    csr: TlbCsrBundle,
    width: Int,
    isDtlb: Boolean,
    shouldBlock: Boolean
  )(implicit p: Parameters) = {
    require(in.length == width)

    val tlb = Module(new TLB(width, isDtlb))

    tlb.io.sfence <> sfence
    tlb.io.csr <> csr

    if (!shouldBlock) { // dtlb
      for (i <- 0 until width) {
        tlb.io.requestor(i) <> in(i)
        // tlb.io.requestor(i).req.valid := in(i).req.valid
        // tlb.io.requestor(i).req.bits := in(i).req.bits
        // in(i).req.ready := tlb.io.requestor(i).req.ready

        // in(i).resp.valid := tlb.io.requestor(i).resp.valid
        // in(i).resp.bits := tlb.io.requestor(i).resp.bits
        // tlb.io.requestor(i).resp.ready := in(i).resp.ready
      }
    } else { // itlb
      require(width == 1)
      tlb.io.requestor(0).req.valid := in(0).req.valid
      tlb.io.requestor(0).req.bits := in(0).req.bits
      in(0).req.ready := !tlb.io.requestor(0).resp.bits.miss && in(0).resp.ready && tlb.io.requestor(0).req.ready

      in(0).resp.valid := tlb.io.requestor(0).resp.valid && !tlb.io.requestor(0).resp.bits.miss
      in(0).resp.bits := tlb.io.requestor(0).resp.bits
      tlb.io.requestor(0).resp.ready := in(0).resp.ready
    }

    tlb.io.ptw
  }
}
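
// Usage sketch (hypothetical wiring, for illustration only; the requestor/sfence/csr sources
// below are made-up names, and the real connection sites live in the frontend / memory block):
//
//   val dtlbPtwIO = TLB(
//     in     = Seq(loadUnit.io.dtlb, storeUnit.io.dtlb), // hypothetical BlockTlbRequestIO sources
//     sfence = fence.io.sfence,                          // hypothetical SfenceBundle source
//     csr    = csrCtrl.io.tlbCsr,                        // hypothetical TlbCsrBundle source
//     width  = 2,
//     isDtlb = true,
//     shouldBlock = false
//   )
//   // apply() returns tlb.io.ptw (the TLB's PTW-side IO); the caller must connect it to the page table walker:
//   ptw.io.tlb(0) <> dtlbPtwIO                           // hypothetical PTW port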