xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/TLBStorage.scala (revision a1d4b4bfaad30c6cc4e94d9602e3f65b9e9899d7)
/***************************************************************************************
  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
  * Copyright (c) 2020-2021 Peng Cheng Laboratory
  *
  * XiangShan is licensed under Mulan PSL v2.
  * You can use this software according to the terms and conditions of the Mulan PSL v2.
  * You may obtain a copy of Mulan PSL v2 at:
  *          http://license.coscl.org.cn/MulanPSL2
  *
  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
  *
  * See the Mulan PSL v2 for more details.
  ***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import freechips.rocketchip.formal.PropertyClass
import xiangshan.backend.fu.util.HasCSRConst

import scala.math.min

// For direct-mapped TLBs; not currently used.
class BankedAsyncDataModuleTemplateWithDup[T <: Data](
  gen: T,
  numEntries: Int,
  numRead: Int,
  numDup: Int,
  numBanks: Int
) extends Module {
  val io = IO(new Bundle {
    val raddr = Vec(numRead, Input(UInt(log2Ceil(numEntries).W)))
    val rdata = Vec(numRead, Vec(numDup, Output(gen)))
    val wen   = Input(Bool())
    val waddr = Input(UInt(log2Ceil(numEntries).W))
    val wdata = Input(gen)
  })
  require(numBanks > 1)
  require(numEntries > numBanks)

  val numBankEntries = numEntries / numBanks
  def bankOffset(address: UInt): UInt = {
    address(log2Ceil(numBankEntries) - 1, 0)
  }

  def bankIndex(address: UInt): UInt = {
    address(log2Ceil(numEntries) - 1, log2Ceil(numBankEntries))
  }
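
  // Worked example (illustrative numbers only, not a configuration used in this file):
  // with numEntries = 32 and numBanks = 4, numBankEntries = 8, so bankOffset selects
  // address bits [2:0] and bankIndex selects bits [4:3]; address 0x13 (0b10011) then
  // maps to offset 3 of bank 2.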

  val dataBanks = Seq.tabulate(numBanks)(i => {
    val bankEntries = if (i < numBanks - 1) numBankEntries else (numEntries - (i * numBankEntries))
    Mem(bankEntries, gen)
  })

  // asynchronous read of all banks, registered once; the result is selected in the next cycle
  for (i <- 0 until numRead) {
    val data_read = Reg(Vec(numDup, Vec(numBanks, gen)))
    val bank_index = Reg(Vec(numDup, UInt(numBanks.W)))
    for (j <- 0 until numDup) {
      bank_index(j) := UIntToOH(bankIndex(io.raddr(i)))
      for (k <- 0 until numBanks) {
        data_read(j)(k) := Mux(io.wen && (io.waddr === io.raddr(i)),
          io.wdata, dataBanks(k)(bankOffset(io.raddr(i))))
      }
    }
    // next cycle: pick the registered data of the addressed bank
    for (j <- 0 until numDup) {
      io.rdata(i)(j) := Mux1H(bank_index(j), data_read(j))
    }
  }

  // write
  for (i <- 0 until numBanks) {
    when (io.wen && (bankIndex(io.waddr) === i.U)) {
      dataBanks(i)(bankOffset(io.waddr)) := io.wdata
    }
  }
}
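
// Minimal instantiation sketch (hypothetical parameters and signal names, shown only for
// illustration; this banked direct-mapped storage is not instantiated elsewhere in this file):
//
//   val dataModule = Module(new BankedAsyncDataModuleTemplateWithDup(
//     gen = new TlbSectorEntry(true, true), numEntries = 32, numRead = 2,
//     numDup = 1, numBanks = 4))
//   dataModule.io.raddr(0) := readIndex   // readIndex/refill* are assumed driver signals
//   val entry = dataModule.io.rdata(0)(0) // data is valid one cycle after raddr is applied
//   dataModule.io.wen   := refillValid
//   dataModule.io.waddr := refillIndex
//   dataModule.io.wdata := refillEntry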

class TLBFA(
  parentName: String,
  ports: Int,
  nDups: Int,
  nSets: Int,
  nWays: Int,
  saveLevel: Boolean = false,
  normalPage: Boolean,
  superPage: Boolean
)(implicit p: Parameters) extends TlbModule with HasPerfEvents {

  val io = IO(new TlbStorageIO(nSets, nWays, ports, nDups))
  io.r.req.map(_.ready := true.B)

  val v = RegInit(VecInit(Seq.fill(nWays)(false.B)))
  val entries = Reg(Vec(nWays, new TlbSectorEntry(normalPage, superPage)))
  val g = entries.map(_.perm.g)

  for (i <- 0 until ports) {
    val req = io.r.req(i)
    val resp = io.r.resp(i)
    val access = io.access(i)

    val vpn = req.bits.vpn
    val vpn_reg = RegEnable(vpn, req.fire)
    val vpn_gen_ppn = if (saveLevel) vpn else vpn_reg
    val hasS2xlate = req.bits.s2xlate =/= noS2xlate
    val OnlyS2 = req.bits.s2xlate === onlyStage2
    val OnlyS1 = req.bits.s2xlate === onlyStage1
    val refill_mask = Mux(io.w.valid, UIntToOH(io.w.bits.wayIdx), 0.U(nWays.W))
    val hitVec = VecInit((entries.zipWithIndex).zip(v zip refill_mask.asBools).map{
      case (e, m) => {
        val s2xlate_hit = e._1.s2xlate === req.bits.s2xlate
        val hit = e._1.hit(vpn, Mux(hasS2xlate, io.csr.vsatp.asid, io.csr.satp.asid), vmid = io.csr.hgatp.asid, hasS2xlate = hasS2xlate, onlyS2 = OnlyS2, onlyS1 = OnlyS1)
        s2xlate_hit && hit && m._1 && !m._2
      }
    })
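    // In hitVec above, an entry hits only when its translation stage (s2xlate) matches the
    // request, the tag/ASID/VMID comparison in hit() succeeds, the entry is valid (m._1),
    // and it is not the way being refilled in this cycle (!m._2, from refill_mask).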

    hitVec.suggestName("hitVec")

    val hitVecReg = RegEnable(hitVec, req.fire)
    // The sector TLB may report multiple hits on read; see def "wbhit"
    XSPerfAccumulate(s"port${i}_multi_hit", !(!resp.valid || (PopCount(hitVecReg) === 0.U || PopCount(hitVecReg) === 1.U)))

    resp.valid := RegNext(req.valid)
    resp.bits.hit := Cat(hitVecReg).orR
    if (nWays == 1) {
      for (d <- 0 until nDups) {
        resp.bits.ppn(d) := RegEnable(entries(0).genPPN(saveLevel, req.valid)(vpn), req.fire)
        resp.bits.perm(d) := RegEnable(entries(0).perm, req.fire)
      }
    } else {
      for (d <- 0 until nDups) {
        resp.bits.ppn(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.genPPN(saveLevel, req.valid)(vpn))), req.fire)
        resp.bits.perm(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.perm)), req.fire)
      }
    }

    access.sets := get_set_idx(vpn_reg(vpn_reg.getWidth - 1, sectortlbwidth), nSets) // not used
    access.touch_ways.valid := resp.valid && Cat(hitVecReg).orR
    access.touch_ways.bits := OHToUInt(hitVecReg)

    resp.bits.hit.suggestName("hit")
    resp.bits.ppn.suggestName("ppn")
    resp.bits.perm.suggestName("perm")
    resp.bits.g_perm.suggestName("g_perm")
  }

  when (io.w.valid) {
    v(io.w.bits.wayIdx) := true.B
    entries(io.w.bits.wayIdx).apply(io.w.bits.data)
  }
  // Write assertion: the refilled entry must not duplicate an existing valid entry.
  val w_hit_vec = VecInit(entries.zip(v).map{case (e, vi) => e.wbhit(io.w.bits.data, Mux(io.w.bits.data.s2xlate =/= noS2xlate, io.csr.vsatp.asid, io.csr.satp.asid), s2xlate = io.w.bits.data.s2xlate) && vi })
  XSError(io.w.valid && Cat(w_hit_vec).orR, s"${parentName} refill duplicates an existing entry")

  val refill_vpn_reg = RegNext(io.w.bits.data.s1.entry.tag)
  val refill_wayIdx_reg = RegNext(io.w.bits.wayIdx)
  when (RegNext(io.w.valid)) {
    io.access.map { access =>
      access.sets := get_set_idx(refill_vpn_reg, nSets)
      access.touch_ways.valid := true.B
      access.touch_ways.bits := refill_wayIdx_reg
    }
  }

  val sfence = io.sfence
  val sfence_valid = sfence.valid && !sfence.bits.hg && !sfence.bits.hv
  val sfence_vpn = sfence.bits.addr(VAddrBits - 1, offLen)
  val sfenceHit = entries.map(_.hit(sfence_vpn, sfence.bits.id, vmid = io.csr.hgatp.asid, hasS2xlate = io.csr.priv.virt))
  val sfenceHit_noasid = entries.map(_.hit(sfence_vpn, sfence.bits.id, ignoreAsid = true, vmid = io.csr.hgatp.asid, hasS2xlate = io.csr.priv.virt))
  // An sfence hit flushes all sectors of the matching entry
  when (sfence_valid) {
    when (sfence.bits.rs1) { // bits.rs1 is set when rs1 === x0: flush regardless of virtual address
      when (sfence.bits.rs2) { // bits.rs2 is set when rs2 === x0: flush regardless of ASID
        // all addr and all asid
        v.zipWithIndex.map{ case(a, i) => a := a && !((io.csr.priv.virt === false.B && entries(i).s2xlate === noS2xlate) ||
          (io.csr.priv.virt && entries(i).s2xlate =/= noS2xlate && entries(i).vmid === io.csr.hgatp.asid))}
      }.otherwise {
        // all addr but specific asid
        v.zipWithIndex.map{ case (a, i) => a := a && !(!g(i) && ((!io.csr.priv.virt && entries(i).s2xlate === noS2xlate && entries(i).asid === sfence.bits.id) ||
          (io.csr.priv.virt && entries(i).s2xlate =/= noS2xlate && entries(i).asid === sfence.bits.id && entries(i).vmid === io.csr.hgatp.asid)))}
      }
    }.otherwise {
      when (sfence.bits.rs2) {
        // specific addr but all asid
        v.zipWithIndex.map{ case (a, i) => a := a & !sfenceHit_noasid(i) }
      }.otherwise {
        // specific addr and specific asid
        v.zipWithIndex.map{ case (a, i) => a := a & !(sfenceHit(i) && !g(i)) }
      }
    }
  }

  val hfencev_valid = sfence.valid && sfence.bits.hv
  val hfenceg_valid = sfence.valid && sfence.bits.hg
  val hfencev = io.sfence
  val hfencev_vpn = sfence_vpn
  val hfencevHit = entries.map(_.hit(hfencev_vpn, hfencev.bits.id, vmid = io.csr.hgatp.asid, hasS2xlate = true.B))
  val hfencevHit_noasid = entries.map(_.hit(hfencev_vpn, 0.U, ignoreAsid = true, vmid = io.csr.hgatp.asid, hasS2xlate = true.B))
  when (hfencev_valid) {
    when (hfencev.bits.rs1) {
      when (hfencev.bits.rs2) {
        v.zipWithIndex.map { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate && entries(i).vmid === io.csr.hgatp.asid)}
      }.otherwise {
        v.zipWithIndex.map { case (a, i) => a := a && !(!g(i) && (entries(i).s2xlate =/= noS2xlate && entries(i).asid === sfence.bits.id && entries(i).vmid === io.csr.hgatp.asid))
        }
      }
    }.otherwise {
      when (hfencev.bits.rs2) {
        v.zipWithIndex.map{ case (a, i) => a := a && !hfencevHit_noasid(i) }
      }.otherwise {
        v.zipWithIndex.map{ case (a, i) => a := a && !(hfencevHit(i) && !g(i)) }
      }
    }
  }

  val hfenceg = io.sfence
  val hfenceg_gvpn = sfence_vpn
  when (hfenceg_valid) {
    when (hfenceg.bits.rs2) {
      v.zipWithIndex.map { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate) }
    }.otherwise {
      v.zipWithIndex.map { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate && entries(i).vmid === sfence.bits.id) }
    }
  }
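  // hfence.gvma handling above: when bits.rs2 is set (rs2 === x0), every entry that used
  // stage-2 translation is flushed; otherwise only entries whose VMID matches sfence.bits.id
  // are flushed.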

  XSPerfAccumulate(s"access", io.r.resp.map(_.valid.asUInt).fold(0.U)(_ + _))
  XSPerfAccumulate(s"hit", io.r.resp.map(a => a.valid && a.bits.hit).fold(0.U)(_.asUInt + _.asUInt))

  for (i <- 0 until nWays) {
    XSPerfAccumulate(s"access${i}", io.r.resp.zip(io.access.map(acc => UIntToOH(acc.touch_ways.bits))).map{ case (a, b) =>
      a.valid && a.bits.hit && b(i)}.fold(0.U)(_.asUInt + _.asUInt))
  }
  for (i <- 0 until nWays) {
    XSPerfAccumulate(s"refill${i}", io.w.valid && io.w.bits.wayIdx === i.U)
  }

  val perfEvents = Seq(
    ("tlbstore_access", io.r.resp.map(_.valid.asUInt).fold(0.U)(_ + _)),
    ("tlbstore_hit   ", io.r.resp.map(a => a.valid && a.bits.hit).fold(0.U)(_.asUInt + _.asUInt)),
  )
  generatePerfEvent()

  println(s"${parentName} tlb_fa: nSets:${nSets} nWays:${nWays}")
}

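// Software TLB used for simulation: instead of caching entries, each request is translated
// on the fly through a PTEHelper; selected by TlbStorage when SoftTLB (coreParams.softTLB)
// is set.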
class TLBFakeFA(
  ports: Int,
  nDups: Int,
  nSets: Int,
  nWays: Int,
  useDmode: Boolean = false
)(implicit p: Parameters) extends TlbModule with HasCSRConst {

  val io = IO(new TlbStorageIO(nSets, nWays, ports, nDups))
  io.r.req.map(_.ready := true.B)
  val mode = if (useDmode) io.csr.priv.dmode else io.csr.priv.imode
  val vmEnable = if (EnbaleTlbDebug) (io.csr.satp.mode === 8.U)
    else (io.csr.satp.mode === 8.U && (mode < ModeM))

  for (i <- 0 until ports) {
    val req = io.r.req(i)
    val resp = io.r.resp(i)

    val helper = Module(new PTEHelper())
    helper.clock := clock
    helper.satp := io.csr.satp.ppn
    helper.enable := req.fire && vmEnable
    helper.vpn := req.bits.vpn

    val pte = helper.pte.asTypeOf(new PteBundle)
    val ppn = pte.ppn
    val vpn_reg = RegNext(req.bits.vpn)
    val pf = helper.pf
    val level = helper.level

    resp.valid := RegNext(req.valid)
    resp.bits.hit := true.B
    for (d <- 0 until nDups) {
      resp.bits.perm(d).pf := pf
      resp.bits.perm(d).af := false.B
      resp.bits.perm(d).d := pte.perm.d
      resp.bits.perm(d).a := pte.perm.a
      resp.bits.perm(d).g := pte.perm.g
      resp.bits.perm(d).u := pte.perm.u
      resp.bits.perm(d).x := pte.perm.x
      resp.bits.perm(d).w := pte.perm.w
      resp.bits.perm(d).r := pte.perm.r

      resp.bits.ppn(d) := MuxLookup(level, 0.U)(Seq(
        0.U -> Cat(ppn(ppn.getWidth-1, vpnnLen*2), vpn_reg(vpnnLen*2-1, 0)),
        1.U -> Cat(ppn(ppn.getWidth-1, vpnnLen), vpn_reg(vpnnLen-1, 0)),
        2.U -> ppn)
      )
    }
  }

  io.access := DontCare
}

object TlbStorage {
  def apply
  (
    parentName: String,
    associative: String,
    ports: Int,
    nDups: Int = 1,
    nSets: Int,
    nWays: Int,
    saveLevel: Boolean = false,
    normalPage: Boolean,
    superPage: Boolean,
    useDmode: Boolean,
    SoftTLB: Boolean
  )(implicit p: Parameters) = {
    if (SoftTLB) {
      val storage = Module(new TLBFakeFA(ports, nDups, nSets, nWays, useDmode))
      storage.suggestName(s"${parentName}_fake_fa")
      storage.io
    } else {
      val storage = Module(new TLBFA(parentName, ports, nDups, nSets, nWays, saveLevel, normalPage, superPage))
      storage.suggestName(s"${parentName}_fa")
      storage.io
    }
  }
}
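
// Usage sketch (parameter values are illustrative, not taken from a real configuration;
// the actual call site is TlbStorageWrapper below):
//
//   val page = TlbStorage(
//     parentName = "ldtlb_storage", associative = "fa",
//     ports = 2, nDups = 1, nSets = 1, nWays = 48,
//     normalPage = true, superPage = true,
//     useDmode = true, SoftTLB = false)
//   // `page` is the storage's TlbStorageIO bundle, not the Module itself.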

class TlbStorageWrapper(ports: Int, q: TLBParameters, nDups: Int = 1)(implicit p: Parameters) extends TlbModule {
  val io = IO(new TlbStorageWrapperIO(ports, q, nDups))

  val page = TlbStorage(
    parentName = q.name + "_storage",
    associative = q.Associative,
    ports = ports,
    nDups = nDups,
    nSets = q.NSets,
    nWays = q.NWays,
    normalPage = true,
    superPage = true,
    useDmode = q.useDmode,
    SoftTLB = coreParams.softTLB
  )

  for (i <- 0 until ports) {
    page.r_req_apply(
      valid = io.r.req(i).valid,
      vpn = io.r.req(i).bits.vpn,
      i = i,
      s2xlate = io.r.req(i).bits.s2xlate
    )
  }

  for (i <- 0 until ports) {
    val q = page.r.req(i)
    val p = page.r.resp(i)
    val rq = io.r.req(i)
    val rp = io.r.resp(i)
    rq.ready := q.ready // actually, not used
    rp.valid := p.valid // actually, not used
    rp.bits.hit := p.bits.hit
    for (d <- 0 until nDups) {
      rp.bits.ppn(d) := p.bits.ppn(d)
      rp.bits.perm(d).pf := p.bits.perm(d).pf
      rp.bits.perm(d).af := p.bits.perm(d).af
      rp.bits.perm(d).d := p.bits.perm(d).d
      rp.bits.perm(d).a := p.bits.perm(d).a
      rp.bits.perm(d).g := p.bits.perm(d).g
      rp.bits.perm(d).u := p.bits.perm(d).u
      rp.bits.perm(d).x := p.bits.perm(d).x
      rp.bits.perm(d).w := p.bits.perm(d).w
      rp.bits.perm(d).r := p.bits.perm(d).r
    }
  }

  page.sfence <> io.sfence
  page.csr <> io.csr

  val refill_idx = if (q.outReplace) {
    io.replace.page.access <> page.access
    io.replace.page.chosen_set := DontCare
    io.replace.page.refillIdx
  } else {
    val re = ReplacementPolicy.fromString(q.Replacer, q.NWays)
    re.access(page.access.map(_.touch_ways))
    re.way
  }
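  // Victim way selection: with q.outReplace the replacement decision is made outside this
  // module through io.replace; otherwise a local replacement policy (q.Replacer) is updated
  // with the touched ways and provides the refill way.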

  page.w_apply(
    valid = io.w.valid,
    wayIdx = refill_idx,
    data = io.w.bits.data
  )

  // replacement helper: convert a one-hot touch vector into a Valid way index
  def get_access(one_hot: UInt, valid: Bool): Valid[UInt] = {
    val res = Wire(Valid(UInt(log2Up(one_hot.getWidth).W)))
    res.valid := Cat(one_hot).orR && valid
    res.bits := OHToUInt(one_hot)
    res
  }
}
409