// xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/Repeater.scala (revision 60ebee385ce85a25a994f6da0c84ecce9bb91bca)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

class PTWReapterIO(Width: Int)(implicit p: Parameters) extends MMUIOBaseBundle {
  val tlb = Flipped(new TlbPtwIO(Width))
  val ptw = new TlbPtwIO

  def apply(tlb: TlbPtwIO, ptw: TlbPtwIO, sfence: SfenceBundle, csr: TlbCsrBundle): Unit = {
    this.tlb <> tlb
    this.ptw <> ptw
    this.sfence <> sfence
    this.csr <> csr
  }

  def apply(tlb: TlbPtwIO, sfence: SfenceBundle, csr: TlbCsrBundle): Unit = {
    this.tlb <> tlb
    this.sfence <> sfence
    this.csr <> csr
  }

}

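/* PTWRepeater: a blocking, single-entry repeater between the TLB and the PTW (L2 TLB).
 * At most one request is tracked at a time: haveOne marks an outstanding request and
 * deasserts req_in.ready, sent marks that the request has been issued to the PTW, and
 * recv marks that the response has been captured and can be replayed to the TLB.
 * A flush (sfence or satp.changed, delayed by FenceDelay cycles) clears this state.
 * BoolStopWatch(start, stop) is assumed here to act as a set/reset flag: set when
 * start fires, cleared when stop fires.
 */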
class PTWRepeater(Width: Int = 1, FenceDelay: Int)(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PTWReapterIO(Width))

  val req_in = if (Width == 1) {
    io.tlb.req(0)
  } else {
    val arb = Module(new RRArbiter(io.tlb.req(0).bits.cloneType, Width))
    arb.io.in <> io.tlb.req
    arb.io.out
  }
  val (tlb, ptw, flush) = (io.tlb, io.ptw, DelayN(io.sfence.valid || io.csr.satp.changed, FenceDelay))
  val req = RegEnable(req_in.bits, req_in.fire())
  val resp = RegEnable(ptw.resp.bits, ptw.resp.fire())
  val haveOne = BoolStopWatch(req_in.fire(), tlb.resp.fire() || flush)
  val sent = BoolStopWatch(ptw.req(0).fire(), req_in.fire() || flush)
  val recv = BoolStopWatch(ptw.resp.fire() && haveOne, req_in.fire() || flush)

  req_in.ready := !haveOne
  ptw.req(0).valid := haveOne && !sent
  ptw.req(0).bits := req

  tlb.resp.bits := resp
  tlb.resp.valid := haveOne && recv
  ptw.resp.ready := !recv

  XSPerfAccumulate("req_count", ptw.req(0).fire())
  XSPerfAccumulate("tlb_req_cycle", BoolStopWatch(req_in.fire(), tlb.resp.fire() || flush))
  XSPerfAccumulate("ptw_req_cycle", BoolStopWatch(ptw.req(0).fire(), ptw.resp.fire() || flush))

  XSDebug(haveOne, p"haveOne:${haveOne} sent:${sent} recv:${recv} sfence:${flush} req:${req} resp:${resp}")
  XSDebug(req_in.valid || io.tlb.resp.valid, p"tlb: ${tlb}\n")
  XSDebug(io.ptw.req(0).valid || io.ptw.resp.valid, p"ptw: ${ptw}\n")
  assert(!RegNext(recv && io.ptw.resp.valid, init = false.B), "re-receive ptw.resp")
  XSError(io.ptw.req(0).valid && io.ptw.resp.valid && !flush, "ptw repeater recv resp when sending")
  XSError(io.ptw.resp.valid && (req.vpn =/= io.ptw.resp.bits.entry.tag), "ptw repeater recv resp with wrong tag")
  XSError(io.ptw.resp.valid && !io.ptw.resp.ready, "ptw repeater's ptw resp back, but not ready")
  TimeOutAssert(sent && !recv, timeOutThreshold, "Repeater doesn't recv resp in time")
}

/* dtlb
 *
 */

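/* PTWRepeaterNB: a non-blocking variant of PTWRepeater with decoupled request and
 * response paths. sent buffers one request from the TLB until the PTW accepts it, and
 * recv buffers one PTW response until the TLB accepts it, so the two directions no
 * longer wait for each other. With passReady = true the buffer can accept a new item
 * in the same cycle the held one is handed downstream; with passReady = false a full
 * buffer simply back-pressures its sender.
 */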
class PTWRepeaterNB(Width: Int = 1, passReady: Boolean = false, FenceDelay: Int)(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PTWReapterIO(Width))

  val req_in = if (Width == 1) {
    io.tlb.req(0)
  } else {
    val arb = Module(new RRArbiter(io.tlb.req(0).bits.cloneType, Width))
    arb.io.in <> io.tlb.req
    arb.io.out
  }
  val (tlb, ptw, flush) = (io.tlb, io.ptw, DelayN(io.sfence.valid || io.csr.satp.changed, FenceDelay))
  /* sent: tlb -> repeater -> ptw
   * recv: ptw -> repeater -> tlb
   * different from PTWRepeater
   */

  // tlb -> repeater -> ptw
  val req = RegEnable(req_in.bits, req_in.fire())
  val sent = BoolStopWatch(req_in.fire(), ptw.req(0).fire() || flush)
  req_in.ready := !sent || { if (passReady) ptw.req(0).ready else false.B }
  ptw.req(0).valid := sent
  ptw.req(0).bits := req

  // ptw -> repeater -> tlb
  val resp = RegEnable(ptw.resp.bits, ptw.resp.fire())
  val recv = BoolStopWatch(ptw.resp.fire(), tlb.resp.fire() || flush)
  ptw.resp.ready := !recv || { if (passReady) tlb.resp.ready else false.B }
  tlb.resp.valid := recv
  tlb.resp.bits := resp

  XSPerfAccumulate("req", req_in.fire())
  XSPerfAccumulate("resp", tlb.resp.fire())
  if (!passReady) {
    XSPerfAccumulate("req_blank", req_in.valid && sent && ptw.req(0).ready)
    XSPerfAccumulate("resp_blank", ptw.resp.valid && recv && tlb.resp.ready)
    XSPerfAccumulate("req_blank_ignore_ready", req_in.valid && sent)
    XSPerfAccumulate("resp_blank_ignore_ready", ptw.resp.valid && recv)
  }
  XSDebug(req_in.valid || io.tlb.resp.valid, p"tlb: ${tlb}\n")
  XSDebug(io.ptw.req(0).valid || io.ptw.resp.valid, p"ptw: ${ptw}\n")
}

class PTWFilterIO(Width: Int)(implicit p: Parameters) extends MMUIOBaseBundle {
  val tlb = Flipped(new VectorTlbPtwIO(Width))
  val ptw = new TlbPtwIO()
  val rob_head_miss_in_tlb = Output(Bool())
  val debugTopDown = new Bundle {
    val robHeadVaddr = Flipped(Valid(UInt(VAddrBits.W)))
  }

  def apply(tlb: VectorTlbPtwIO, ptw: TlbPtwIO, sfence: SfenceBundle, csr: TlbCsrBundle): Unit = {
    this.tlb <> tlb
    this.ptw <> ptw
    this.sfence <> sfence
    this.csr <> csr
  }

  def apply(tlb: VectorTlbPtwIO, sfence: SfenceBundle, csr: TlbCsrBundle): Unit = {
    this.tlb <> tlb
    this.sfence <> sfence
    this.csr <> csr
  }

}

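/* PTWFilter: a multi-entry filter between a multi-port (vector) TLB and the PTW.
 * Misses that target the same vpn are merged before reaching the PTW, and ports
 * records which TLB port(s) asked for each entry so that a single PTW response can be
 * broadcast back to all of them. Entries live in a circular buffer managed by three
 * pointers: enqPtr (where newly filtered requests are written), issPtr (next entry to
 * issue to the PTW) and deqPtr (oldest entry not yet released), with counter tracking
 * occupancy and mayFullDeq/mayFullIss disambiguating full vs. empty when two pointers
 * are equal.
 */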
class PTWFilter(Width: Int, Size: Int, FenceDelay: Int)(implicit p: Parameters) extends XSModule with HasPtwConst {
  require(Size >= Width)

  val io = IO(new PTWFilterIO(Width))

  val v = RegInit(VecInit(Seq.fill(Size)(false.B)))
  val ports = Reg(Vec(Size, Vec(Width, Bool()))) // records which port(s) each entry came from; may not be able to cover all the ports
  val vpn = Reg(Vec(Size, UInt(vpnLen.W)))
  val memidx = Reg(Vec(Size, new MemBlockidxBundle))
  val enqPtr = RegInit(0.U(log2Up(Size).W)) // Enq
  val issPtr = RegInit(0.U(log2Up(Size).W)) // Iss to Ptw
  val deqPtr = RegInit(0.U(log2Up(Size).W)) // Deq
  val mayFullDeq = RegInit(false.B)
  val mayFullIss = RegInit(false.B)
  val counter = RegInit(0.U(log2Up(Size+1).W))

  val flush = DelayN(io.sfence.valid || io.csr.satp.changed, FenceDelay)
  val tlb_req = WireInit(io.tlb.req) // NOTE: tlb_req is not io.tlb.req; see the code below, this only reuses its type (cloneType)
  tlb_req.suggestName("tlb_req")

  val inflight_counter = RegInit(0.U(log2Up(Size + 1).W))
  val inflight_full = inflight_counter === Size.U
  when (io.ptw.req(0).fire() =/= io.ptw.resp.fire()) {
    inflight_counter := Mux(io.ptw.req(0).fire(), inflight_counter + 1.U, inflight_counter - 1.U)
  }

  val canEnqueue = Wire(Bool()) // NOTE: actually enqueue
  val ptwResp = RegEnable(io.ptw.resp.bits, io.ptw.resp.fire())
  val ptwResp_OldMatchVec = vpn.zip(v).map{ case (pi, vi) =>
    vi && io.ptw.resp.bits.hit(pi, io.csr.satp.asid, true, true)}
  val ptwResp_valid = RegNext(io.ptw.resp.fire() && Cat(ptwResp_OldMatchVec).orR, init = false.B)
  // May send repeated requests with the same vpn(26, 3) to the L2 TLB when the sector TLB is used
  val oldMatchVec_early = io.tlb.req.map(a => vpn.zip(v).map{ case (pi, vi) => vi && pi === a.bits.vpn})
  val lastReqMatchVec_early = io.tlb.req.map(a => tlb_req.map{ b => b.valid && b.bits.vpn === a.bits.vpn && canEnqueue})
  val newMatchVec_early = io.tlb.req.map(a => io.tlb.req.map(b => a.bits.vpn === b.bits.vpn))

  (0 until Width) foreach { i =>
    tlb_req(i).valid := RegNext(io.tlb.req(i).valid &&
      !(ptwResp_valid && ptwResp.hit(io.tlb.req(i).bits.vpn, 0.U, true, true)) &&
      !Cat(lastReqMatchVec_early(i)).orR,
      init = false.B)
    tlb_req(i).bits := RegEnable(io.tlb.req(i).bits, io.tlb.req(i).valid)
  }

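  // Duplicate detection for the registered requests in tlb_req:
  //  - oldMatchVec(i):         request i matches an entry already stored in the filter
  //  - newMatchVec(i)(j):      request i has the same vpn as request j of the same cycle
  //  - ptwResp_newMatchVec(i): request i is already covered by the PTW response being broadcast
  //  - oldMatchVec2/update_ports: which stored entries must also remember request i's port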
  val oldMatchVec = oldMatchVec_early.map(a => RegNext(Cat(a).orR))
  val newMatchVec = (0 until Width).map(i => (0 until Width).map(j =>
    RegNext(newMatchVec_early(i)(j)) && tlb_req(j).valid
  ))
  val ptwResp_newMatchVec = tlb_req.map(a =>
    ptwResp_valid && ptwResp.hit(a.bits.vpn, 0.U, allType = true, true))

  val oldMatchVec2 = (0 until Width).map(i => oldMatchVec_early(i).map(RegNext(_)).map(_ & tlb_req(i).valid))
  val update_ports = v.indices.map(i => oldMatchVec2.map(j => j(i)))
  val ports_init = (0 until Width).map(i => (1 << i).U(Width.W))
  val filter_ports = (0 until Width).map(i => ParallelMux(newMatchVec(i).zip(ports_init).drop(i)))
  val resp_vector = RegEnable(ParallelMux(ptwResp_OldMatchVec zip ports), io.ptw.resp.fire())

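  // A request does not need its own filter entry (canMerge) if it is already covered by
  // the PTW response being broadcast this cycle, by an existing entry, or by an earlier
  // request in the same cycle; filter_req() clears the valid of such merged requests.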
  def canMerge(index: Int) : Bool = {
    ptwResp_newMatchVec(index) || oldMatchVec(index) ||
    Cat(newMatchVec(index).take(index)).orR
  }

  def filter_req() = {
    val reqs = tlb_req.indices.map{ i =>
      val req = Wire(ValidIO(new PtwReqwithMemIdx()))
      val merge = canMerge(i)
      req.bits := tlb_req(i).bits
      req.valid := !merge && tlb_req(i).valid
      req
    }
    reqs
  }

  val reqs = filter_req()
  val req_ports = filter_ports
  val isFull = enqPtr === deqPtr && mayFullDeq
  val isEmptyDeq = enqPtr === deqPtr && !mayFullDeq
  val isEmptyIss = enqPtr === issPtr && !mayFullIss
  val accumEnqNum = (0 until Width).map(i => PopCount(reqs.take(i).map(_.valid)))
  val enqPtrVecInit = VecInit((0 until Width).map(i => enqPtr + i.U))
  val enqPtrVec = VecInit((0 until Width).map(i => enqPtrVecInit(accumEnqNum(i))))
  val enqNum = PopCount(reqs.map(_.valid))
  canEnqueue := counter +& enqNum <= Size.U

  // A req may see a false ready yet still be accepted; the Filter and the TLB both handle this case.
  val enqNum_fake = PopCount(io.tlb.req.map(_.valid))
  val canEnqueue_fake = counter +& enqNum_fake <= Size.U
  io.tlb.req.map(_.ready := canEnqueue_fake) // NOTE: reqs that do not fire are simply dropped

  // A tlb req can be flushed by a ptw resp: the last ptw resp is handled above, the current one here.
  // A flushed tlb req is still (fakely) enqueued, but with its valid forced to false.
  val tlb_req_flushed = reqs.map(a => io.ptw.resp.valid && io.ptw.resp.bits.hit(a.bits.vpn, 0.U, true, true))

  io.tlb.resp.valid := ptwResp_valid
  io.tlb.resp.bits.data.entry := ptwResp.entry
  io.tlb.resp.bits.data.addr_low := ptwResp.addr_low
  io.tlb.resp.bits.data.ppn_low := ptwResp.ppn_low
  io.tlb.resp.bits.data.valididx := ptwResp.valididx
  io.tlb.resp.bits.data.pteidx := ptwResp.pteidx
  io.tlb.resp.bits.data.pf := ptwResp.pf
  io.tlb.resp.bits.data.af := ptwResp.af
  io.tlb.resp.bits.data.memidx := memidx(OHToUInt(ptwResp_OldMatchVec))
  io.tlb.resp.bits.vector := resp_vector

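  // Issue path towards the PTW: the entry at issPtr is issued when it is valid, the
  // issue view of the queue is not empty, and the in-flight window is not full. If the
  // pending PTW response already covers this entry (issue_filtered), the request is
  // suppressed and the entry is invalidated instead of being sent out again.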
  val issue_valid = v(issPtr) && !isEmptyIss && !inflight_full
  val issue_filtered = ptwResp_valid && ptwResp.hit(io.ptw.req(0).bits.vpn, io.csr.satp.asid, allType=true, ignoreAsid=true)
  val issue_fire_fake = issue_valid && (io.ptw.req(0).ready || (issue_filtered && false.B /*timing-opt*/))
  io.ptw.req(0).valid := issue_valid && !issue_filtered
  io.ptw.req(0).bits.vpn := vpn(issPtr)
  io.ptw.resp.ready := true.B

  reqs.zipWithIndex.map{
    case (req, i) =>
      when (req.valid && canEnqueue) {
        v(enqPtrVec(i)) := !tlb_req_flushed(i)
        vpn(enqPtrVec(i)) := req.bits.vpn
        memidx(enqPtrVec(i)) := req.bits.memidx
        ports(enqPtrVec(i)) := req_ports(i).asBools
      }
  }
  for (i <- ports.indices) {
    when (v(i)) {
      ports(i) := ports(i).zip(update_ports(i)).map(a => a._1 || a._2)
    }
  }

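  // Pointer maintenance: enqPtr advances by the number of newly enqueued (unmerged)
  // requests, issPtr advances on a (possibly filtered) issue or when it points at an
  // already-invalidated entry, and deqPtr advances once its entry has been invalidated,
  // i.e. the corresponding response has been broadcast back to the TLB.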
  val do_enq = canEnqueue && Cat(reqs.map(_.valid)).orR
  val do_deq = (!v(deqPtr) && !isEmptyDeq)
  val do_iss = issue_fire_fake || (!v(issPtr) && !isEmptyIss)
  when (do_enq) {
    enqPtr := enqPtr + enqNum
  }
  when (do_deq) {
    deqPtr := deqPtr + 1.U
  }
  when (do_iss) {
    issPtr := issPtr + 1.U
  }
  when (issue_fire_fake && issue_filtered) { // issued but is filtered
    v(issPtr) := false.B
  }
  when (do_enq =/= do_deq) {
    mayFullDeq := do_enq
  }
  when (do_enq =/= do_iss) {
    mayFullIss := do_enq
  }

  when (io.ptw.resp.fire()) {
    v.zip(ptwResp_OldMatchVec).map{ case (vi, mi) => when (mi) { vi := false.B }}
  }

  counter := counter - do_deq + Mux(do_enq, enqNum, 0.U)
  assert(counter <= Size.U, "counter should be no more than Size")
  assert(inflight_counter <= Size.U, "inflight should be no more than Size")
  when (counter === 0.U) {
    assert(!io.ptw.req(0).fire(), "when counter is 0, should not req")
    assert(isEmptyDeq && isEmptyIss, "when counter is 0, should be empty")
  }
  when (counter === Size.U) {
    assert(mayFullDeq, "when counter is Size, should be full")
  }

  when (flush) {
    v.map(_ := false.B)
    deqPtr := 0.U
    enqPtr := 0.U
    issPtr := 0.U
    ptwResp_valid := false.B
    mayFullDeq := false.B
    mayFullIss := false.B
    counter := 0.U
    inflight_counter := 0.U
  }

  val robHeadVaddr = io.debugTopDown.robHeadVaddr
  io.rob_head_miss_in_tlb := VecInit(v.zip(vpn).map{case (vi, vpni) => {
    vi && robHeadVaddr.valid && vpni === get_pn(robHeadVaddr.bits)
  }}).asUInt.orR

  // perf
  XSPerfAccumulate("tlb_req_count", PopCount(Cat(io.tlb.req.map(_.valid))))
  XSPerfAccumulate("tlb_req_count_filtered", Mux(do_enq, accumEnqNum(Width - 1), 0.U))
  XSPerfAccumulate("ptw_req_count", io.ptw.req(0).fire())
  XSPerfAccumulate("ptw_req_cycle", inflight_counter)
  XSPerfAccumulate("tlb_resp_count", io.tlb.resp.fire())
  XSPerfAccumulate("ptw_resp_count", io.ptw.resp.fire())
  XSPerfAccumulate("inflight_cycle", !isEmptyDeq)
  for (i <- 0 until Size + 1) {
    XSPerfAccumulate(s"counter${i}", counter === i.U)
  }

  for (i <- 0 until Size) {
    TimeOutAssert(v(i), timeOutThreshold, s"Filter ${i} doesn't recv resp in time")
  }
}

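/* The companion objects below wrap module construction and wiring. A purely
 * illustrative use, assuming the surrounding design provides itlb_ptw / dtlb_ptw
 * ports, sfence and tlbCsr signals, an L2 TLB instance named ptw, and a filter size:
 *
 *   val itlbRepeater = PTWRepeaterNB(passReady = false, fenceDelay, itlb_ptw, sfence, tlbCsr)
 *   val dtlbFilter   = PTWFilter(fenceDelay, dtlb_ptw, sfence, tlbCsr, filterSize)
 *   itlbRepeater.io.ptw <> ptw.io.tlb(0)
 *   dtlbFilter.io.ptw   <> ptw.io.tlb(1)
 *
 * The single-argument apply variants leave io.ptw unconnected so the caller can hook
 * it up afterwards, as sketched above; the variants that also take a ptw argument
 * connect both sides directly.
 */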
object PTWRepeater {
  def apply(fenceDelay: Int,
    tlb: TlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val repeater = Module(new PTWRepeater(width, fenceDelay))
    repeater.io.apply(tlb, sfence, csr)
    repeater
  }

  def apply(fenceDelay: Int,
    tlb: TlbPtwIO,
    ptw: TlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val repeater = Module(new PTWRepeater(width, fenceDelay))
    repeater.io.apply(tlb, ptw, sfence, csr)
    repeater
  }
}

object PTWRepeaterNB {
  def apply(passReady: Boolean, fenceDelay: Int,
    tlb: TlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val repeater = Module(new PTWRepeaterNB(width, passReady, fenceDelay))
    repeater.io.apply(tlb, sfence, csr)
    repeater
  }

  def apply(passReady: Boolean, fenceDelay: Int,
    tlb: TlbPtwIO,
    ptw: TlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val repeater = Module(new PTWRepeaterNB(width, passReady, fenceDelay))
    repeater.io.apply(tlb, ptw, sfence, csr)
    repeater
  }
}

object PTWFilter {
  def apply(fenceDelay: Int,
    tlb: VectorTlbPtwIO,
    ptw: TlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle,
    size: Int
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val filter = Module(new PTWFilter(width, size, fenceDelay))
    filter.io.apply(tlb, ptw, sfence, csr)
    filter
  }

  def apply(fenceDelay: Int,
    tlb: VectorTlbPtwIO,
    sfence: SfenceBundle,
    csr: TlbCsrBundle,
    size: Int
  )(implicit p: Parameters) = {
    val width = tlb.req.size
    val filter = Module(new PTWFilter(width, size, fenceDelay))
    filter.io.apply(tlb, sfence, csr)
    filter
  }

}