/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.internal.naming.chiselName
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The miss queue does not care about duplicate requests; deduplication is
 * handled by the PtwFilter. The PtwMissQueue is essentially a Chisel Queue
 * with flush support.
 */

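/* Per-entry life cycle, summarizing the state machine implemented below
 * (the state names are the ones defined in L2TlbMissQueue):
 *
 *   state_idle --(enq, l3 hint valid)--> state_mem_req
 *   state_idle --(enq, no l3 hint)-----> state_cache -----(cache fire)---> state_idle
 *   state_mem_req --(mem_arb fire)-----> state_mem_waiting
 *   state_mem_waiting --(mem.resp)-----> state_mem_out ---(mem.out fire)-> state_idle
 *
 * An sfence flushes every entry back to state_idle.
 */
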
class L2TlbMQEntry(implicit p: Parameters) extends XSBundle with HasPtwConst {
  val vpn = UInt(vpnLen.W)       // virtual page number of the request
  val source = UInt(bPtwWidth.W) // id of the requestor
  val ppn = UInt(ppnLen.W)       // ppn from the l3 hint, used to build the PTE address
}

class L2TlbMQIO(implicit p: Parameters) extends XSBundle with HasPtwConst {
  val in = Flipped(Decoupled(new Bundle {
    val vpn = Output(UInt(vpnLen.W))
    val source = Output(UInt(bPtwWidth.W))
    val l3 = Valid(UInt(PAddrBits.W)) // when valid: ppn of the page table to read; the entry skips the cache and goes straight to memory
  }))
  val sfence = Input(new SfenceBundle)
  val cache = Decoupled(new Bundle { // replay the request back to the page cache
    val vpn = UInt(vpnLen.W)
    val source = UInt(bPtwWidth.W)
  })
  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(Valid(new Bundle {
      val id = Output(UInt(log2Up(MSHRSize).W))
    }))
    val out = DecoupledIO(new Bundle {
      val source = Output(UInt(bPtwWidth.W))
      val id = Output(UInt(bMemID.W))
      val vpn = Output(UInt(vpnLen.W))
    })
    val refill_vpn = Output(UInt(vpnLen.W)) // vpn of the entry whose refill is in progress
    val req_mask = Input(Vec(MSHRSize, Bool())) // per-entry mask that blocks issuing the mem request
  }
}
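
/* Memory interface handshake, as implemented below: mem.req carries the PTE
 * address together with the entry index as its id; mem.resp returns that id
 * when the data comes back; mem.out then hands the finished request (source,
 * vpn, id) to the next stage. req_mask lets the parent module block individual
 * entries from issuing their mem request; the masking policy is decided
 * outside this module.
 */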
64
@chiselName
class L2TlbMissQueue(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new L2TlbMQIO())

  val state_idle :: state_cache :: state_mem_req :: state_mem_waiting :: state_mem_out :: Nil = Enum(5)

  // one state machine per MSHR entry
  val state = RegInit(VecInit(Seq.fill(MSHRSize)(state_idle)))
  val is_emptys = state.map(_ === state_idle)
  val is_caches = state.map(_ === state_cache)
  val is_mems = state.map(_ === state_mem_req)
  val is_waiting = state.map(_ === state_mem_waiting)
  val is_having = state.map(_ === state_mem_out)

  val entries = Reg(Vec(MSHRSize, new L2TlbMQEntry()))

  val full = !ParallelOR(is_emptys).asBool()
  val non_empty = ParallelOR(is_caches).asBool()

  // pick the first free entry to enqueue into, the first cache-bound entry to
  // replay, and the first finished entry to hand to mem.out
  val enq_ptr = ParallelPriorityEncoder(is_emptys)
  val cache_ptr = ParallelPriorityEncoder(is_caches)
  val mem_ptr = ParallelPriorityEncoder(is_having)

  // round-robin arbitration among entries that are ready to issue a mem request
  val mem_arb = Module(new RRArbiter(new L2TlbMQEntry(), MSHRSize))
  for (i <- 0 until MSHRSize) {
    mem_arb.io.in(i).bits := entries(i)
    mem_arb.io.in(i).valid := is_mems(i) && !io.mem.req_mask(i)
  }

  // enqueue: with a valid l3 hint the entry goes straight to memory,
  // otherwise it is replayed to the page cache first
  when (io.in.fire()) {
    state(enq_ptr) := Mux(io.in.bits.l3.valid, state_mem_req, state_cache)
    entries(enq_ptr).vpn := io.in.bits.vpn
    entries(enq_ptr).ppn := io.in.bits.l3.bits
    entries(enq_ptr).source := io.in.bits.source
  }
  when (mem_arb.io.out.fire()) {
    state(mem_arb.io.chosen) := state_mem_waiting
  }
  when (io.mem.resp.fire()) {
    state(io.mem.resp.bits.id(log2Up(MSHRSize)-1, 0)) := state_mem_out
  }
  when (io.mem.out.fire()) {
    assert(state(mem_ptr) === state_mem_out)
    state(mem_ptr) := state_idle
  }
  when (io.cache.fire()) {
    state(cache_ptr) := state_idle
  }

  // sfence flushes all entries
  when (io.sfence.valid) {
    state.foreach(_ := state_idle)
  }

  io.in.ready := !full
  io.cache.valid := non_empty
  io.cache.bits.vpn := entries(cache_ptr).vpn
  io.cache.bits.source := entries(cache_ptr).source
  io.mem.out.valid := ParallelOR(is_having).asBool()
  io.mem.out.bits.source := entries(mem_ptr).source
  io.mem.out.bits.vpn := entries(mem_ptr).vpn
  io.mem.out.bits.id := mem_ptr
  io.mem.req.valid := mem_arb.io.out.valid
  io.mem.req.bits.addr := MakeAddr(mem_arb.io.out.bits.ppn, getVpnn(mem_arb.io.out.bits.vpn, 0))
  io.mem.req.bits.id := mem_arb.io.chosen
  mem_arb.io.out.ready := io.mem.req.ready
  // vpn of the entry being refilled, one cycle after the mem response arrives
  io.mem.refill_vpn := entries(RegNext(io.mem.resp.bits.id(log2Up(MSHRSize)-1, 0))).vpn

  XSPerfAccumulate("mq_in_count", io.in.fire())
  XSPerfAccumulate("mq_in_block", io.in.valid && !io.in.ready)
  for (i <- 0 until (MSHRSize + 1)) {
    XSPerfAccumulate(s"util${i}", PopCount(is_emptys.map(!_)) === i.U)
    XSPerfAccumulate(s"cache_util${i}", PopCount(is_caches) === i.U)
    XSPerfAccumulate(s"mem_util${i}", PopCount(is_mems) === i.U)
    XSPerfAccumulate(s"waiting_util${i}", PopCount(is_waiting) === i.U)
  }
  XSPerfAccumulate("mem_count", io.mem.req.fire())
  XSPerfAccumulate("mem_cycle", PopCount(is_waiting) =/= 0.U)
}
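
/* A minimal wiring sketch for reference, showing how a parent module might
 * connect this queue. Everything here is illustrative: `ptwCache`,
 * `memReadPort`, `inflightMask` and the surrounding module are hypothetical
 * names, not part of the XiangShan code base.
 *
 *   val missQueue = Module(new L2TlbMissQueue())
 *   missQueue.io.in <> ptwCache.io.missOut        // page-cache misses enqueue here
 *   missQueue.io.sfence := io.sfence              // flush every entry on sfence
 *   ptwCache.io.replayIn <> missQueue.io.cache    // replay path back to the page cache
 *   memReadPort.req <> missQueue.io.mem.req       // PTE reads to memory
 *   missQueue.io.mem.resp.valid := memReadPort.resp.valid
 *   missQueue.io.mem.resp.bits.id := memReadPort.resp.bits.id
 *   missQueue.io.mem.req_mask := inflightMask     // parent-defined per-entry masking
 *   // missQueue.io.mem.out feeds the next pipeline stage with the finished request
 */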