/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.internal.naming.chiselName
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

/* ptw finite state machine, the actual page table walker
 */
class PtwFsmIO()(implicit p: Parameters) extends PtwBundle {
  val req = Flipped(DecoupledIO(new Bundle {
    val source = UInt(bSourceWidth.W)
    val l1Hit = Bool()
    val vpn = UInt(vpnLen.W)
    val ppn = UInt(ppnLen.W)
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bSourceWidth.W)
    val resp = new PtwResp
  })

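  // to the L2 TLB miss queue: when the walk reaches a level-1 pointer PTE, the
  // last-level access is handed off here instead of being issued by this FSM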
  val mq = DecoupledIO(new L2TlbMQInBundle())

  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(ValidIO(UInt(XLEN.W)))
    val mask = Input(Bool()) // when asserted, hold off issuing new memory requests
  }
  val pmp = new Bundle {
    val req = ValidIO(new PMPReqBundle())
    val resp = Flipped(new PMPRespBundle())
  }

  val csr = Input(new TlbCsrBundle)
  val sfence = Input(new SfenceBundle)
  val refill = Output(new Bundle {
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val source = UInt(bSourceWidth.W)
  })
}

@chiselName
class PtwFsm()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwFsmIO)

  val sfence = io.sfence
  val mem = io.mem
  val satp = io.csr.satp

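  // FSM states:
  //   s_idle:       wait for a request from the arbiter
  //   s_addr_check: send the PTE address to the PMP for a same-cycle check
  //   s_mem_req:    issue the memory read (skipped when an access fault is recorded)
  //   s_mem_resp:   wait for the memory response
  //   s_check_pte:  examine the fetched PTE: respond, go to the miss queue, or walk one level deeper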
  val s_idle :: s_addr_check :: s_mem_req :: s_mem_resp :: s_check_pte :: Nil = Enum(5)
  val state = RegInit(s_idle)
  val level = RegInit(0.U(log2Up(Level).W))
  val af_level = RegInit(0.U(log2Up(Level).W)) // the level to report on an access fault
  val ppn = Reg(UInt(ppnLen.W))
  val vpn = Reg(UInt(vpnLen.W))
  val levelNext = level + 1.U
  val l1Hit = Reg(Bool())
  val memPte = mem.resp.bits.asTypeOf(new PteBundle().cloneType)
  io.req.ready := state === s_idle

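  // the PMP is consulted in s_addr_check and again in s_check_pte before walking
  // one level deeper; accessFault latches the load access-fault result of the most recent check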
  val finish = WireInit(false.B)
  val sent_to_pmp = state === s_addr_check || (state === s_check_pte && !finish)
  val accessFault = RegEnable(io.pmp.resp.ld, sent_to_pmp)
  val pageFault = memPte.isPf(level)
  switch (state) {
    is (s_idle) {
      when (io.req.fire()) {
        val req = io.req.bits
        state := s_addr_check
        level := Mux(req.l1Hit, 1.U, 0.U)
        af_level := Mux(req.l1Hit, 1.U, 0.U)
        ppn := Mux(req.l1Hit, io.req.bits.ppn, satp.ppn)
        vpn := io.req.bits.vpn
        l1Hit := req.l1Hit
        accessFault := false.B
      }
    }

    is (s_addr_check) {
      state := s_mem_req
    }

    is (s_mem_req) {
      when (mem.req.fire()) {
        state := s_mem_resp
      }
      when (accessFault) {
        state := s_check_pte
      }
    }

    is (s_mem_resp) {
      when(mem.resp.fire()) {
        state := s_check_pte
        af_level := af_level + 1.U
      }
    }

    is (s_check_pte) {
      when (io.resp.valid) {
        when (io.resp.fire()) {
          state := s_idle
        }
        finish := true.B
      }.otherwise {
        when (io.pmp.resp.ld) {
          // do nothing
        }.elsewhen (io.mq.valid) {
          when (io.mq.fire()) {
            state := s_idle
          }
          finish := true.B
        }.otherwise { // only reachable at level 0: at level 1 the walk either responds or goes to the miss queue
          assert(level =/= 2.U)
          level := levelNext
          state := s_mem_req
        }
      }
    }
  }

  when (sfence.valid) {
    state := s_idle
    accessFault := false.B
  }

  // memPte is only meaningful in s_check_pte; in the cycle mem.resp fires it is not ready yet.
  val is_pte = memPte.isLeaf() || memPte.isPf(level)
  val find_pte = is_pte
  val to_find_pte = level === 1.U && !is_pte // a level-1 pointer PTE: the last access is left to the miss queue
  val source = RegEnable(io.req.bits.source, io.req.fire())
  io.resp.valid := state === s_check_pte && (find_pte || accessFault)
  io.resp.bits.source := source
  io.resp.bits.resp.apply(pageFault && !accessFault, accessFault, Mux(accessFault, af_level, level), memPte, vpn)

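  // hand the remaining last-level walk to the miss queue; l3 carries the next-level
  // page-table base ppn taken from the PTE just fetched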
  io.mq.valid := state === s_check_pte && to_find_pte && !accessFault
  io.mq.bits.source := source
  io.mq.bits.vpn := vpn
  io.mq.bits.l3.valid := true.B
  io.mq.bits.l3.bits := memPte.ppn

  assert(level =/= 2.U && level =/= 3.U) // this FSM never walks past level 1; the last level is handled by the miss queue

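  // PTE address generation: l1addr is the first access, rooted at satp.ppn;
  // l2addr is the second access, based at the ppn from the L1-hit request or the PTE just fetched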
  val l1addr = MakeAddr(satp.ppn, getVpnn(vpn, 2))
  val l2addr = MakeAddr(Mux(l1Hit, ppn, memPte.ppn), getVpnn(vpn, 1))
  val mem_addr = Mux(af_level === 0.U, l1addr, l2addr)
  io.pmp.req.valid := DontCare // same-cycle check, valid is not used
  io.pmp.req.bits.addr := mem_addr
  io.pmp.req.bits.size := 3.U // TODO: fix it
  io.pmp.req.bits.cmd := TlbCmd.read

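  // issue the page-table read; io.mem.mask stalls the request, and no request is
  // sent once an access fault has been recorded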
  mem.req.valid := state === s_mem_req && !io.mem.mask && !accessFault
  mem.req.bits.addr := mem_addr
  mem.req.bits.id := FsmReqID.U(bMemID.W)

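  // current walk information (vpn, level, source), exported for the refill path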
  io.refill.vpn := vpn
  io.refill.level := level
  io.refill.source := source

  XSDebug(p"[fsm] state:${state} level:${level} notFound:${pageFault}\n")

  // perf
  XSPerfAccumulate("fsm_count", io.req.fire())
  for (i <- 0 until PtwWidth) {
    XSPerfAccumulate(s"fsm_count_source${i}", io.req.fire() && io.req.bits.source === i.U)
  }
  XSPerfAccumulate("fsm_busy", state =/= s_idle)
  XSPerfAccumulate("fsm_idle", state === s_idle)
  XSPerfAccumulate("resp_blocked", io.resp.valid && !io.resp.ready)
  XSPerfAccumulate("mem_count", mem.req.fire())
  XSPerfAccumulate("mem_cycle", BoolStopWatch(mem.req.fire, mem.resp.fire(), true))
  XSPerfAccumulate("mem_blocked", mem.req.valid && !mem.req.ready)

  TimeOutAssert(state =/= s_idle, timeOutThreshold, "page table walker time out")
}