xref: /XiangShan/src/main/scala/xiangshan/backend/fu/PMP.scala (revision 8891a219bbc84f568e1d134854d8d5ed86d6d560)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

// See LICENSE.SiFive for license details.

package xiangshan.backend.fu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utility.MaskedRegMap.WritableMask
import xiangshan._
import xiangshan.backend.fu.util.HasCSRConst
import utils._
import utility._
import xiangshan.cache.mmu.{TlbCmd, TlbExceptionBundle}

trait PMPConst extends HasPMParameters {
  val PMPOffBits = 2 // minimal granularity is 4 bytes
  val CoarserGrain: Boolean = PlatformGrain > PMPOffBits
}

abstract class PMPBundle(implicit val p: Parameters) extends Bundle with PMPConst
abstract class PMPModule(implicit val p: Parameters) extends Module with PMPConst
abstract class PMPXSModule(implicit p: Parameters) extends XSModule with PMPConst

class PMPConfig(implicit p: Parameters) extends PMPBundle {
  val l = Bool()
  val c = Bool() // res(1), unused in PMP
  val atomic = Bool() // res(0), unused in PMP
  val a = UInt(2.W)
  val x = Bool()
  val w = Bool()
  val r = Bool()

  def res: UInt = Cat(c, atomic) // unused in PMP
  def off = a === 0.U
  def tor = a === 1.U
  def na4 = { if (CoarserGrain) false.B else a === 2.U }
  def napot = { if (CoarserGrain) a(1).asBool else a === 3.U }
  def off_tor = !a(1)
  def na4_napot = a(1)

  def locked = l
  def addr_locked: Bool = locked
  def addr_locked(next: PMPConfig): Bool = locked || (next.locked && next.tor)
}

object PMPConfigUInt {
  def apply(
    l: Boolean = false,
    c: Boolean = false,
    atomic: Boolean = false,
    a: Int = 0,
    x: Boolean = false,
    w: Boolean = false,
    r: Boolean = false)(implicit p: Parameters): UInt = {
    var config = 0
    if (l) { config += (1 << 7) }
    if (c) { config += (1 << 6) }
    if (atomic) { config += (1 << 5) }
    if (a > 0) { config += (a << 3) }
    if (x) { config += (1 << 2) }
    if (w) { config += (1 << 1) }
    if (r) { config += (1 << 0) }
    config.U(8.W)
  }
}
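
// Illustrative usage (not part of the original file): PMPConfigUInt packs the fields into
// the standard pmpcfg byte layout L | res | A | X | W | R. For example, a locked NAPOT
// region with execute and read permission would be
//   PMPConfigUInt(l = true, a = 3, x = true, r = true)  // 0x80 + 0x18 + 0x04 + 0x01 = 0x9d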
trait PMPReadWriteMethodBare extends PMPConst {
  def match_mask(cfg: PMPConfig, paddr: UInt) = {
    val match_mask_c_addr = Cat(paddr, cfg.a(0)) | (((1 << PlatformGrain) - 1) >> PMPOffBits).U((paddr.getWidth + 1).W)
    Cat(match_mask_c_addr & ~(match_mask_c_addr + 1.U), ((1 << PMPOffBits) - 1).U(PMPOffBits.W))
  }
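
  // Worked example of the mask computation above (illustrative, assuming PlatformGrain = 12,
  // i.e. a 4 KiB grain): for an 8 KiB NAPOT region based at 0x8000_0000, the stored pmpaddr
  // is (0x8000_0000 >> 2) | 0x3ff = 0x2000_03ff and cfg.a(0) = 1, so
  //   match_mask_c_addr = Cat(0x2000_03ff, 1) | 0x3ff      = 0x4000_07ff
  //   trailing ones     = 0x4000_07ff & ~(0x4000_07ff + 1) = 0x7ff
  //   returned mask     = Cat(0x7ff, "b11".U)              = 0x1fff  (8 KiB - 1)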

  def write_cfg_vec(mask: Vec[UInt], addr: Vec[UInt], index: Int)(cfgs: UInt): UInt = {
    val cfgVec = Wire(Vec(cfgs.getWidth/8, new PMPConfig))
    for (i <- cfgVec.indices) {
      val cfg_w_m_tmp = cfgs((i+1)*8-1, i*8).asUInt.asTypeOf(new PMPConfig)
      cfgVec(i) := cfg_w_m_tmp
      when (!cfg_w_m_tmp.l) {
        cfgVec(i).w := cfg_w_m_tmp.w && cfg_w_m_tmp.r
        if (CoarserGrain) { cfgVec(i).a := Cat(cfg_w_m_tmp.a(1), cfg_w_m_tmp.a.orR) }
        when (cfgVec(i).na4_napot) {
          mask(index + i) := match_mask(cfgVec(i), addr(index + i))
        }
      }
    }
    cfgVec.asUInt
  }

  def read_addr(cfg: PMPConfig)(addr: UInt): UInt = {
    val G = PlatformGrain - PMPOffBits
    require(G >= 0)
    if (G == 0) {
      addr
    } else if (G >= 2) {
      Mux(cfg.na4_napot, set_low_bits(addr, G-1), clear_low_bits(addr, G))
    } else { // G is 1
      Mux(cfg.off_tor, clear_low_bits(addr, G), addr)
    }
  }

  def write_addr(next: PMPConfig, mask: UInt)(paddr: UInt, cfg: PMPConfig, addr: UInt): UInt = {
    val locked = cfg.addr_locked(next)
    mask := Mux(!locked, match_mask(cfg, paddr), mask)
    Mux(!locked, paddr, addr)
  }

  def set_low_bits(data: UInt, num: Int): UInt = {
    require(num >= 0)
    data | ((1 << num)-1).U
  }

  /** clear the data's low num bits (lsb) */
  def clear_low_bits(data: UInt, num: Int): UInt = {
    require(num >= 0)
    // use Cat instead of & with a mask to avoid "Signal Width" problems
    if (num == 0) { data }
    else { Cat(data(data.getWidth-1, num), 0.U(num.W)) }
  }
}

trait PMPReadWriteMethod extends PMPReadWriteMethodBare { this: PMPBase =>
  def write_cfg_vec(cfgs: UInt): UInt = {
    val cfgVec = Wire(Vec(cfgs.getWidth/8, new PMPConfig))
    for (i <- cfgVec.indices) {
      val cfg_w_tmp = cfgs((i+1)*8-1, i*8).asUInt.asTypeOf(new PMPConfig)
      cfgVec(i) := cfg_w_tmp
      when (!cfg_w_tmp.l) {
        cfgVec(i).w := cfg_w_tmp.w && cfg_w_tmp.r
        if (CoarserGrain) { cfgVec(i).a := Cat(cfg_w_tmp.a(1), cfg_w_tmp.a.orR) }
      }
    }
    cfgVec.asUInt
  }

  /** In general, the PMP grain is 2**(G+2) bytes. When G >= 1, NA4 is not selectable.
   * When G >= 2 and cfg.a(1) is set (the mode is NAPOT), the bits addr(G-2, 0) read as all ones.
   * When G >= 1 and cfg.a(1) is clear (the mode is OFF or TOR), the bits addr(G-1, 0) read as all zeros.
   * The low PMPOffBits are not stored.
   */
  def read_addr(): UInt = {
    read_addr(cfg)(addr)
  }
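
  // For illustration (assuming PlatformGrain = 12, so G = 10): a pmpaddr CSR read returns
  // addr with bits (8, 0) forced to ones when the entry is in NAPOT mode, and with
  // bits (9, 0) forced to zeros when the entry is in OFF or TOR mode, per the rules above.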

  /** addr: the internal (stored) address with the low PMPOffBits dropped.
   * compare_addr: the internal address reconstructed for comparison.
   * paddr: the external (physical) address to be written.
   */
  def write_addr(next: PMPConfig)(paddr: UInt): UInt = {
    Mux(!cfg.addr_locked(next), paddr, addr)
  }
  def write_addr(paddr: UInt): UInt = {
    Mux(!cfg.addr_locked, paddr, addr)
  }
}

/** PMPBase for the CSR unit,
  * with only read and write logic
  */
class PMPBase(implicit p: Parameters) extends PMPBundle with PMPReadWriteMethod {
  val cfg = new PMPConfig
  val addr = UInt((PMPAddrBits - PMPOffBits).W)

  def gen(cfg: PMPConfig, addr: UInt) = {
    require(addr.getWidth == this.addr.getWidth)
    this.cfg := cfg
    this.addr := addr
  }
}

trait PMPMatchMethod extends PMPConst { this: PMPEntry =>
  /** compare_addr is used to compare with the input addr */
  def compare_addr: UInt = ((addr << PMPOffBits) & ~(((1 << PlatformGrain) - 1).U(PMPAddrBits.W))).asUInt

  /** lgSize and lgMaxSize are both log2 of the access size.
   * For the dtlb, the max access size is PMXLEN in bytes, i.e. 8.
   * For the itlb and ptw, the max size is log2(512)?
   * But we may only need 64 bytes? How do we prevent bugs here?
   * TODO: handle the special case where the itlb, ptw and dcache access a wider size than PMXLEN
   */
  def is_match(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last_pmp: PMPEntry): Bool = {
    Mux(cfg.na4_napot, napotMatch(paddr, lgSize, lgMaxSize),
      Mux(cfg.tor, torMatch(paddr, lgSize, lgMaxSize, last_pmp), false.B))
  }

  /** generate the match mask that helps matching in NAPOT mode */
  def match_mask(paddr: UInt): UInt = {
    match_mask(cfg, paddr)
  }

  def boundMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int): Bool = {
    if (lgMaxSize <= PlatformGrain) {
      (paddr < compare_addr)
    } else {
      val highLess = (paddr >> lgMaxSize) < (compare_addr >> lgMaxSize)
      val highEqual = (paddr >> lgMaxSize) === (compare_addr >> lgMaxSize)
      val lowLess = (paddr(lgMaxSize-1, 0) | OneHot.UIntToOH1(lgSize, lgMaxSize)) < compare_addr(lgMaxSize-1, 0)
      highLess || (highEqual && lowLess)
    }
  }

  def lowerBoundMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int): Bool = {
    !boundMatch(paddr, lgSize, lgMaxSize)
  }

  def higherBoundMatch(paddr: UInt, lgMaxSize: Int) = {
    boundMatch(paddr, 0.U, lgMaxSize)
  }

  def torMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last_pmp: PMPEntry): Bool = {
    last_pmp.lowerBoundMatch(paddr, lgSize, lgMaxSize) && higherBoundMatch(paddr, lgMaxSize)
  }

  def unmaskEqual(a: UInt, b: UInt, m: UInt) = {
    (a & ~m) === (b & ~m)
  }

  def napotMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int) = {
    if (lgMaxSize <= PlatformGrain) {
      unmaskEqual(paddr, compare_addr, mask)
    } else {
      val lowMask = mask | OneHot.UIntToOH1(lgSize, lgMaxSize)
      val highMatch = unmaskEqual(paddr >> lgMaxSize, compare_addr >> lgMaxSize, mask >> lgMaxSize)
      val lowMatch = unmaskEqual(paddr(lgMaxSize-1, 0), compare_addr(lgMaxSize-1, 0), lowMask(lgMaxSize-1, 0))
      highMatch && lowMatch
    }
  }

  def aligned(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last: PMPEntry) = {
    if (lgMaxSize <= PlatformGrain) {
      true.B
    } else {
      val lowBitsMask = OneHot.UIntToOH1(lgSize, lgMaxSize)
      val lowerBound = ((paddr >> lgMaxSize) === (last.compare_addr >> lgMaxSize)) &&
        ((~paddr(lgMaxSize-1, 0) & last.compare_addr(lgMaxSize-1, 0)) =/= 0.U)
      val upperBound = ((paddr >> lgMaxSize) === (compare_addr >> lgMaxSize)) &&
        ((compare_addr(lgMaxSize-1, 0) & (paddr(lgMaxSize-1, 0) | lowBitsMask)) =/= 0.U)
      val torAligned = !(lowerBound || upperBound)
      val napotAligned = (lowBitsMask & ~mask(lgMaxSize-1, 0)) === 0.U
      Mux(cfg.na4_napot, napotAligned, torAligned)
    }
  }
}

/** PMPEntry for the PMP copies outside the CSR unit,
  * with one more element (mask) to help NAPOT matching.
  * TODO: make mask an element, not a method, for timing optimization
  */
class PMPEntry(implicit p: Parameters) extends PMPBase with PMPMatchMethod {
  val mask = UInt(PMPAddrBits.W) // helps matching in NAPOT mode

  def write_addr(next: PMPConfig, mask: UInt)(paddr: UInt) = {
    mask := Mux(!cfg.addr_locked(next), match_mask(paddr), mask)
    Mux(!cfg.addr_locked(next), paddr, addr)
  }

  def write_addr(mask: UInt)(paddr: UInt) = {
    mask := Mux(!cfg.addr_locked, match_mask(paddr), mask)
    Mux(!cfg.addr_locked, paddr, addr)
  }

  def gen(cfg: PMPConfig, addr: UInt, mask: UInt) = {
    require(addr.getWidth == this.addr.getWidth)
    this.cfg := cfg
    this.addr := addr
    this.mask := mask
  }
}

trait PMPMethod extends PMPConst {
  def pmp_init(): (Vec[UInt], Vec[UInt], Vec[UInt]) = {
    val cfg = WireInit(0.U.asTypeOf(Vec(NumPMP/8, UInt(PMXLEN.W))))
    val addr = Wire(Vec(NumPMP, UInt((PMPAddrBits-PMPOffBits).W)))
    val mask = Wire(Vec(NumPMP, UInt(PMPAddrBits.W)))
    addr := DontCare
    mask := DontCare
    (cfg, addr, mask)
  }

  def pmp_gen_mapping
  (
    init: () => (Vec[UInt], Vec[UInt], Vec[UInt]),
    num: Int = 16,
    cfgBase: Int,
    addrBase: Int,
    entries: Vec[PMPEntry]
  ) = {
    val pmpCfgPerCSR = PMXLEN / new PMPConfig().getWidth
    def pmpCfgIndex(i: Int) = (PMXLEN / 32) * (i / pmpCfgPerCSR)
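    // Note (added for clarity): assuming PMXLEN = 64, each pmpcfg CSR packs pmpCfgPerCSR = 8
    // of the 8-bit configs, and pmpCfgIndex(i) = 2 * (i / 8), so only even pmpcfg CSR
    // addresses are used, as the RV64 privileged spec requires.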
    val init_value = init()
    /** To fit MaskedRegMap's write interface, declare the cfgs as merged CSRs and split them into each PMP entry */
    val cfgMerged = RegInit(init_value._1) //(Vec(num / pmpCfgPerCSR, UInt(PMXLEN.W))) // RegInit(VecInit(Seq.fill(num / pmpCfgPerCSR)(0.U(PMXLEN.W))))
    val cfgs = WireInit(cfgMerged).asTypeOf(Vec(num, new PMPConfig()))
    val addr = RegInit(init_value._2) // (Vec(num, UInt((PMPAddrBits-PMPOffBits).W)))
    val mask = RegInit(init_value._3) // (Vec(num, UInt(PMPAddrBits.W)))

    for (i <- entries.indices) {
      entries(i).gen(cfgs(i), addr(i), mask(i))
    }

    val cfg_mapping = (0 until num by pmpCfgPerCSR).map(i => {Map(
      MaskedRegMap(
        addr = cfgBase + pmpCfgIndex(i),
        reg = cfgMerged(i/pmpCfgPerCSR),
        wmask = WritableMask,
        wfn = new PMPBase().write_cfg_vec(mask, addr, i)
      ))
    }).fold(Map())((a, b) => a ++ b) // ugly code; ping me if you have a better way

    val addr_mapping = (0 until num).map(i => {Map(
      MaskedRegMap(
        addr = addrBase + i,
        reg = addr(i),
        wmask = WritableMask,
        wfn = { if (i != num-1) entries(i).write_addr(entries(i+1).cfg, mask(i)) else entries(i).write_addr(mask(i)) },
        rmask = WritableMask,
        rfn = new PMPBase().read_addr(entries(i).cfg)
      ))
    }).fold(Map())((a, b) => a ++ b) // ugly code; ping me if you have a better way

    cfg_mapping ++ addr_mapping
  }
}

class PMP(implicit p: Parameters) extends PMPXSModule with HasXSParameter with PMPMethod with PMAMethod with HasCSRConst {
  val io = IO(new Bundle {
    val distribute_csr = Flipped(new DistributedCSRIO())
    val pmp = Output(Vec(NumPMP, new PMPEntry()))
    val pma = Output(Vec(NumPMA, new PMPEntry()))
  })

  val w = io.distribute_csr.w

  val pmp = Wire(Vec(NumPMP, new PMPEntry()))
  val pma = Wire(Vec(NumPMA, new PMPEntry()))

  val pmpMapping = pmp_gen_mapping(pmp_init, NumPMP, PmpcfgBase, PmpaddrBase, pmp)
  val pmaMapping = pmp_gen_mapping(pma_init, NumPMA, PmacfgBase, PmaaddrBase, pma)
  val mapping = pmpMapping ++ pmaMapping

  val rdata = Wire(UInt(PMXLEN.W))
  MaskedRegMap.generate(mapping, w.bits.addr, rdata, w.valid, w.bits.data)

  io.pmp := pmp
  io.pma := pma
}

class PMPReqBundle(lgMaxSize: Int = 3)(implicit p: Parameters) extends PMPBundle {
  val addr = Output(UInt(PMPAddrBits.W))
  val size = Output(UInt(log2Ceil(lgMaxSize+1).W))
  val cmd = Output(TlbCmd())

  def apply(addr: UInt, size: UInt, cmd: UInt): Unit = {
    this.addr := addr
    this.size := size
    this.cmd := cmd
  }

  def apply(addr: UInt): Unit = { // request minimal permission and the aligned (max) size
    apply(addr, lgMaxSize.U, TlbCmd.read)
  }

}

class PMPRespBundle(implicit p: Parameters) extends PMPBundle {
  val ld = Output(Bool())
  val st = Output(Bool())
  val instr = Output(Bool())
  val mmio = Output(Bool())
  val atomic = Output(Bool())

  def |(resp: PMPRespBundle): PMPRespBundle = {
    val res = Wire(new PMPRespBundle())
    res.ld := this.ld || resp.ld
    res.st := this.st || resp.st
    res.instr := this.instr || resp.instr
    res.mmio := this.mmio || resp.mmio
    res.atomic := this.atomic || resp.atomic
    res
  }
}

trait PMPCheckMethod extends PMPConst {
  def pmp_check(cmd: UInt, cfg: PMPConfig) = {
    val resp = Wire(new PMPRespBundle)
    resp.ld := TlbCmd.isRead(cmd) && !TlbCmd.isAmo(cmd) && !cfg.r
    resp.st := (TlbCmd.isWrite(cmd) || TlbCmd.isAmo(cmd)) && !cfg.w
    resp.instr := TlbCmd.isExec(cmd) && !cfg.x
    resp.mmio := false.B
    resp.atomic := false.B
    resp
  }
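
  // For example (illustrative): an AMO to a region with r = 1 but w = 0 raises a store
  // access fault (resp.st) rather than a load fault, because AMOs are checked against the
  // write permission above, while plain loads only require cfg.r.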

  def pmp_match_res(leaveHitMux: Boolean = false, valid: Bool = true.B)(
    addr: UInt,
    size: UInt,
    pmpEntries: Vec[PMPEntry],
    mode: UInt,
    lgMaxSize: Int
  ) = {
    val num = pmpEntries.size
    require(num == NumPMP)

    val passThrough = if (pmpEntries.isEmpty) true.B else (mode > 1.U)
    val pmpDefault = WireInit(0.U.asTypeOf(new PMPEntry()))
    pmpDefault.cfg.r := passThrough
    pmpDefault.cfg.w := passThrough
    pmpDefault.cfg.x := passThrough

    val match_vec = Wire(Vec(num+1, Bool()))
    val cfg_vec = Wire(Vec(num+1, new PMPEntry()))

    pmpEntries.zip(pmpDefault +: pmpEntries.take(num-1)).zipWithIndex.foreach{ case ((pmp, last_pmp), i) =>
      val is_match = pmp.is_match(addr, size, lgMaxSize, last_pmp)
      val ignore = passThrough && !pmp.cfg.l
      val aligned = pmp.aligned(addr, size, lgMaxSize, last_pmp)

      val cur = WireInit(pmp)
      cur.cfg.r := aligned && (pmp.cfg.r || ignore)
      cur.cfg.w := aligned && (pmp.cfg.w || ignore)
      cur.cfg.x := aligned && (pmp.cfg.x || ignore)

//      Mux(is_match, cur, prev)
      match_vec(i) := is_match
      cfg_vec(i) := cur
    }

    // default value
    match_vec(num) := true.B
    cfg_vec(num) := pmpDefault

    if (leaveHitMux) {
      ParallelPriorityMux(match_vec.map(RegEnable(_, false.B, valid)), RegEnable(cfg_vec, valid))
    } else {
      ParallelPriorityMux(match_vec, cfg_vec)
    }
  }
}

class PMPCheckerEnv(implicit p: Parameters) extends PMPBundle {
  val mode = UInt(2.W)
  val pmp = Vec(NumPMP, new PMPEntry())
  val pma = Vec(NumPMA, new PMPEntry())

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry]): Unit = {
    this.mode := mode
    this.pmp := pmp
    this.pma := pma
  }
}

class PMPCheckIO(lgMaxSize: Int)(implicit p: Parameters) extends PMPBundle {
  val check_env = Input(new PMPCheckerEnv())
  val req = Flipped(Valid(new PMPReqBundle(lgMaxSize))) // usage: drive valid with the request's fire signal
  val resp = new PMPRespBundle()

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], req: Valid[PMPReqBundle]) = {
    check_env.apply(mode, pmp, pma)
    this.req := req
    resp
  }

  def req_apply(valid: Bool, addr: UInt): Unit = {
    this.req.valid := valid
    this.req.bits.apply(addr)
  }

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], valid: Bool, addr: UInt) = {
    check_env.apply(mode, pmp, pma)
    req_apply(valid, addr)
    resp
  }
}

class PMPCheckv2IO(lgMaxSize: Int)(implicit p: Parameters) extends PMPBundle {
  val check_env = Input(new PMPCheckerEnv())
  val req = Flipped(Valid(new PMPReqBundle(lgMaxSize))) // usage: drive valid with the request's fire signal
  val resp = Output(new PMPConfig())

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], req: Valid[PMPReqBundle]) = {
    check_env.apply(mode, pmp, pma)
    this.req := req
    resp
  }

  def req_apply(valid: Bool, addr: UInt): Unit = {
    this.req.valid := valid
    this.req.bits.apply(addr)
  }

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], valid: Bool, addr: UInt) = {
    check_env.apply(mode, pmp, pma)
    req_apply(valid, addr)
    resp
  }
}

class PMPChecker
(
  lgMaxSize: Int = 3,
  sameCycle: Boolean = false,
  leaveHitMux: Boolean = false,
  pmpUsed: Boolean = true
)(implicit p: Parameters) extends PMPModule
  with PMPCheckMethod
  with PMACheckMethod
{
  require(!(leaveHitMux && sameCycle))
  val io = IO(new PMPCheckIO(lgMaxSize))

  val req = io.req.bits

  val res_pmp = pmp_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pmp, io.check_env.mode, lgMaxSize)
  val res_pma = pma_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pma, io.check_env.mode, lgMaxSize)

  val resp_pmp = pmp_check(req.cmd, res_pmp.cfg)
  val resp_pma = pma_check(req.cmd, res_pma.cfg)
  val resp = if (pmpUsed) (resp_pmp | resp_pma) else resp_pma

  if (sameCycle || leaveHitMux) {
    io.resp := resp
  } else {
    io.resp := RegEnable(resp, io.req.valid)
  }
}
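
// A minimal usage sketch (illustrative, not part of the original file); `tlbMode`, `csrPmp`,
// `csrPma`, `reqValid` and `reqPaddr` are placeholder signals from the surrounding design:
//   val pmp_checker = Module(new PMPChecker(lgMaxSize = 3, sameCycle = false))
//   val pmp_resp = pmp_checker.io.apply(tlbMode, csrPmp, csrPma, reqValid, reqPaddr)
// The addr-only request checks minimal (read) permission at the aligned maximum size, and
// pmp_resp.ld then flags an access fault one cycle later (same cycle if sameCycle = true).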

/* PMPCheckerv2: returns the checked PMPConfig instead of a fault response */
class PMPCheckerv2
(
  lgMaxSize: Int = 3,
  sameCycle: Boolean = false,
  leaveHitMux: Boolean = false
)(implicit p: Parameters) extends PMPModule
  with PMPCheckMethod
  with PMACheckMethod
{
  require(!(leaveHitMux && sameCycle))
  val io = IO(new PMPCheckv2IO(lgMaxSize))

  val req = io.req.bits

  val res_pmp = pmp_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pmp, io.check_env.mode, lgMaxSize)
  val res_pma = pma_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pma, io.check_env.mode, lgMaxSize)

  val resp = and(res_pmp, res_pma)

  if (sameCycle || leaveHitMux) {
    io.resp := resp
  } else {
    io.resp := RegEnable(resp, io.req.valid)
  }

  def and(pmp: PMPEntry, pma: PMPEntry): PMPConfig = {
    val tmp_res = Wire(new PMPConfig)
    tmp_res.l := DontCare
    tmp_res.a := DontCare
    tmp_res.r := pmp.cfg.r && pma.cfg.r
    tmp_res.w := pmp.cfg.w && pma.cfg.w
    tmp_res.x := pmp.cfg.x && pma.cfg.x
    tmp_res.c := pma.cfg.c
    tmp_res.atomic := pma.cfg.atomic
    tmp_res
  }
}
579