/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

// See LICENSE.SiFive for license details.

package xiangshan.backend.fu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.internal.naming.chiselName
import chisel3.util._
import utility.MaskedRegMap.WritableMask
import xiangshan._
import xiangshan.backend.fu.util.HasCSRConst
import utils._
import utility._
import xiangshan.cache.mmu.{TlbCmd, TlbExceptionBundle}

trait PMPConst extends HasPMParameters {
  val PMPOffBits = 2 // minimal 4 bytes
  val CoarserGrain: Boolean = PlatformGrain > PMPOffBits
}

abstract class PMPBundle(implicit val p: Parameters) extends Bundle with PMPConst
abstract class PMPModule(implicit val p: Parameters) extends Module with PMPConst
abstract class PMPXSModule(implicit p: Parameters) extends XSModule with PMPConst

@chiselName
class PMPConfig(implicit p: Parameters) extends PMPBundle {
  val l = Bool()
  val c = Bool()      // res(1), unused in pmp
  val atomic = Bool() // res(0), unused in pmp
  val a = UInt(2.W)
  val x = Bool()
  val w = Bool()
  val r = Bool()

  def res: UInt = Cat(c, atomic) // in pmp, unused
  def off = a === 0.U
  def tor = a === 1.U
  def na4 = { if (CoarserGrain) false.B else a === 2.U }
  def napot = { if (CoarserGrain) a(1).asBool else a === 3.U }
  def off_tor = !a(1)
  def na4_napot = a(1)

  def locked = l
  def addr_locked: Bool = locked
  def addr_locked(next: PMPConfig): Bool = locked || (next.locked && next.tor)
}
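
/** Helper to build a pmpcfg byte as an elaboration-time constant.
  *
  * Bit layout of one pmpcfg byte (matching the class above and the RISC-V privileged spec):
  * bit 7 = L (lock), bits 6:5 = reserved (reused in this design as c/atomic for PMA entries),
  * bits 4:3 = A (0 = OFF, 1 = TOR, 2 = NA4, 3 = NAPOT), bit 2 = X, bit 1 = W, bit 0 = R.
  *
  * Illustrative use (the value name is made up for the example):
  * {{{
  * // a locked, NAPOT, read/execute-only region
  * val romCfg = PMPConfigUInt(l = true, a = 3, x = true, r = true)
  * }}}
  */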
object PMPConfigUInt {
  def apply(
    l: Boolean = false,
    c: Boolean = false,
    atomic: Boolean = false,
    a: Int = 0,
    x: Boolean = false,
    w: Boolean = false,
    r: Boolean = false)(implicit p: Parameters): UInt = {
    var config = 0
    if (l) { config += (1 << 7) }
    if (c) { config += (1 << 6) }
    if (atomic) { config += (1 << 5) }
    if (a > 0) { config += (a << 3) }
    if (x) { config += (1 << 2) }
    if (w) { config += (1 << 1) }
    if (r) { config += (1 << 0) }
    config.U(8.W)
  }
}

trait PMPReadWriteMethodBare extends PMPConst {
  def match_mask(cfg: PMPConfig, paddr: UInt) = {
    val match_mask_c_addr = Cat(paddr, cfg.a(0)) | (((1 << PlatformGrain) - 1) >> PMPOffBits).U((paddr.getWidth + 1).W)
    Cat(match_mask_c_addr & ~(match_mask_c_addr + 1.U), ((1 << PMPOffBits) - 1).U(PMPOffBits.W))
  }

  def write_cfg_vec(mask: Vec[UInt], addr: Vec[UInt], index: Int)(cfgs: UInt): UInt = {
    val cfgVec = Wire(Vec(cfgs.getWidth/8, new PMPConfig))
    for (i <- cfgVec.indices) {
      val cfg_w_m_tmp = cfgs((i+1)*8-1, i*8).asUInt.asTypeOf(new PMPConfig)
      cfgVec(i) := cfg_w_m_tmp
      when (!cfg_w_m_tmp.l) {
        cfgVec(i).w := cfg_w_m_tmp.w && cfg_w_m_tmp.r
        if (CoarserGrain) { cfgVec(i).a := Cat(cfg_w_m_tmp.a(1), cfg_w_m_tmp.a.orR) }
        when (cfgVec(i).na4_napot) {
          mask(index + i) := match_mask(cfgVec(i), addr(index + i))
        }
      }
    }
    cfgVec.asUInt
  }

  def read_addr(cfg: PMPConfig)(addr: UInt): UInt = {
    val G = PlatformGrain - PMPOffBits
    require(G >= 0)
    if (G == 0) {
      addr
    } else if (G >= 2) {
      Mux(cfg.na4_napot, set_low_bits(addr, G-1), clear_low_bits(addr, G))
    } else { // G is 1
      Mux(cfg.off_tor, clear_low_bits(addr, G), addr)
    }
  }

  def write_addr(next: PMPConfig, mask: UInt)(paddr: UInt, cfg: PMPConfig, addr: UInt): UInt = {
    val locked = cfg.addr_locked(next)
    mask := Mux(!locked, match_mask(cfg, paddr), mask)
    Mux(!locked, paddr, addr)
  }

  def set_low_bits(data: UInt, num: Int): UInt = {
    require(num >= 0)
    data | ((1 << num)-1).U
  }

  /** mask the data's low num bits (lsb) */
  def clear_low_bits(data: UInt, num: Int): UInt = {
    require(num >= 0)
    // use Cat instead of & with mask to avoid "Signal Width" problem
    if (num == 0) { data }
    else { Cat(data(data.getWidth-1, num), 0.U(num.W)) }
  }
}
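
// NAPOT encoding refresher (illustrative; the numbers assume PlatformGrain == PMPOffBits,
// i.e. no coarser-grain rounding): a pmpaddr of the form yyy...y0111 with k trailing ones
// encodes a naturally aligned 2^(k+3)-byte region. match_mask above recovers that size as
// an address mask, e.g. three trailing ones give a 64-byte region and a mask whose low
// 6 bits are set.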

trait PMPReadWriteMethod extends PMPReadWriteMethodBare { this: PMPBase =>
  def write_cfg_vec(cfgs: UInt): UInt = {
    val cfgVec = Wire(Vec(cfgs.getWidth/8, new PMPConfig))
    for (i <- cfgVec.indices) {
      val cfg_w_tmp = cfgs((i+1)*8-1, i*8).asUInt.asTypeOf(new PMPConfig)
      cfgVec(i) := cfg_w_tmp
      when (!cfg_w_tmp.l) {
        cfgVec(i).w := cfg_w_tmp.w && cfg_w_tmp.r
        if (CoarserGrain) { cfgVec(i).a := Cat(cfg_w_tmp.a(1), cfg_w_tmp.a.orR) }
      }
    }
    cfgVec.asUInt
  }

  /** In general, the PMP grain is 2**{G+2} bytes. When G >= 1, NA4 is not selectable.
    * When G >= 2 and cfg.a(1) is set (the mode is NAPOT), the bits addr(G-2, 0) read as all ones.
    * When G >= 1 and cfg.a(1) is clear (the mode is OFF or TOR), the bits addr(G-1, 0) read as zeros.
    * The low OffBits are dropped.
    */
  def read_addr(): UInt = {
    read_addr(cfg)(addr)
  }

  /** addr is the internal (stored) address with the low OffBits dropped.
    * compare_addr is the internal address used for comparison.
    * paddr is the external physical address.
    */
  def write_addr(next: PMPConfig)(paddr: UInt): UInt = {
    Mux(!cfg.addr_locked(next), paddr, addr)
  }
  def write_addr(paddr: UInt): UInt = {
    Mux(!cfg.addr_locked, paddr, addr)
  }
}

/** PMPBase for the CSR unit,
  * with only read and write logic
  */
@chiselName
class PMPBase(implicit p: Parameters) extends PMPBundle with PMPReadWriteMethod {
  val cfg = new PMPConfig
  val addr = UInt((PMPAddrBits - PMPOffBits).W)

  def gen(cfg: PMPConfig, addr: UInt) = {
    require(addr.getWidth == this.addr.getWidth)
    this.cfg := cfg
    this.addr := addr
  }
}
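
// Worked grain example (illustrative; assumes a 4 KiB grain, i.e. PlatformGrain = 12):
// then G = PlatformGrain - PMPOffBits = 10, and read_addr above returns the stored addr
// with bits (G-2, 0) forced to ones in NAPOT mode and bits (G-1, 0) forced to zeros in
// OFF/TOR mode, so software always reads back a grain-aligned value.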

trait PMPMatchMethod extends PMPConst { this: PMPEntry =>
  /** compare_addr is used to compare with input addr */
  def compare_addr: UInt = ((addr << PMPOffBits) & ~(((1 << PlatformGrain) - 1).U(PMPAddrBits.W))).asUInt

  /** size and maxSize are both log2 sizes.
    * for dtlb, the maxSize is bPMXLEN which is 8
    * for itlb and ptw, the maxSize is log2(512) ?
    * but we may only need the 64 bytes? how to prevent the bugs?
    * TODO: handle the special case that itlb & ptw & dcache access wider size than PMXLEN
    */
  def is_match(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last_pmp: PMPEntry): Bool = {
    Mux(cfg.na4_napot, napotMatch(paddr, lgSize, lgMaxSize),
      Mux(cfg.tor, torMatch(paddr, lgSize, lgMaxSize, last_pmp), false.B))
  }

  /** generate match mask to help match in napot mode */
  def match_mask(paddr: UInt): UInt = {
    match_mask(cfg, paddr)
  }

  def boundMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int): Bool = {
    if (lgMaxSize <= PlatformGrain) {
      (paddr < compare_addr)
    } else {
      val highLess = (paddr >> lgMaxSize) < (compare_addr >> lgMaxSize)
      val highEqual = (paddr >> lgMaxSize) === (compare_addr >> lgMaxSize)
      val lowLess = (paddr(lgMaxSize-1, 0) | OneHot.UIntToOH1(lgSize, lgMaxSize)) < compare_addr(lgMaxSize-1, 0)
      highLess || (highEqual && lowLess)
    }
  }

  def lowerBoundMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int): Bool = {
    !boundMatch(paddr, lgSize, lgMaxSize)
  }

  def higherBoundMatch(paddr: UInt, lgMaxSize: Int) = {
    boundMatch(paddr, 0.U, lgMaxSize)
  }

  def torMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last_pmp: PMPEntry): Bool = {
    last_pmp.lowerBoundMatch(paddr, lgSize, lgMaxSize) && higherBoundMatch(paddr, lgMaxSize)
  }

  def unmaskEqual(a: UInt, b: UInt, m: UInt) = {
    (a & ~m) === (b & ~m)
  }

  def napotMatch(paddr: UInt, lgSize: UInt, lgMaxSize: Int) = {
    if (lgMaxSize <= PlatformGrain) {
      unmaskEqual(paddr, compare_addr, mask)
    } else {
      val lowMask = mask | OneHot.UIntToOH1(lgSize, lgMaxSize)
      val highMatch = unmaskEqual(paddr >> lgMaxSize, compare_addr >> lgMaxSize, mask >> lgMaxSize)
      val lowMatch = unmaskEqual(paddr(lgMaxSize-1, 0), compare_addr(lgMaxSize-1, 0), lowMask(lgMaxSize-1, 0))
      highMatch && lowMatch
    }
  }

  def aligned(paddr: UInt, lgSize: UInt, lgMaxSize: Int, last: PMPEntry) = {
    if (lgMaxSize <= PlatformGrain) {
      true.B
    } else {
      val lowBitsMask = OneHot.UIntToOH1(lgSize, lgMaxSize)
      val lowerBound = ((paddr >> lgMaxSize) === (last.compare_addr >> lgMaxSize)) &&
        ((~paddr(lgMaxSize-1, 0) & last.compare_addr(lgMaxSize-1, 0)) =/= 0.U)
      val upperBound = ((paddr >> lgMaxSize) === (compare_addr >> lgMaxSize)) &&
        ((compare_addr(lgMaxSize-1, 0) & (paddr(lgMaxSize-1, 0) | lowBitsMask)) =/= 0.U)
      val torAligned = !(lowerBound || upperBound)
      val napotAligned = (lowBitsMask & ~mask(lgMaxSize-1, 0)) === 0.U
      Mux(cfg.na4_napot, napotAligned, torAligned)
    }
  }
}
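
// TOR matching note: an entry with A=TOR matches paddr in [last_pmp.compare_addr,
// this.compare_addr), which is why torMatch takes the previous entry as its lower bound.
// Entry 0 is paired with an all-zero default entry as its predecessor (see pmp_match_res
// further below).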

/** PMPEntry for the outside pmp copies,
  * with one more element, mask, to help NAPOT matching
  * TODO: make mask an element, not a method, for timing opt
  */
@chiselName
class PMPEntry(implicit p: Parameters) extends PMPBase with PMPMatchMethod {
  val mask = UInt(PMPAddrBits.W) // helps to match in napot mode

  def write_addr(next: PMPConfig, mask: UInt)(paddr: UInt) = {
    mask := Mux(!cfg.addr_locked(next), match_mask(paddr), mask)
    Mux(!cfg.addr_locked(next), paddr, addr)
  }

  def write_addr(mask: UInt)(paddr: UInt) = {
    mask := Mux(!cfg.addr_locked, match_mask(paddr), mask)
    Mux(!cfg.addr_locked, paddr, addr)
  }

  def gen(cfg: PMPConfig, addr: UInt, mask: UInt) = {
    require(addr.getWidth == this.addr.getWidth)
    this.cfg := cfg
    this.addr := addr
    this.mask := mask
  }
}
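
// The PMPMethod trait below builds the MaskedRegMap entries for the pmpcfg/pmpaddr CSRs
// (and is reused for the pmacfg/pmaaddr mirrors): the cfg fields are stored merged,
// pmpCfgPerCSR entries per PMXLEN-wide register, then split back into per-entry PMPConfig
// views and wired into the PMPEntry copies that are handed to the checkers.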

trait PMPMethod extends PMPConst {
  def pmp_init(): (Vec[UInt], Vec[UInt], Vec[UInt]) = {
    val cfg = WireInit(0.U.asTypeOf(Vec(NumPMP/8, UInt(PMXLEN.W))))
    val addr = Wire(Vec(NumPMP, UInt((PMPAddrBits-PMPOffBits).W)))
    val mask = Wire(Vec(NumPMP, UInt(PMPAddrBits.W)))
    addr := DontCare
    mask := DontCare
    (cfg, addr, mask)
  }

  def pmp_gen_mapping
  (
    init: () => (Vec[UInt], Vec[UInt], Vec[UInt]),
    num: Int = 16,
    cfgBase: Int,
    addrBase: Int,
    entries: Vec[PMPEntry]
  ) = {
    val pmpCfgPerCSR = PMXLEN / new PMPConfig().getWidth
    def pmpCfgIndex(i: Int) = (PMXLEN / 32) * (i / pmpCfgPerCSR)
    val init_value = init()
    /** to fit MaskedRegMap's write, declare cfgs as merged CSRs and split them into each pmp */
    val cfgMerged = RegInit(init_value._1) //(Vec(num / pmpCfgPerCSR, UInt(PMXLEN.W))) // RegInit(VecInit(Seq.fill(num / pmpCfgPerCSR)(0.U(PMXLEN.W))))
    val cfgs = WireInit(cfgMerged).asTypeOf(Vec(num, new PMPConfig()))
    val addr = RegInit(init_value._2) // (Vec(num, UInt((PMPAddrBits-PMPOffBits).W)))
    val mask = RegInit(init_value._3) // (Vec(num, UInt(PMPAddrBits.W)))

    for (i <- entries.indices) {
      entries(i).gen(cfgs(i), addr(i), mask(i))
    }

    val cfg_mapping = (0 until num by pmpCfgPerCSR).map(i => {Map(
      MaskedRegMap(
        addr = cfgBase + pmpCfgIndex(i),
        reg = cfgMerged(i/pmpCfgPerCSR),
        wmask = WritableMask,
        wfn = new PMPBase().write_cfg_vec(mask, addr, i)
      ))
    }).fold(Map())((a, b) => a ++ b) // ugly code, hit me if you have better code

    val addr_mapping = (0 until num).map(i => {Map(
      MaskedRegMap(
        addr = addrBase + i,
        reg = addr(i),
        wmask = WritableMask,
        wfn = { if (i != num-1) entries(i).write_addr(entries(i+1).cfg, mask(i)) else entries(i).write_addr(mask(i)) },
        rmask = WritableMask,
        rfn = new PMPBase().read_addr(entries(i).cfg)
      ))
    }).fold(Map())((a, b) => a ++ b) // ugly code, hit me if you have better code.

    cfg_mapping ++ addr_mapping
  }
}

@chiselName
class PMP(implicit p: Parameters) extends PMPXSModule with HasXSParameter with PMPMethod with PMAMethod with HasCSRConst {
  val io = IO(new Bundle {
    val distribute_csr = Flipped(new DistributedCSRIO())
    val pmp = Output(Vec(NumPMP, new PMPEntry()))
    val pma = Output(Vec(NumPMA, new PMPEntry()))
  })

  val w = io.distribute_csr.w

  val pmp = Wire(Vec(NumPMP, new PMPEntry()))
  val pma = Wire(Vec(NumPMA, new PMPEntry()))

  val pmpMapping = pmp_gen_mapping(pmp_init, NumPMP, PmpcfgBase, PmpaddrBase, pmp)
  val pmaMapping = pmp_gen_mapping(pma_init, NumPMA, PmacfgBase, PmaaddrBase, pma)
  val mapping = pmpMapping ++ pmaMapping

  val rdata = Wire(UInt(PMXLEN.W))
  MaskedRegMap.generate(mapping, w.bits.addr, rdata, w.valid, w.bits.data)

  io.pmp := pmp
  io.pma := pma
}
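
// The PMP module above owns the architectural CSR state; the entry vectors it exports
// (io.pmp / io.pma) are fanned out to PMPChecker instances defined further below, which
// keep no CSR state of their own and check each request purely combinationally (or with
// a single register stage, depending on sameCycle / leaveHitMux).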

class PMPReqBundle(lgMaxSize: Int = 3)(implicit p: Parameters) extends PMPBundle {
  val addr = Output(UInt(PMPAddrBits.W))
  val size = Output(UInt(log2Ceil(lgMaxSize+1).W))
  val cmd = Output(TlbCmd())

  def apply(addr: UInt, size: UInt, cmd: UInt) {
    this.addr := addr
    this.size := size
    this.cmd := cmd
  }

  def apply(addr: UInt) { // request minimal permission and an aligned size
    apply(addr, lgMaxSize.U, TlbCmd.read)
  }

}

class PMPRespBundle(implicit p: Parameters) extends PMPBundle {
  val ld = Output(Bool())
  val st = Output(Bool())
  val instr = Output(Bool())
  val mmio = Output(Bool())
  val atomic = Output(Bool())

  def |(resp: PMPRespBundle): PMPRespBundle = {
    val res = Wire(new PMPRespBundle())
    res.ld := this.ld || resp.ld
    res.st := this.st || resp.st
    res.instr := this.instr || resp.instr
    res.mmio := this.mmio || resp.mmio
    res.atomic := this.atomic || resp.atomic
    res
  }
}

trait PMPCheckMethod extends PMPConst {
  def pmp_check(cmd: UInt, cfg: PMPConfig) = {
    val resp = Wire(new PMPRespBundle)
    resp.ld := TlbCmd.isRead(cmd) && !TlbCmd.isAmo(cmd) && !cfg.r
    resp.st := (TlbCmd.isWrite(cmd) || TlbCmd.isAmo(cmd)) && !cfg.w
    resp.instr := TlbCmd.isExec(cmd) && !cfg.x
    resp.mmio := false.B
    resp.atomic := false.B
    resp
  }

  def pmp_match_res(leaveHitMux: Boolean = false, valid: Bool = true.B)(
    addr: UInt,
    size: UInt,
    pmpEntries: Vec[PMPEntry],
    mode: UInt,
    lgMaxSize: Int
  ) = {
    val num = pmpEntries.size
    require(num == NumPMP)

    val passThrough = if (pmpEntries.isEmpty) true.B else (mode > 1.U)
    val pmpDefault = WireInit(0.U.asTypeOf(new PMPEntry()))
    pmpDefault.cfg.r := passThrough
    pmpDefault.cfg.w := passThrough
    pmpDefault.cfg.x := passThrough

    val match_vec = Wire(Vec(num+1, Bool()))
    val cfg_vec = Wire(Vec(num+1, new PMPEntry()))

    pmpEntries.zip(pmpDefault +: pmpEntries.take(num-1)).zipWithIndex.foreach{ case ((pmp, last_pmp), i) =>
      val is_match = pmp.is_match(addr, size, lgMaxSize, last_pmp)
      val ignore = passThrough && !pmp.cfg.l
      val aligned = pmp.aligned(addr, size, lgMaxSize, last_pmp)

      val cur = WireInit(pmp)
      cur.cfg.r := aligned && (pmp.cfg.r || ignore)
      cur.cfg.w := aligned && (pmp.cfg.w || ignore)
      cur.cfg.x := aligned && (pmp.cfg.x || ignore)

      // Mux(is_match, cur, prev)
      match_vec(i) := is_match
      cfg_vec(i) := cur
    }

    // default value
    match_vec(num) := true.B
    cfg_vec(num) := pmpDefault

    if (leaveHitMux) {
      ParallelPriorityMux(match_vec.map(RegEnable(_, false.B, valid)), RegEnable(cfg_vec, valid))
    } else {
      ParallelPriorityMux(match_vec, cfg_vec)
    }
  }
}
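
// Priority note: pmp_match_res above selects the lowest-numbered matching entry
// (ParallelPriorityMux over match_vec) and falls back to the default entry when nothing
// matches. The default entry passes the access only when mode > 1 (machine mode), and a
// matching unlocked entry likewise ignores its own permission bits in machine mode
// (subject to the aligned check), so M-mode accesses pass unless a locked entry applies.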

class PMPCheckerEnv(implicit p: Parameters) extends PMPBundle {
  val mode = UInt(2.W)
  val pmp = Vec(NumPMP, new PMPEntry())
  val pma = Vec(NumPMA, new PMPEntry())

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry]): Unit = {
    this.mode := mode
    this.pmp := pmp
    this.pma := pma
  }
}

class PMPCheckIO(lgMaxSize: Int)(implicit p: Parameters) extends PMPBundle {
  val check_env = Input(new PMPCheckerEnv())
  val req = Flipped(Valid(new PMPReqBundle(lgMaxSize))) // usage: assign the valid to fire signal
  val resp = new PMPRespBundle()

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], req: Valid[PMPReqBundle]) = {
    check_env.apply(mode, pmp, pma)
    this.req := req
    resp
  }

  def req_apply(valid: Bool, addr: UInt): Unit = {
    this.req.valid := valid
    this.req.bits.apply(addr)
  }

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], valid: Bool, addr: UInt) = {
    check_env.apply(mode, pmp, pma)
    req_apply(valid, addr)
    resp
  }
}

class PMPCheckv2IO(lgMaxSize: Int)(implicit p: Parameters) extends PMPBundle {
  val check_env = Input(new PMPCheckerEnv())
  val req = Flipped(Valid(new PMPReqBundle(lgMaxSize))) // usage: assign the valid to fire signal
  val resp = Output(new PMPConfig())

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], req: Valid[PMPReqBundle]) = {
    check_env.apply(mode, pmp, pma)
    this.req := req
    resp
  }

  def req_apply(valid: Bool, addr: UInt): Unit = {
    this.req.valid := valid
    this.req.bits.apply(addr)
  }

  def apply(mode: UInt, pmp: Vec[PMPEntry], pma: Vec[PMPEntry], valid: Bool, addr: UInt) = {
    check_env.apply(mode, pmp, pma)
    req_apply(valid, addr)
    resp
  }
}
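
// Typical hookup sketch for the PMPChecker defined below (illustrative only; csrMode,
// csrIO, reqValid and reqPaddr are placeholder names on the instantiating side, not
// part of this file):
//   val checker = Module(new PMPChecker(lgMaxSize = 3, sameCycle = true))
//   checker.io.apply(csrMode, csrIO.pmp, csrIO.pma, reqValid, reqPaddr)
//   val pmpIllegal = checker.io.resp.ld || checker.io.resp.st || checker.io.resp.instr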

@chiselName
class PMPChecker
(
  lgMaxSize: Int = 3,
  sameCycle: Boolean = false,
  leaveHitMux: Boolean = false,
  pmpUsed: Boolean = true
)(implicit p: Parameters) extends PMPModule
  with PMPCheckMethod
  with PMACheckMethod
{
  require(!(leaveHitMux && sameCycle))
  val io = IO(new PMPCheckIO(lgMaxSize))

  val req = io.req.bits

  val res_pmp = pmp_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pmp, io.check_env.mode, lgMaxSize)
  val res_pma = pma_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pma, io.check_env.mode, lgMaxSize)

  val resp_pmp = pmp_check(req.cmd, res_pmp.cfg)
  val resp_pma = pma_check(req.cmd, res_pma.cfg)
  val resp = if (pmpUsed) (resp_pmp | resp_pma) else resp_pma

  if (sameCycle || leaveHitMux) {
    io.resp := resp
  } else {
    io.resp := RegEnable(resp, io.req.valid)
  }
}

/* get config with check */
@chiselName
class PMPCheckerv2
(
  lgMaxSize: Int = 3,
  sameCycle: Boolean = false,
  leaveHitMux: Boolean = false
)(implicit p: Parameters) extends PMPModule
  with PMPCheckMethod
  with PMACheckMethod
{
  require(!(leaveHitMux && sameCycle))
  val io = IO(new PMPCheckv2IO(lgMaxSize))

  val req = io.req.bits

  val res_pmp = pmp_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pmp, io.check_env.mode, lgMaxSize)
  val res_pma = pma_match_res(leaveHitMux, io.req.valid)(req.addr, req.size, io.check_env.pma, io.check_env.mode, lgMaxSize)

  val resp = and(res_pmp, res_pma)

  if (sameCycle || leaveHitMux) {
    io.resp := resp
  } else {
    io.resp := RegEnable(resp, io.req.valid)
  }

  def and(pmp: PMPEntry, pma: PMPEntry): PMPConfig = {
    val tmp_res = Wire(new PMPConfig)
    tmp_res.l := DontCare
    tmp_res.a := DontCare
    tmp_res.r := pmp.cfg.r && pma.cfg.r
    tmp_res.w := pmp.cfg.w && pma.cfg.w
    tmp_res.x := pmp.cfg.x && pma.cfg.x
    tmp_res.c := pma.cfg.c
    tmp_res.atomic := pma.cfg.atomic
    tmp_res
  }
}