/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import xiangshan.cache._
import utils._
import utility._
import difftest._

/** Base module type for everything inside the ICache miss unit. */
abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

/** Base bundle type for everything inside the ICache miss unit. */
abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

/** A single refill request: physical/virtual address pair plus the victim way
  * (one-hot) chosen by the replacer. The set index comes from the virtual
  * address and the tag from the physical address (VIPT cache).
  */
class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
  val paddr   = UInt(PAddrBits.W)
  val vaddr   = UInt(VAddrBits.W)
  val waymask = UInt(nWays.W)

  def getVirSetIdx = get_idx(vaddr)      // virtual set index
  def getPhyTag    = get_phy_tag(paddr)  // physical tag
}

/** Refill response: one full cacheline of data plus a corrupt flag
  * (set if any beat of the TileLink grant was marked corrupt).
  */
class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
  val data    = UInt(blockBits.W)
  val corrupt = Bool()
}

/** Request/response interface between the fetch pipeline and the miss unit
  * (one req/resp pair per fetch port).
  */
class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle{
  val req   = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
  val resp  = Vec(2, ValidIO(new ICacheMissResp))
  val flush = Input(Bool())
}

/** One MSHR entry: accepts a miss request, fetches the cacheline over
  * TileLink (Get), writes it into the data/meta arrays, and responds to IFU.
  *
  * State machine:
  *   s_idle -> s_send_mem_acquire -> s_wait_mem_grant -> s_write_back -> s_wait_resp -> s_idle
  *
  * A `fencei` observed while the entry is busy marks the refill as flushed:
  * the SRAM write-back is skipped but the response is still produced so the
  * requester can make forward progress.
  */
class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    // MSHR source id used on the TileLink A channel
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req  = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    // TileLink channels
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    // block-aligned address of the in-flight request, for forwarding/conflict checks
    val ongoing_req = ValidIO(UInt(PAddrBits.W))
    val fencei      = Input(Bool())
  })

  /** default values for control signals (overridden below where relevant) */
  io.resp.bits        := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready  := true.B
  io.meta_write.bits  := DontCare
  io.data_write.bits  := DontCare

  val s_idle :: s_send_mem_acquire :: s_wait_mem_grant :: s_write_back :: s_wait_resp :: Nil = Enum(5)
  val state = RegInit(s_idle)

  /** request register: latched on accept, fields decoded for array writes */
  val req         = Reg(new ICacheMissReq)
  val req_idx     = req.getVirSetIdx //virtual index
  val req_tag     = req.getPhyTag    //physical tag
  val req_waymask = req.waymask
  val req_corrupt = RegInit(false.B)

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

  // A fencei during a refill must prevent the (stale) line from being written
  // into the cache. Latch it until the entry returns to idle.
  val needflush_r = RegInit(false.B)
  when (state === s_idle) { needflush_r := false.B }
  when (state =/= s_idle && io.fencei) { needflush_r := true.B }
  val needflush = needflush_r | io.fencei

  //cacheline beat counter and beat buffer
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  io.req.ready         := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_acquire)

  io.ongoing_req.valid := (state =/= s_idle)
  io.ongoing_req.bits  := addrAlign(req.paddr, blockBytes, PAddrBits)

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        state       := s_send_mem_acquire
        req         := io.req.bits
        // start the new transaction with a clean corrupt flag
        req_corrupt := false.B
      }
    }

    // memory request
    is(s_send_mem_acquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          // Accumulate corrupt over ALL beats: a single corrupt beat makes the
          // whole line corrupt. (A plain `:=` here would be overwritten by
          // later beats, keeping only the last beat's corrupt bit.)
          req_corrupt := req_corrupt || io.mem_grant.bits.corrupt
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_write_back
          }
        }
      }
    }

    is(s_write_back) {
      // on flush, skip the SRAM write but still answer the requester
      state := Mux(io.meta_write.fire() && io.data_write.fire() || needflush, s_wait_resp, s_write_back)
    }

    is(s_wait_resp) {
      io.resp.bits.data    := respDataReg.asUInt
      io.resp.bits.corrupt := req_corrupt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }

  /** refill write and meta write */

  val getBlock = edge.Get(
    fromSource = io.id,
    toAddress  = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize     = (log2Up(cacheParams.blockBytes)).U
  )._2

  io.mem_acquire.bits := getBlock // getBlock
  // req source
  io.mem_acquire.bits.user.lift(ReqSourceKey).foreach(_ := MemReqSource.CPUInst.id.U)
  require(nSets <= 256) // icache size should not be more than 128KB

  //resp to ifu
  io.resp.valid := state === s_wait_resp

  // flushed refills are not written into the arrays
  io.meta_write.valid := (state === s_write_back && !needflush)
  io.meta_write.bits.generate(tag = req_tag, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back && !needflush)
  io.data_write.bits.generate(data    = respDataReg.asUInt,
                              idx     = req_idx,
                              waymask = req_waymask,
                              bankIdx = req_idx(0),
                              paddr   = req.paddr)

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop  = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())
}

/** Miss unit: one MSHR entry per fetch port plus a pass-through for the
  * fetch-directed instruction prefetcher (FDIP). Arbitrates all A-channel
  * requesters onto `mem_acquire` and demultiplexes the D channel back to the
  * matching entry by TileLink source id (FDIP uses source == PortNumber).
  */
class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle{
    val hartId = Input(UInt(8.W))
    val req    = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp   = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    // prefetcher's TileLink channels, tunneled through this unit's port
    val fdip_acquire = Flipped(DecoupledIO(new TLBundleA(edge.bundle)))
    val fdip_grant   = DecoupledIO(new TLBundleD(edge.bundle))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    // per-entry in-flight address, exported for conflict checks
    val mshrInfo = Vec(PortNumber, ValidIO(UInt(PAddrBits.W)))

    val fencei = Input(Bool())
  })

  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb     = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))

  // the D channel is always drained; each beat is routed by source id below
  io.mem_grant.ready := true.B

  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits  := io.req(i).bits
    io.req(i).ready    := entry.io.req.ready

    // entry resp
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i)     <> entry.io.data_write

    // D-channel demux: only the entry whose source id matches sees the grant
    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits  := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i)     <> entry.io.resp
    io.mshrInfo(i) <> entry.io.ongoing_req
    entry.io.fencei := io.fencei

    entry
  }

  // grants with the prefetcher's source id are forwarded to FDIP
  io.fdip_grant.valid := false.B
  io.fdip_grant.bits  := DontCare
  when (io.mem_grant.bits.source === PortNumber.U) {
    io.fdip_grant <> io.mem_grant
  }

  // lowest-index-wins arbitration; FDIP (appended last) has lowest priority
  val tl_a_channel = entries.map(_.io.mem_acquire) :+ io.fdip_acquire
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_channel:_*)

  io.meta_write <> meta_write_arb.io.out
  io.data_write <> refill_arb.io.out

  if (env.EnableDifftest) {
    val difftest = DifftestModule(new DiffRefillEvent, dontCare = true)
    difftest.coreid := io.hartId
    difftest.index  := 0.U
    difftest.valid  := refill_arb.io.out.valid
    difftest.addr   := refill_arb.io.out.bits.paddr
    difftest.data   := refill_arb.io.out.bits.data.asTypeOf(difftest.data)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10), entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10), entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}