/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import coupledL2.AliasKey
import xiangshan.cache._
import utils._
import utility._
import difftest._


abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

/** Miss request from the main pipe: physical/virtual address pair plus the
  * victim way (one-hot) chosen by the replacer for the refill.
  */
class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
  val paddr  = UInt(PAddrBits.W)
  val vaddr  = UInt(VAddrBits.W)
  val waymask = UInt(nWays.W)

  // set index comes from the virtual address (VIPT), tag from the physical one
  def getVirSetIdx = get_idx(vaddr)
  def getPhyTag    = get_phy_tag(paddr)
}

/** Refilled cacheline returned to IFU; `corrupt` is set if any grant beat
  * carried a TileLink corrupt indication.
  */
class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
  val data    = UInt(blockBits.W)
  val corrupt = Bool()
}

class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle{
  val req       = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
  val resp      = Vec(2, ValidIO(new ICacheMissResp))
  val flush     = Input(Bool())
}

/** One MSHR-style miss entry.
  *
  * State machine:
  *   s_idle -> s_send_mem_aquire -> s_wait_mem_grant -> s_write_back -> s_wait_resp -> s_idle
  *
  * It issues a TileLink Get for the whole cacheline, collects the grant beats
  * into `respDataReg`, writes meta+data SRAM (unless a fencei arrived while the
  * miss was in flight), and finally presents the line to IFU.
  */
class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    //tilelink channel
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    // block-aligned paddr of the in-flight miss, used for cross-MSHR checks
    val ongoing_req = ValidIO(UInt(PAddrBits.W))
    val fencei = Input(Bool())
  })

  /** default value for control signals */
  io.resp := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

  val s_idle :: s_send_mem_aquire :: s_wait_mem_grant :: s_write_back :: s_wait_resp :: Nil = Enum(5)
  val state = RegInit(s_idle)
  /** control logic transformation */
  //request register
  val req = Reg(new ICacheMissReq)
  val req_idx = req.getVirSetIdx //virtual index
  val req_tag = req.getPhyTag //physical tag
  val req_waymask = req.waymask
  val req_corrupt = RegInit(false.B)

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

  // Remember a fencei that hits while a miss is in flight: the refill must then
  // be dropped (no SRAM write) so stale instructions cannot be installed.
  val needflush_r = RegInit(false.B)
  when (state === s_idle) { needflush_r := false.B }
  when (state =/= s_idle && io.fencei) { needflush_r := true.B }
  val needflush = needflush_r | io.fencei

  //cacheline register
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  //initial (duplicated defaults kept for safety; Chisel last-connect wins)
  io.resp.bits := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

  io.req.ready := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_aquire)

  io.ongoing_req.valid := (state =/= s_idle)
  io.ongoing_req.bits := addrAlign(req.paddr, blockBytes, PAddrBits)

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        // BUGFIX: clear the corrupt flag left over from a previous miss so it
        // cannot poison this (unrelated) refill's response.
        req_corrupt := false.B
        state := s_send_mem_aquire
        req := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_aquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          // BUGFIX: accumulate corrupt across all grant beats. The old code
          // (`req_corrupt := io.mem_grant.bits.corrupt`) overwrote the flag
          // each beat, so a corrupt middle beat followed by a clean last beat
          // reported the whole line as clean.
          req_corrupt := req_corrupt || io.mem_grant.bits.corrupt
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_write_back
          }
        }
      }
    }

    is(s_write_back) {
      // skip the SRAM write entirely when flushed by fencei
      state := Mux(io.meta_write.fire() && io.data_write.fire() || needflush, s_wait_resp, s_write_back)
    }

    is(s_wait_resp) {
      io.resp.bits.data := respDataReg.asUInt
      io.resp.bits.corrupt := req_corrupt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }

  /** refill write and meta write */

  val getBlock = edge.Get(
    fromSource = io.id,
    toAddress = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize = (log2Up(cacheParams.blockBytes)).U
  )._2

  io.mem_acquire.bits := getBlock // getBlock
  require(nSets <= 256) // icache size should not be more than 128KB

  //resp to ifu
  io.resp.valid := state === s_wait_resp

  io.meta_write.valid := (state === s_write_back && !needflush)
  io.meta_write.bits.generate(tag = req_tag, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back && !needflush)
  io.data_write.bits.generate(data = respDataReg.asUInt,
    idx = req_idx,
    waymask = req_waymask,
    bankIdx = req_idx(0),
    paddr = req.paddr)

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())
}

/** Top-level miss unit: instantiates `PortNumber` demand-miss entries plus
  * `nPrefetchEntries` prefetch (PIQ) entries, arbitrates their meta/data
  * writes, multiplexes them onto one TileLink A channel, and routes grant
  * beats back by source id.
  */
class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle{
    val hartId = Input(UInt(8.W))
    val req = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val prefetch_req = Flipped(DecoupledIO(new PIQReq))
    val mshr_info = Vec(totalMSHRNum, ValidIO(UInt(PAddrBits.W)))
    val freePIQEntry = Output(UInt(log2Ceil(nPrefetchEntries).W))

    val fencei = Input(Bool())

    val piq_write_ipbuffer = ValidIO(new IPFBufferWrite)

    val to_main_pipe = Vec(nPrefetchEntries, new PIQToMainPipe)
  })
  // assign default values to output signals
  io.mem_grant.ready := false.B

  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))
  val ipf_write_arb = Module(new Arbiter(new IPFBufferWrite, nPrefetchEntries))

  io.mem_grant.ready := true.B

  // demand-miss entries: TileLink source ids 0 until PortNumber
  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits := io.req(i).bits
    io.req(i).ready := entry.io.req.ready

    // entry resp
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i) <> entry.io.data_write

    // route grant beats to the entry whose source id matches
    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i) <> entry.io.resp
    io.mshr_info(i) <> entry.io.ongoing_req
    entry.io.fencei := io.fencei
//    XSPerfAccumulate(
//      "entryPenalty" + Integer.toString(i, 10),
//      BoolStopWatch(
//        start = entry.io.req.fire(),
//        stop = entry.io.resp.fire(),
//        startHighPriority = true)
//    )
//    XSPerfAccumulate("entryReq" + Integer.toString(i, 10), entry.io.req.fire())

    entry
  }

  val alloc = Wire(UInt(log2Ceil(nPrefetchEntries).W))
  val toMainPipe = io.to_main_pipe.map(_.info)

  // prefetch entries: source ids PortNumber until PortNumber + nPrefetchEntries
  val prefEntries = (PortNumber until PortNumber + nPrefetchEntries) map { i =>
    val prefetchEntry = Module(new PIQEntry(edge, i))

    prefetchEntry.io.mem_grant.valid := false.B
    prefetchEntry.io.mem_grant.bits := DontCare
    prefetchEntry.io.fencei := io.fencei

    ipf_write_arb.io.in(i - PortNumber) <> prefetchEntry.io.piq_write_ipbuffer

    when(io.mem_grant.bits.source === i.U) {
      prefetchEntry.io.mem_grant <> io.mem_grant
    }

    // only the entry selected by `alloc` sees the incoming prefetch request
    prefetchEntry.io.req.valid := io.prefetch_req.valid && ((i - PortNumber).U === alloc)
    prefetchEntry.io.req.bits := io.prefetch_req.bits

    prefetchEntry.io.id := i.U

    io.mshr_info(i) := prefetchEntry.io.ongoing_req

    prefetchEntry
  }

  alloc := PriorityEncoder(prefEntries.map(_.io.req.ready))
  io.prefetch_req.ready := ParallelOR(prefEntries.map(_.io.req.ready))
  io.freePIQEntry := PriorityEncoder(prefEntries.map(_.io.req.ready))
  (0 until nPrefetchEntries).foreach(i => toMainPipe(i) <> prefEntries(i).io.prefetch_entry_data)
  // demand misses listed first => lowest ids get priority on the A channel
  val tl_a_chanel = entries.map(_.io.mem_acquire) ++ prefEntries.map(_.io.mem_acquire)
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_chanel:_*)

  io.meta_write <> meta_write_arb.io.out
  io.data_write <> refill_arb.io.out

  io.piq_write_ipbuffer.valid := ipf_write_arb.io.out.valid
  io.piq_write_ipbuffer.bits := ipf_write_arb.io.out.bits
  ipf_write_arb.io.out.ready := true.B

  XSPerfAccumulate("refill_ipf_num", io.piq_write_ipbuffer.fire)

  if (env.EnableDifftest) {
    val diffipfrefill = Module(new DifftestRefillEvent)
    diffipfrefill.io.clock := clock
    diffipfrefill.io.coreid := io.hartId
    diffipfrefill.io.cacheid := 3.U
    diffipfrefill.io.valid := ipf_write_arb.io.out.valid
    diffipfrefill.io.addr := ipf_write_arb.io.out.bits.meta.paddr
    diffipfrefill.io.data := ipf_write_arb.io.out.bits.data.asTypeOf(diffipfrefill.io.data)
  }

  if (env.EnableDifftest) {
    val difftest = Module(new DifftestRefillEvent)
    difftest.io.clock := clock
    difftest.io.coreid := io.hartId
    difftest.io.cacheid := 0.U
    difftest.io.valid := refill_arb.io.out.valid
    difftest.io.addr := refill_arb.io.out.bits.paddr
    difftest.io.data := refill_arb.io.out.bits.data.asTypeOf(difftest.io.data)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10), entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10), entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}