/***************************************************************************************
 * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
 * Copyright (c) 2020-2021 Peng Cheng Laboratory
 *
 * XiangShan is licensed under Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *          http://license.coscl.org.cn/MulanPSL2
 *
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 *
 * See the Mulan PSL v2 for more details.
 ***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.tilelink._
import utils._
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}
import huancun.{PreferCacheKey}


// Common parents for all bundles/modules of the I-cache prefetcher.
abstract class IPrefetchBundle(implicit p: Parameters) extends ICacheBundle
abstract class IPrefetchModule(implicit p: Parameters) extends ICacheModule

/** Request enqueued into the Prefetch Instruction Queue (PIQ): one cache-block physical address. */
class PIQReq(implicit p: Parameters) extends IPrefetchBundle {
  val paddr     = UInt(PAddrBits.W)
}


/** Enqueue channel from the prefetch pipeline to the miss unit's prefetch entries. */
class IPrefetchToMissUnit(implicit p: Parameters) extends IPrefetchBundle{
  val enqReq  = DecoupledIO(new PIQReq)
}

/** I/O of [[IPrefetchPipe]].
 *
 *  - fromFtq:  prefetch targets from the FTQ
 *  - iTLBInter: ITLB translation port
 *  - pmp:      PMP permission check port
 *  - toIMeta / fromIMeta: meta-array probe to filter requests that already hit in the I-cache
 *  - toMissUnit: filtered prefetch requests
 *  - fromMSHR: addresses of in-flight miss requests, used to drop overlapping prefetches
 */
class IPredfetchIO(implicit p: Parameters) extends IPrefetchBundle {
  val fromFtq         = Flipped(new FtqPrefechBundle)
  val iTLBInter       = new TlbRequestIO
  val pmp             = new ICachePMPBundle
  val toIMeta         = DecoupledIO(new ICacheReadBundle)
  val fromIMeta       = Input(new ICacheMetaRespBundle)
  val toMissUnit      = new IPrefetchToMissUnit
  val fromMSHR        = Flipped(Vec(PortNumber,ValidIO(UInt(PAddrBits.W))))

  val prefetchEnable = Input(Bool())
  val prefetchDisable = Input(Bool())
}

/** Four-stage instruction-prefetch pipeline.
 *
 *  p0: take a target from the FTQ, align it to a cache block, and probe the
 *      ITLB and the meta array in parallel.
 *  p1: use the ITLB response and the meta tags to drop requests that hit in
 *      the I-cache or raised a TLB exception; only misses proceed.
 *  p2: drop requests that collide with an in-flight MSHR miss or carry a
 *      TLB page-fault / access-fault.
 *  p3: run the PMP check, filter against the prefetch directory (recently
 *      issued prefetches), and enqueue survivors into the miss unit.
 */
class IPrefetchPipe(implicit p: Parameters) extends IPrefetchModule
{
  val io = IO(new IPredfetchIO)

  // Prefetching is gated by enableBit: set on an external enable pulse,
  // cleared on disable or once the directory holds nPrefetchEntries requests.
  val enableBit = RegInit(false.B)
  val maxPrefetchCoutner = RegInit(0.U(log2Ceil(nPrefetchEntries + 1).W))

  val reachMaxSize = maxPrefetchCoutner === nPrefetchEntries.U

  when(io.prefetchEnable){
    enableBit := true.B
  }.elsewhen((enableBit && io.prefetchDisable) || (enableBit && reachMaxSize)){
    enableBit := false.B
  }

  /** One directory entry: a recently issued prefetch block address, used to
   *  suppress duplicate prefetches in stage 3. */
  class PrefetchDir(implicit p: Parameters) extends IPrefetchBundle
  {
    val valid = Bool()
    val paddr = UInt(PAddrBits.W)
  }

  val prefetch_dir = RegInit(VecInit(Seq.fill(nPrefetchEntries)(0.U.asTypeOf(new PrefetchDir))))

  val fromFtq = io.fromFtq
  val (toITLB,  fromITLB) = (io.iTLBInter.req, io.iTLBInter.resp)
  io.iTLBInter.req_kill := false.B
  val (toIMeta, fromIMeta) = (io.toIMeta, io.fromIMeta.metaData(0))
  val (toPMP,  fromPMP)   = (io.pmp.req, io.pmp.resp)
  val toMissUnit = io.toMissUnit

  // Per-stage handshake wires: fire = advance, discard = drop in place, ready = can accept.
  val p0_fire, p1_fire, p2_fire, p3_fire =  WireInit(false.B)
  val p1_discard, p2_discard, p3_discard = WireInit(false.B)
  val p0_ready, p1_ready, p2_ready, p3_ready = WireInit(false.B)

  /** Prefetch Stage 0: req from Ftq */
  val p0_valid  =   fromFtq.req.valid
  val p0_vaddr  =   addrAlign(fromFtq.req.bits.target, blockBytes, VAddrBits)
  // Advance only when the ITLB hit (no miss), the meta array accepted the probe,
  // stage 1 can take the request, and prefetching is enabled.
  p0_fire   :=   p0_valid && p1_ready && toITLB.fire() && !fromITLB.bits.miss && toIMeta.ready && enableBit
  //discard req when source not ready
  // p0_discard := p0_valid && ((toITLB.fire() && fromITLB.bits.miss) || !toIMeta.ready || !enableBit)

  toIMeta.valid     := p0_valid
  toIMeta.bits.vSetIdx(0) := get_idx(p0_vaddr)
  toIMeta.bits.vSetIdx(1) := DontCare
  toIMeta.bits.isDoubleLine := false.B

  toITLB.valid         := p0_valid
  toITLB.bits.size     := 3.U // TODO: fix the size
  toITLB.bits.vaddr    := p0_vaddr
  toITLB.bits.debug.pc := p0_vaddr

  toITLB.bits.kill                := DontCare
  toITLB.bits.cmd                 := TlbCmd.exec
  toITLB.bits.debug.robIdx        := DontCare
  toITLB.bits.debug.isFirstIssue  := DontCare


  fromITLB.ready := true.B

  fromFtq.req.ready := true.B

  /** Prefetch Stage 1: cache probe filter */
  val p1_valid =  generatePipeControl(lastFire = p0_fire, thisFire = p1_fire || p1_discard, thisFlush = false.B, lastFlush = false.B)

  val p1_vaddr   =  RegEnable(p0_vaddr,    p0_fire)

  //tlb resp
  // tlb_resp_valid tracks whether a (possibly held) ITLB response belongs to the
  // request currently in stage 1; cleared once the stage fires or discards.
  val tlb_resp_valid = RegInit(false.B)
  when(p0_fire) {tlb_resp_valid := true.B}
  .elsewhen(tlb_resp_valid && (p1_fire || p1_discard)) {tlb_resp_valid := false.B}

  // Capture the ITLB response the cycle after p0 fired and hold it until consumed.
  val tlb_resp_paddr = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.paddr(0))
  val tlb_resp_pf    = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.excp(0).pf.instr && tlb_resp_valid)
  val tlb_resp_af    = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.excp(0).af.instr && tlb_resp_valid)

  val p1_exception  = VecInit(Seq(tlb_resp_pf, tlb_resp_af))
  val p1_has_except =  p1_exception.reduce(_ || _)

  val p1_ptag = get_phy_tag(tlb_resp_paddr)

  // Meta tags/coherence captured the cycle after the p0 probe, held until consumed.
  val p1_meta_ptags       = ResultHoldBypass(data = VecInit(fromIMeta.map(way => way.tag)),valid = RegNext(p0_fire))
  val p1_meta_cohs        = ResultHoldBypass(data = VecInit(fromIMeta.map(way => way.coh)),valid = RegNext(p0_fire))

  val p1_tag_eq_vec       =  VecInit(p1_meta_ptags.map(_  ===  p1_ptag ))
  val p1_tag_match_vec    =  VecInit(p1_tag_eq_vec.zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && p1_meta_cohs(w).isValid()})
  val p1_tag_match        =  ParallelOR(p1_tag_match_vec)
  val (p1_hit, p1_miss)   =  (p1_valid && p1_tag_match && !p1_has_except , p1_valid && !p1_tag_match && !p1_has_except)

  //overriding the invalid req
  // A hit in the I-cache or a TLB exception makes the prefetch useless: cancel it.
  val p1_req_cancle = (p1_hit || (tlb_resp_valid && p1_exception.reduce(_ || _))) && p1_valid
  val p1_req_accept = p1_valid && tlb_resp_valid && p1_miss

  p1_ready    :=   p1_fire || p1_req_cancle || !p1_valid
  p1_fire     :=   p1_valid && p1_req_accept && p2_ready && enableBit
  p1_discard  :=   p1_valid && p1_req_cancle

  /** Prefetch Stage 2: filtered req PIQ enqueue */
  val p2_valid =  generatePipeControl(lastFire = p1_fire, thisFire = p2_fire || p2_discard, thisFlush = false.B, lastFlush = false.B)

  val p2_paddr     = RegEnable(next = tlb_resp_paddr,  enable = p1_fire)
  val p2_except_pf = RegEnable(next =tlb_resp_pf, enable = p1_fire)
  val p2_except_tlb_af = RegEnable(next = tlb_resp_af, enable = p1_fire)

  /*when a prefetch req meet with a miss req in MSHR cancle the prefetch req */
  val p2_check_in_mshr = VecInit(io.fromMSHR.map(mshr => mshr.valid && mshr.bits === addrAlign(p2_paddr, blockBytes, PAddrBits))).reduce(_||_)

  //TODO wait PMP logic
  val p2_exception  = VecInit(Seq(p2_except_tlb_af, p2_except_pf)).reduce(_||_)

  p2_ready   :=   p2_fire || p2_discard || !p2_valid
  p2_fire    :=   p2_valid && !p2_exception && p3_ready
  p2_discard :=   p2_valid && p2_exception

  /** Prefetch Stage 3: PMP check, directory filter and miss-unit enqueue */
  val p3_valid =  generatePipeControl(lastFire = p2_fire, thisFire = p3_fire || p3_discard, thisFlush = false.B, lastFlush = false.B)

  val p3_pmp_fire = p3_valid
  val pmpExcpAF = fromPMP.instr
  val p3_paddr = RegEnable(next = p2_paddr,  enable = p2_fire)

  io.pmp.req.valid      := p3_pmp_fire
  io.pmp.req.bits.addr  := p3_paddr
  io.pmp.req.bits.size  := 3.U
  io.pmp.req.bits.cmd   := TlbCmd.exec

  val p3_except_pmp_af = DataHoldBypass(pmpExcpAF, p3_pmp_fire)
  val p3_check_in_mshr = RegEnable(next = p2_check_in_mshr,  enable = p2_fire)
  // MMIO regions must not be prefetched.
  val p3_mmio = DataHoldBypass(io.pmp.resp.mmio && !p3_except_pmp_af, p3_pmp_fire)

  val p3_exception  = VecInit(Seq(p3_except_pmp_af, p3_mmio)).reduce(_||_)

  // Already issued recently? Suppress the duplicate.
  val p3_hit_dir = VecInit((0 until nPrefetchEntries).map(i => prefetch_dir(i).valid && prefetch_dir(i).paddr === p3_paddr )).reduce(_||_)

  p3_discard := p3_exception || p3_hit_dir || p3_check_in_mshr || (p3_valid && enableBit && !toMissUnit.enqReq.ready)

  toMissUnit.enqReq.valid             := p3_valid && enableBit && !p3_discard
  toMissUnit.enqReq.bits.paddr        := p3_paddr

  // Directory bookkeeping: record each enqueued prefetch; flush the directory
  // and restart the counter once it fills up.
  when(reachMaxSize){
    maxPrefetchCoutner := 0.U

    prefetch_dir.foreach(_.valid := false.B)
  }.elsewhen(toMissUnit.enqReq.fire()){
    maxPrefetchCoutner := maxPrefetchCoutner + 1.U

    prefetch_dir(maxPrefetchCoutner).valid := true.B
    prefetch_dir(maxPrefetchCoutner).paddr := p3_paddr
  }

  p3_ready := toMissUnit.enqReq.ready || !enableBit
  p3_fire  := toMissUnit.enqReq.fire()

}

/** One prefetch entry in the miss unit: accepts a [[PIQReq]] and emits a
 *  TileLink Hint (prefetch-read) for the NEXT cache block after the request's
 *  address, tagged with [[PreferCacheKey]] so the L2 keeps the data.
 *
 *  State machine: s_idle -> s_send_hint -> s_idle. (s_wait_hint_ack is
 *  declared but never entered; hint acks are sunk unconditionally.)
 */
class IPrefetchEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber + nPrefetchEntries).W))

    val req = Flipped(DecoupledIO(new PIQReq))

    //tilelink channel
    val mem_hint = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_hint_ack = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

  })

  /** default value for control signals */
  io.mem_hint.bits := DontCare
  io.mem_hint_ack.ready := true.B


  val s_idle  :: s_send_hint :: s_wait_hint_ack :: Nil = Enum(3)
  val state = RegInit(s_idle)
  /** control logic transformation */
  //request register
  val req = Reg(new PIQReq)

  io.req.ready := (state === s_idle)
  io.mem_hint.valid := (state === s_send_hint)

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        state := s_send_hint
        req := io.req.bits
      }
    }

    // memory request
    is(s_send_hint) {
      when(io.mem_hint.fire()) {
        state := s_idle
      }
    }
  }

  /** refill write and meta write */
  // Hint the block AFTER the requested one (next-line prefetch).
  val hint = edge.Hint(
    fromSource = io.id,
    toAddress = addrAlign(req.paddr, blockBytes, PAddrBits) + blockBytes.U,
    lgSize = (log2Up(cacheParams.blockBytes)).U,
    param = TLHints.PREFETCH_READ
  )._2
  io.mem_hint.bits := hint
  io.mem_hint.bits.user.lift(PreferCacheKey).foreach(_ := true.B)


  XSPerfAccumulate("PrefetchEntryReq" + Integer.toString(id, 10), io.req.fire())

}