/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.{PFEvent, PMP, PMPChecker, PMPReqBundle}
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._

class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {

  val instrUncache = LazyModule(new InstrUncache())
  val icache       = LazyModule(new ICache())

  lazy val module = new FrontendImp(this)
}

class FrontendImp (outer: Frontend) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasPerfEvents
{
  val io = IO(new Bundle() {
    val hartId       = Input(UInt(8.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei       = Input(Bool())
    val ptw          = new TlbPtwIO()
    val backend      = new FrontendToCtrlIO
    val sfence       = Input(new SfenceBundle)
    val tlbCsr       = Input(new TlbCsrBundle)
    val csrCtrl      = Input(new CustomCSRCtrlIO)
    val csrUpdate    = new DistributedCSRUpdateReq
    val error        = new L1CacheErrorInfo
    val frontendInfo = new Bundle {
      val ibufFull = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
  })

  // decoupled frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu          = Module(new Predictor)
  val ifu          = Module(new NewIFU)
  val ibuffer      = Module(new Ibuffer)
  val ftq          = Module(new Ftq)

  val needFlush            = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect  = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss  = Wire(Bool())
  val FlushTAGEMiss        = Wire(Bool())
  val FlushSCMiss          = Wire(Bool())
  val FlushITTAGEMiss      = Wire(Bool())
  val FlushRASMiss         = Wire(Bool())

  val tlbCsr  = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence  = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger
  val triggerEn = csrCtrl.trigger_enable
  ifu.io.csrTriggerEnable := VecInit(triggerEn(0), triggerEn(1), triggerEn(6), triggerEn(8))

  // bpu ctrl
  bpu.io.ctrl := csrCtrl.bp_ctrl
  bpu.io.reset_vector := io.reset_vector

  // pmp
  val prefetchPipeNum = ICacheParameters().prefetchPipeNum
  val pmp = Module(new PMP())
  val pmp_check = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
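  // ITLB/PMP port layout, as implied by the wiring below (the grouping is inferred from
  // this file alone): ports 0 until 2 + prefetchPipeNum go to the ICache (presumably the
  // two fetch ports plus the prefetch pipes), and the last port goes to the IFU, which
  // uses it for MMIO/uncached fetch. coreParams.ipmpPortNum and coreParams.itlbPortNum
  // are therefore expected to equal 2 + prefetchPipeNum + 1.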
  (0 until 2 + prefetchPipeNum).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 + prefetchPipeNum).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

  val itlb = Module(new TLB(coreParams.itlbPortNum, nRespDups = 1,
    Seq(false, false) ++ Seq.fill(prefetchPipeNum)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(2 + prefetchPipeNum) zip icache.io.itlb foreach { case (a, b) => a <> b }
  itlb.io.requestor.last <> ifu.io.iTLBInter // mmio may need re-tlb, blocked
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.ptw_replenish <> DontCare
  itlb.io.flushPipe.map(_ := needFlush)

  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  io.ptw <> itlbRepeater1.io.ptw

  icache.io.prefetch <> ftq.io.toPrefetch

  // IFU-Ftq
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ftq.io.fromIfu <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu <> ftq.io.toBpu
  ftq.io.fromBpu <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead <> ifu.io.mmioCommitRead

  // IFU-ICache
  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <> icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady := icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss := icache.io.fetch.topdownItlbMiss
  icache.io.stop := ifu.io.icacheStop

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr.distribute_csr <> DontCare
  io.csrUpdate := DontCare

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := io.fencei

  // IFU-Ibuffer
  ifu.io.toIbuffer <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq <> ftq.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when (ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget    = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
      ftq.io.toBackend.newest_entry_target,
      checkPcMem(checkTargetIdx(i) + 1.U).startAddr)
  }

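  // Consistency checks on the IBuffer output stream (XSError debug assertions).
  // checkPcMem shadows the FTQ PC memory; checkTarget is the start address of the next
  // FTQ entry (or newest_entry_target when the entry being checked is still the newest
  // one). The checkers below assert that, within a fetch block, a not-taken branch and
  // its successor share an ftqPtr and have consecutive PCs, while a predicted-taken CFI
  // is followed by an instruction from the next FTQ entry at the predicted target. When
  // the instruction in question occupies the last IBuffer output slot, its info is
  // latched in a prev* register and checked against slot 0 of the next fire; needFlush
  // cancels any pending deferred check.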
  // checkNotTakenConsecutive is commented out at the call site below, because a
  // not-taken br could be the last instruction in the fetch block
  def checkNotTakenConsecutive = {
    val prevNotTakenValid  = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for an instr that is not the last: if it is a not-taken br, the next instr should have the same ftqPtr
      // for the last instr: record its info and check it against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when (ibuffer.io.out(i+1).fire) {
          // not the last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i+1), "not-taken br should have same ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevNotTakenValid := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

  def checkTakenNotConsecutive = {
    val prevTakenValid  = RegInit(0.B)
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for an instr that is not the last: if it is a taken br, the next instr should come from the next FTQ entry (consecutive ftqPtr)
      // for the last instr: record its info and check it against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          // not the last br, check now
          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i+1), "taken br should have consecutive ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      // last instr is a br, record its info
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

  def checkNotTakenPC = {
    val prevNotTakenPC    = Reg(UInt(VAddrBits.W))
    val prevIsRVC         = Reg(Bool())
    val prevNotTakenValid = RegInit(0.B)

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(i+1).bits.pc, "not-taken br should have consecutive pc\n")
        } .otherwise {
          prevNotTakenValid := true.B
          prevIsRVC := ibuffer.io.out(i).bits.pd.isRVC
          prevNotTakenPC := ibuffer.io.out(i).bits.pc
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevNotTakenValid := true.B
      prevIsRVC := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
      prevNotTakenPC := ibuffer.io.out(DecodeWidth - 1).bits.pc
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc, "not-taken br should have consecutive pc\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

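  // checkTakenPC: the instruction that follows a predicted-taken CFI should be fetched
  // from the predicted target, i.e. the start address of the next FTQ entry as computed
  // in checkTarget above (checkTarget falls back to newest_entry_target when the entry
  // is still the newest one, presumably because the next entry has not been written to
  // checkPcMem yet).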
  def checkTakenPC = {
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    val prevTakenValid  = RegInit(0.B)
    val prevTakenTarget = Wire(UInt(VAddrBits.W))
    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(checkTarget(i) =/= ibuffer.io.out(i+1).bits.pc, "taken instr should follow target pc\n")
        } .otherwise {
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

  //checkNotTakenConsecutive  // intentionally disabled, see the comment at its definition
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

  ibuffer.io.flush := needFlush
  ibuffer.io.ControlRedirect := FlushControlRedirect
  ibuffer.io.MemVioRedirect := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble := FlushTAGEMiss
  ibuffer.io.SCMissBubble := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble := FlushITTAGEMiss
  ibuffer.io.RASMissBubble := FlushRASMiss

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss := ftq.io.TAGEMissBubble
  FlushSCMiss := ftq.io.SCMissBubble
  FlushITTAGEMiss := ftq.io.ITTAGEMissBubble
  FlushRASMiss := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason
  dontTouch(io.backend.stallReason)

  instrUncache.io.req <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

  val frontendBubble = PopCount((0 until DecodeWidth).map(i => io.backend.cfVec(i).ready && !ibuffer.io.out(i).valid))
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)

  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)

  val allPerfEvents = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerf)
  override val perfEvents = HPerfMonitor(csrevents, allPerfEvents).getPerfEvents
  generatePerfEvent()
}