/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.rob.DebugLsInfoBundle
import xiangshan.cache.mmu.{TlbCmd, TlbReq, TlbRequestIO, TlbResp}
import xiangshan.cache.{DcacheStoreRequestIO, DCacheStoreIO, MemoryOpConstants, HasDCacheParameters, StorePrefetchReq}

class StoreUnit(implicit p: Parameters) extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle() {
    val redirect        = Flipped(ValidIO(new Redirect))
    val stin            = Flipped(Decoupled(new ExuInput))
    val issue           = Valid(new ExuInput)
    val tlb             = new TlbRequestIO()
    val dcache          = new DCacheStoreIO
    val pmp             = Flipped(new PMPRespBundle())
    val rsIdx           = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue    = Input(Bool())
    val lsq             = ValidIO(new LsPipelineBundle)
    val lsq_replenish   = Output(new LsPipelineBundle())
    val feedback_slow   = ValidIO(new RSFeedback)
    val prefetch_req    = Flipped(DecoupledIO(new StorePrefetchReq))
    // provide prefetch info to sms
    val prefetch_train  = ValidIO(new StPrefetchTrainBundle())
    val stld_nuke_query = Valid(new StoreNukeQueryIO)
    val stout           = DecoupledIO(new ExuOutput) // writeback store
    // store mask, send to sq in store_s0
    val st_mask_out     = Valid(new StoreMaskBundle)
    val debug_ls        = Output(new DebugLsInfoBundle)
  })

  val s1_ready, s2_ready, s3_ready = WireInit(false.B)

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_iss_valid    = io.stin.valid
  val s0_prf_valid    = io.prefetch_req.valid && io.dcache.req.ready
  val s0_valid        = s0_iss_valid || s0_prf_valid
  val s0_use_flow_rs  = s0_iss_valid
  val s0_use_flow_prf = !s0_iss_valid && s0_prf_valid
  val s0_in           = Mux(s0_use_flow_rs, io.stin.bits, 0.U.asTypeOf(io.stin.bits))
  val s0_isFirstIssue = Mux(s0_use_flow_rs, io.isFirstIssue, false.B)
  val s0_rsIdx        = Mux(s0_use_flow_rs, io.rsIdx, 0.U)
  val s0_size         = Mux(s0_use_flow_rs, LSUOpType.size(s0_in.uop.ctrl.fuOpType), 3.U)
  val s0_mem_idx      = Mux(s0_use_flow_rs, s0_in.uop.sqIdx.value, 0.U)
  val s0_rob_idx      = Mux(s0_use_flow_rs, s0_in.uop.robIdx, 0.U.asTypeOf(s0_in.uop.robIdx))
  val s0_pc           = Mux(s0_use_flow_rs, s0_in.uop.cf.pc, 0.U)
  val s0_instr_type   = Mux(s0_use_flow_rs, STORE_SOURCE.U, DCACHE_PREFETCH_SOURCE.U)
  val s0_wlineflag    = Mux(s0_use_flow_rs, s0_in.uop.ctrl.fuOpType === LSUOpType.cbo_zero, false.B)
  val s0_out          = Wire(new LsPipelineBundle)
  val s0_kill         = s0_in.uop.robIdx.needFlush(io.redirect)
  val s0_can_go       = s1_ready
  val s0_fire         = s0_valid && !s0_kill && s0_can_go
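
  // Note on the address computation below (editorial summary, not a spec): the 12-bit
  // page-offset add and the upper-bit adjustment are computed separately, presumably so
  // that the low (index) bits of the store address settle early; the upper bits then only
  // need a +1/-1 correction selected by the carry out of the low add and the sign of imm12.
  // Illustrative example (values are hypothetical, not from this design):
  //   src(0) = 0x8000_0FF8, imm12 = 0x010 (positive):
  //     saddr_lo = 0xFF8 + 0x010 = 0x1008 -> carry (bit 12) set,
  //     saddr_hi = src(0)(VAddrBits-1,12) + 1, so s0_saddr = 0x8000_1008
  //   src(0) = 0x8000_0008, imm12 = 0xFF8 (-8, negative):
  //     saddr_lo = 0x008 + 0xFF8 = 0x1000 -> carry set but imm12(11) = 1,
  //     so saddr_hi keeps src(0)(VAddrBits-1,12) and s0_saddr = 0x8000_0000
  // The "s0_addr_spec_*" perf counters at the bottom of this file count how often the
  // upper bits end up equal to src(0)'s upper bits, i.e. no carry/borrow crossed bit 12.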
  // generate addr
  // val saddr = s0_in.bits.src(0) + SignExt(s0_in.bits.uop.ctrl.imm(11,0), VAddrBits)
  val imm12 = WireInit(s0_in.uop.ctrl.imm(11,0))
  val saddr_lo = s0_in.src(0)(11,0) + Cat(0.U(1.W), imm12)
  val saddr_hi = Mux(saddr_lo(12),
    Mux(imm12(11), s0_in.src(0)(VAddrBits-1, 12), s0_in.src(0)(VAddrBits-1, 12) + 1.U),
    Mux(imm12(11), s0_in.src(0)(VAddrBits-1, 12) + SignExt(1.U, VAddrBits-12), s0_in.src(0)(VAddrBits-1, 12)),
  )
  val s0_saddr = Cat(saddr_hi, saddr_lo(11,0))
  val isHsv    = WireInit(LSUOpType.isHsv(s0_in.uop.ctrl.fuOpType))
  val s0_vaddr = Mux(s0_use_flow_rs, s0_saddr, io.prefetch_req.bits.vaddr)
  val s0_mask  = Mux(s0_use_flow_rs, genVWmask(s0_saddr, s0_in.uop.ctrl.fuOpType(1,0)), 3.U)

  io.tlb.req.valid                   := s0_valid
  io.tlb.req.bits.vaddr              := s0_vaddr
  io.tlb.req.bits.cmd                := TlbCmd.write
  io.tlb.req.bits.hyperinst          := isHsv
  io.tlb.req.bits.hlvx               := false.B
  io.tlb.req.bits.size               := s0_size
  io.tlb.req.bits.kill               := false.B
  io.tlb.req.bits.memidx.is_ld       := false.B
  io.tlb.req.bits.memidx.is_st       := true.B
  io.tlb.req.bits.memidx.idx         := s0_mem_idx
  io.tlb.req.bits.debug.robIdx       := s0_rob_idx
  io.tlb.req.bits.no_translate       := false.B
  io.tlb.req.bits.debug.pc           := s0_pc
  io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue
  io.tlb.req_kill                    := false.B
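
  // Roadmap of the dcache interaction in this pipeline (summary of the surrounding code,
  // not a spec): in s0 the virtual address is sent to the dcache with cmd M_PFW as a
  // write-intent probe (only meta/tag are read, nothing is written); in s1 the translated
  // physical address is forwarded via io.dcache.s1_paddr and the probe can be killed on
  // tlb miss / exception / mmio / redirect (io.dcache.s1_kill); in s2 it can be killed
  // again (io.dcache.s2_kill), and the hit/miss result returned on io.dcache.resp feeds
  // lsq_replenish.miss and the prefetch training port.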
  // Dcache access here: not a **real** dcache write,
  // just read meta and tag in dcache, to find out whether the store will hit or miss

  // NOTE: The store request does not wait for the dcache to be ready.
  //       If the dcache is not ready at this time, the dcache is not queried.
  //       But a store prefetch request always waits for the dcache to be ready to make progress.
  io.dcache.req.valid          := s0_fire
  io.dcache.req.bits.cmd       := MemoryOpConstants.M_PFW
  io.dcache.req.bits.vaddr     := s0_vaddr
  io.dcache.req.bits.instrtype := s0_instr_type

  s0_out := DontCare
  s0_out.vaddr := s0_vaddr
  // Now data uses its own io
  // s1_out.data := genWdata(s1_in.src(1), s1_in.uop.ctrl.fuOpType(1,0))
  s0_out.data         := s0_in.src(1) // FIXME: remove data from pipeline
  s0_out.uop          := s0_in.uop
  s0_out.miss         := false.B
  s0_out.rsIdx        := s0_rsIdx
  s0_out.mask         := s0_mask
  s0_out.isFirstIssue := s0_isFirstIssue
  s0_out.isHWPrefetch := s0_use_flow_prf
  s0_out.wlineflag    := s0_wlineflag
  when(s0_valid && s0_isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }

  // exception check
  val s0_addr_aligned = LookupTree(s0_in.uop.ctrl.fuOpType(1,0), List(
    "b00".U -> true.B,                      // b
    "b01".U -> (s0_out.vaddr(0)   === 0.U), // h
    "b10".U -> (s0_out.vaddr(1,0) === 0.U), // w
    "b11".U -> (s0_out.vaddr(2,0) === 0.U)  // d
  ))
  s0_out.uop.cf.exceptionVec(storeAddrMisaligned) := Mux(s0_use_flow_rs, !s0_addr_aligned, false.B)

  io.st_mask_out.valid      := s0_use_flow_rs
  io.st_mask_out.bits.mask  := s0_out.mask
  io.st_mask_out.bits.sqIdx := s0_out.uop.sqIdx

  io.stin.ready := s1_ready
  io.prefetch_req.ready := s1_ready && io.dcache.req.ready && !s0_iss_valid

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid  = RegInit(false.B)
  val s1_in     = RegEnable(s0_out, s0_fire)
  val s1_out    = Wire(new LsPipelineBundle)
  val s1_kill   = Wire(Bool())
  val s1_can_go = s2_ready
  val s1_fire   = s1_valid && !s1_kill && s1_can_go

  // mmio cbo decoder
  val s1_mmio_cbo = s1_in.uop.ctrl.fuOpType === LSUOpType.cbo_clean ||
                    s1_in.uop.ctrl.fuOpType === LSUOpType.cbo_flush ||
                    s1_in.uop.ctrl.fuOpType === LSUOpType.cbo_inval
  val s1_paddr     = io.tlb.resp.bits.paddr(0)
  val s1_gpaddr    = io.tlb.resp.bits.gpaddr(0)
  val s1_tlb_miss  = io.tlb.resp.bits.miss
  val s1_mmio      = s1_mmio_cbo
  val s1_exception = ExceptionNO.selectByFu(s1_out.uop.cf.exceptionVec, staCfg).asUInt.orR
  s1_kill := s1_in.uop.robIdx.needFlush(io.redirect) || s1_tlb_miss

  s1_ready := !s1_valid || s1_kill || s2_ready
  io.tlb.resp.ready := true.B // TODO: why does dtlbResp need a ready?
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
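
  // Store-to-load violation (nuke) query: once the physical address is known in s1,
  // the store publishes (robIdx, paddr, mask) on io.stld_nuke_query so that younger
  // loads which may already have obtained data from an overlapping address can be
  // detected and re-executed. This is only a summary of how the signals below are
  // driven; the actual matching logic lives on the load / RAW-queue side.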
  // st-ld violation detect request.
  io.stld_nuke_query.valid       := s1_valid && !s1_tlb_miss && !s1_in.isHWPrefetch
  io.stld_nuke_query.bits.robIdx := s1_in.uop.robIdx
  io.stld_nuke_query.bits.paddr  := s1_paddr
  io.stld_nuke_query.bits.mask   := s1_in.mask

  // issue
  io.issue.valid := s1_valid && !s1_tlb_miss && !s1_in.isHWPrefetch
  io.issue.bits  := RegEnable(s0_in, s0_valid)

  // Send TLB feedback to store issue queue
  // Store feedback is generated in store_s1, sent to RS in store_s2
  val s1_feedback = Wire(Valid(new RSFeedback))
  s1_feedback.valid                 := s1_valid && !s1_in.isHWPrefetch
  s1_feedback.bits.hit              := !s1_tlb_miss
  s1_feedback.bits.flushState       := io.tlb.resp.bits.ptwBack
  s1_feedback.bits.rsIdx            := s1_out.rsIdx
  s1_feedback.bits.sourceType       := RSFeedbackType.tlbMiss
  s1_feedback.bits.dataInvalidSqIdx := DontCare
  XSDebug(s1_feedback.valid,
    "S1 Store: tlbHit: %d rsIdx: %d\n",
    s1_feedback.bits.hit,
    s1_feedback.bits.rsIdx
  )

  // io.feedback_slow is driven by the registered s1_feedback in store_s2 below

  // get paddr from dtlb, check if rollback is needed
  // writeback store inst to lsq
  s1_out         := s1_in
  s1_out.paddr   := s1_paddr
  s1_out.gpaddr  := s1_gpaddr
  s1_out.miss    := false.B
  s1_out.mmio    := s1_mmio
  s1_out.tlbMiss := s1_tlb_miss
  s1_out.atomic  := s1_mmio
  s1_out.uop.cf.exceptionVec(storePageFault)      := io.tlb.resp.bits.excp(0).pf.st
  s1_out.uop.cf.exceptionVec(storeAccessFault)    := io.tlb.resp.bits.excp(0).af.st
  s1_out.uop.cf.exceptionVec(storeGuestPageFault) := io.tlb.resp.bits.excp(0).gpf.st

  io.lsq.valid     := s1_valid && !s1_in.isHWPrefetch
  io.lsq.bits      := s1_out
  io.lsq.bits.miss := s1_tlb_miss

  // kill dcache write intent request when tlb miss or exception
  io.dcache.s1_kill  := (s1_tlb_miss || s1_exception || s1_mmio || s1_in.uop.robIdx.needFlush(io.redirect))
  io.dcache.s1_paddr := s1_paddr

  // keep this assignment after the s1_out field assignments above so it is not overwritten
  val s1_tlb_memidx = io.tlb.resp.bits.memidx
  when(s1_tlb_memidx.is_st && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_out.uop.sqIdx.value) {
    // printf("Store idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // mmio check
  val s2_valid  = RegInit(false.B)
  val s2_in     = RegEnable(s1_out, s1_fire)
  val s2_out    = Wire(new LsPipelineBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go

  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }

  val s2_pmp = WireInit(io.pmp)

  val s2_exception = ExceptionNO.selectByFu(s2_out.uop.cf.exceptionVec, staCfg).asUInt.orR
  val s2_mmio = s2_in.mmio || s2_pmp.mmio
  s2_kill := (s2_mmio && !s2_exception) || s2_in.uop.robIdx.needFlush(io.redirect)

  s2_out        := s2_in
  s2_out.mmio   := s2_mmio && !s2_exception
  s2_out.atomic := s2_in.atomic || s2_pmp.atomic
  s2_out.uop.cf.exceptionVec(storeAccessFault) := s2_in.uop.cf.exceptionVec(storeAccessFault) || s2_pmp.st
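
  // At this point the PMP/PMA response (io.pmp) has been merged into the s2 view of the
  // store: s2_pmp.mmio can turn a cacheable store into an mmio one, s2_pmp.st raises a
  // storeAccessFault, and s2_pmp.atomic marks the region as requiring atomic handling.
  // A non-faulting mmio store is killed here (s2_kill), so it does not go through the
  // normal writeback path below; it is expected to be completed on the store-queue side
  // instead (its mmio flag reaches the LSQ via lsq_replenish). This comment only
  // summarizes the surrounding code.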
  // kill dcache write intent request when mmio or exception
  io.dcache.s2_kill := (s2_mmio || s2_exception || s2_in.uop.robIdx.needFlush(io.redirect))
  io.dcache.s2_pc   := s2_out.uop.cf.pc
  // TODO: dcache resp
  io.dcache.resp.ready := true.B

  // feedback tlb miss to RS in store_s2
  io.feedback_slow.valid := RegNext(s1_feedback.valid && !s1_out.uop.robIdx.needFlush(io.redirect))
  io.feedback_slow.bits  := RegNext(s1_feedback.bits)

  // mmio and exception
  io.lsq_replenish := s2_out

  // prefetch related
  io.lsq_replenish.miss := io.dcache.resp.fire && io.dcache.resp.bits.miss // miss info

  // RegNext prefetch train for better timing
  // ** Now, prefetch train is valid at store s3 **
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in, latch = true)
  // override miss bit
  io.prefetch_train.bits.miss := RegNext(io.dcache.resp.bits.miss)
  // TODO: add prefetch and access bit
  io.prefetch_train.bits.meta_prefetch := false.B
  io.prefetch_train.bits.meta_access   := false.B
  if (EnableStorePrefetchSMS) {
    io.prefetch_train.valid := RegNext(s2_valid && io.dcache.resp.fire && !s2_out.mmio && !s2_in.tlbMiss && !s2_in.isHWPrefetch)
  } else {
    io.prefetch_train.valid := false.B
  }

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 3
  // --------------------------------------------------------------------------------
  // store write back
  val s3_valid  = RegInit(false.B)
  val s3_in     = RegEnable(s2_out, s2_fire)
  val s3_out    = Wire(new ExuOutput)
  val s3_kill   = s3_in.uop.robIdx.needFlush(io.redirect)
  val s3_can_go = s3_ready
  val s3_fire   = s3_valid && !s3_kill && s3_can_go

  when (s2_fire) { s3_valid := (!s2_mmio || s2_exception) && !s2_out.isHWPrefetch }
  .elsewhen (s3_fire) { s3_valid := false.B }
  .elsewhen (s3_kill) { s3_valid := false.B }

  // wb: writeback
  val SelectGroupSize   = RollbackGroupSize
  val lgSelectGroupSize = log2Ceil(SelectGroupSize)
  val TotalSelectCycles = scala.math.ceil(log2Ceil(LoadQueueRAWSize).toFloat / lgSelectGroupSize).toInt + 1

  s3_out := DontCare
  s3_out.uop             := s3_in.uop
  s3_out.data            := DontCare
  s3_out.redirectValid   := false.B
  s3_out.redirect        := DontCare
  s3_out.debug.isMMIO    := s3_in.mmio
  s3_out.debug.paddr     := s3_in.paddr
  s3_out.debug.vaddr     := s3_in.vaddr
  s3_out.debug.isPerfCnt := false.B
  s3_out.fflags          := DontCare
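
  // The extra "stage x" slots below delay store writeback by TotalDelayCycles =
  // TotalSelectCycles - 2 cycles, presumably so that writeback lines up with the latency
  // of the RAW-violation selection tree (RollbackGroupSize-wide select stages over
  // LoadQueueRAWSize entries, plus one cycle). Worked example with hypothetical parameter
  // values (not necessarily this configuration): LoadQueueRAWSize = 64 and
  // RollbackGroupSize = 8 give TotalSelectCycles = ceil(6 / 3) + 1 = 3, hence
  // TotalDelayCycles = 1, i.e. one extra register slice between s3 and io.stout.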
  // Pipeline
  // --------------------------------------------------------------------------------
  // stage x
  // --------------------------------------------------------------------------------
  // delay TotalSelectCycles - 2 cycle(s)
  val TotalDelayCycles = TotalSelectCycles - 2
  val sx_valid = Wire(Vec(TotalDelayCycles + 1, Bool()))
  val sx_ready = Wire(Vec(TotalDelayCycles + 1, Bool()))
  val sx_in    = Wire(Vec(TotalDelayCycles + 1, new ExuOutput))

  // backward ready signal
  s3_ready := sx_ready.head
  for (i <- 0 until TotalDelayCycles + 1) {
    if (i == 0) {
      sx_valid(i) := s3_valid
      sx_in(i)    := s3_out
      sx_ready(i) := !s3_valid || sx_in(i).uop.robIdx.needFlush(io.redirect) || (if (TotalDelayCycles == 0) io.stout.ready else sx_ready(i+1))
    } else {
      val cur_kill   = sx_in(i).uop.robIdx.needFlush(io.redirect)
      val cur_can_go = (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
      val cur_fire   = sx_valid(i) && !cur_kill && cur_can_go
      val prev_fire  = sx_valid(i-1) && !sx_in(i-1).uop.robIdx.needFlush(io.redirect) && sx_ready(i)

      sx_ready(i) := !sx_valid(i) || cur_kill || (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
      val sx_valid_can_go = prev_fire || cur_fire || cur_kill
      sx_valid(i) := RegEnable(Mux(prev_fire, true.B, false.B), false.B, sx_valid_can_go)
      sx_in(i)    := RegEnable(sx_in(i-1), prev_fire)
    }
  }
  val sx_last_valid = sx_valid.takeRight(1).head
  val sx_last_ready = sx_ready.takeRight(1).head
  val sx_last_in    = sx_in.takeRight(1).head
  sx_last_ready := !sx_last_valid || sx_last_in.uop.robIdx.needFlush(io.redirect) || io.stout.ready

  io.stout.valid := sx_last_valid && !sx_last_in.uop.robIdx.needFlush(io.redirect)
  io.stout.bits  := sx_last_in
  io.stout.bits.redirectValid := false.B

  io.debug_ls := DontCare
  io.debug_ls.s1.isTlbFirstMiss := io.tlb.resp.valid && io.tlb.resp.bits.miss && io.tlb.resp.bits.debug.isFirstIssue && !s1_in.isHWPrefetch
  io.debug_ls.s1_robIdx := s1_in.uop.robIdx.value

  private def printPipeLine(pipeline: LsPipelineBundle, cond: Bool, name: String): Unit = {
    XSDebug(cond,
      p"$name" + p" pc ${Hexadecimal(pipeline.uop.cf.pc)} " +
        p"addr ${Hexadecimal(pipeline.vaddr)} -> ${Hexadecimal(pipeline.paddr)} " +
        p"op ${Binary(pipeline.uop.ctrl.fuOpType)} " +
        p"data ${Hexadecimal(pipeline.data)} " +
        p"mask ${Hexadecimal(pipeline.mask)}\n"
    )
  }

  printPipeLine(s0_out, s0_valid, "S0")
  printPipeLine(s1_out, s1_valid, "S1")

  // perf cnt
  XSPerfAccumulate("s0_in_valid", s0_valid)
  XSPerfAccumulate("s0_in_fire", s0_fire)
  XSPerfAccumulate("s0_in_fire_first_issue", s0_fire && s0_isFirstIssue)
  XSPerfAccumulate("s0_addr_spec_success", s0_fire && s0_saddr(VAddrBits-1, 12) === s0_in.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("s0_addr_spec_failed", s0_fire && s0_saddr(VAddrBits-1, 12) =/= s0_in.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("s0_addr_spec_success_once", s0_fire && s0_saddr(VAddrBits-1, 12) === s0_in.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
  XSPerfAccumulate("s0_addr_spec_failed_once", s0_fire && s0_saddr(VAddrBits-1, 12) =/= s0_in.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)

  XSPerfAccumulate("s1_in_valid", s1_valid)
  XSPerfAccumulate("s1_in_fire", s1_fire)
  XSPerfAccumulate("s1_in_fire_first_issue", s1_fire && s1_in.isFirstIssue)
  XSPerfAccumulate("s1_tlb_miss", s1_fire && s1_tlb_miss)
  XSPerfAccumulate("s1_tlb_miss_first_issue", s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
  // end
}