1/*************************************************************************************** 2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 3* Copyright (c) 2020-2021 Peng Cheng Laboratory 4* 5* XiangShan is licensed under Mulan PSL v2. 6* You can use this software according to the terms and conditions of the Mulan PSL v2. 7* You may obtain a copy of Mulan PSL v2 at: 8* http://license.coscl.org.cn/MulanPSL2 9* 10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 13* 14* See the Mulan PSL v2 for more details. 15***************************************************************************************/ 16 17package xiangshan.mem 18 19import org.chipsalliance.cde.config.Parameters 20import chisel3._ 21import chisel3.util._ 22import utils._ 23import utility._ 24import xiangshan.ExceptionNO._ 25import xiangshan._ 26import xiangshan.backend.Bundles.{DynInst, MemExuInput, MemExuOutput} 27import xiangshan.backend.fu.PMPRespBundle 28import xiangshan.backend.fu.FuConfig._ 29import xiangshan.backend.ctrlblock.{DebugLsInfoBundle, LsTopdownInfo} 30import xiangshan.backend.rob.RobPtr 31import xiangshan.backend.fu._ 32import xiangshan.backend.fu.util.SdtrigExt 33import xiangshan.cache._ 34import xiangshan.cache.wpu.ReplayCarry 35import xiangshan.cache.mmu.{TlbCmd, TlbReq, TlbRequestIO, TlbResp} 36import xiangshan.mem.mdp._ 37 38class HybridUnit(implicit p: Parameters) extends XSModule 39 with HasLoadHelper 40 with HasPerfEvents 41 with HasDCacheParameters 42 with HasCircularQueuePtrHelper 43 with HasVLSUParameters 44 with SdtrigExt 45{ 46 val io = IO(new Bundle() { 47 // control 48 val redirect = Flipped(ValidIO(new Redirect)) 49 val csrCtrl = Flipped(new CustomCSRCtrlIO) 50 51 // flow in 52 val lsin = Flipped(Decoupled(new MemExuInput)) 53 54 // flow out 55 val ldout = DecoupledIO(new MemExuOutput) 56 val stout = DecoupledIO(new MemExuOutput) 57 58 val ldu_io = new Bundle() { 59 // dcache 60 val dcache = new DCacheLoadIO 61 62 // data path 63 val sbuffer = new LoadForwardQueryIO 64 val vec_forward = new LoadForwardQueryIO 65 val lsq = new LoadToLsqIO 66 val tl_d_channel = Input(new DcacheToLduForwardIO) 67 val forward_mshr = Flipped(new LduToMissqueueForwardIO) 68 val refill = Flipped(ValidIO(new Refill)) 69 val l2_hint = Input(Valid(new L2ToL1Hint)) 70 71 // fast wakeup 72 val fast_uop = ValidIO(new DynInst) // early wakeup signal generated in load_s1, send to RS in load_s2 73 74 // trigger 75 val trigger = Vec(TriggerNum, new LoadUnitTriggerIO) 76 77 // load to load fast path 78 val l2l_fwd_in = Input(new LoadToLoadIO) 79 val l2l_fwd_out = Output(new LoadToLoadIO) 80 81 val ld_fast_match = Input(Bool()) 82 val ld_fast_fuOpType = Input(UInt()) 83 val ld_fast_imm = Input(UInt(12.W)) 84 85 // hardware prefetch to l1 cache req 86 val prefetch_req = Flipped(ValidIO(new L1PrefetchReq)) 87 88 // iq cancel 89 val ldCancel = Output(new LoadCancelIO()) // use to cancel the uops waked by this load, and cancel load 90 91 // load ecc error 92 val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err is different 93 94 // schedule error query 95 val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO))) 96 97 // queue-based replay 98 val replay = Flipped(Decoupled(new LsPipelineBundle)) 99 val lq_rep_full = Input(Bool()) 100 101 // misc 102 val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch 103 104 // Load fast replay path 105 val fast_rep_in = Flipped(Decoupled(new LqWriteBundle)) 106 val fast_rep_out = Decoupled(new LqWriteBundle) 107 108 // perf 109 val debug_ls = Output(new DebugLsInfoBundle) 110 val lsTopdownInfo = Output(new LsTopdownInfo) 111 } 112 113 val stu_io = new Bundle() { 114 val dcache = new DCacheStoreIO 115 val prefetch_req = Flipped(DecoupledIO(new StorePrefetchReq)) 116 val issue = Valid(new MemExuInput) 117 val lsq = ValidIO(new LsPipelineBundle) 118 val lsq_replenish = Output(new LsPipelineBundle()) 119 val stld_nuke_query = Valid(new StoreNukeQueryIO) 120 val st_mask_out = Valid(new StoreMaskBundle) 121 val debug_ls = Output(new DebugLsInfoBundle) 122 } 123 124 val vec_stu_io = new Bundle() { 125 val in = Flipped(DecoupledIO(new VecStorePipeBundle())) 126 val isFirstIssue = Input(Bool()) 127 val lsq = ValidIO(new LsPipelineBundle()) 128 val feedbackSlow = ValidIO(new VSFQFeedback) 129 } 130 131 // prefetch 132 val prefetch_train = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms 133 val prefetch_train_l1 = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride 134 val canAcceptLowConfPrefetch = Output(Bool()) 135 val canAcceptHighConfPrefetch = Output(Bool()) 136 val correctMissTrain = Input(Bool()) 137 138 // data path 139 val tlb = new TlbRequestIO(2) 140 val pmp = Flipped(new PMPRespBundle()) // arrive same to tlb now 141 142 // rs feedback 143 val feedback_fast = ValidIO(new RSFeedback) // stage 2 144 val feedback_slow = ValidIO(new RSFeedback) // stage 3 145 }) 146 147 val StorePrefetchL1Enabled = EnableStorePrefetchAtCommit || EnableStorePrefetchAtIssue || EnableStorePrefetchSPB 148 val s1_ready, s2_ready, s3_ready, sx_can_go = WireInit(false.B) 149 150 // Pipeline 151 // -------------------------------------------------------------------------------- 152 // stage 0 153 // -------------------------------------------------------------------------------- 154 // generate addr, use addr to query DCache and DTLB 155 val s0_valid = Wire(Bool()) 156 val s0_dcache_ready = Wire(Bool()) 157 val s0_kill = Wire(Bool()) 158 val s0_vaddr = Wire(UInt(VAddrBits.W)) 159 val s0_mask = Wire(UInt((VLEN/8).W)) 160 val s0_uop = Wire(new DynInst) 161 val s0_has_rob_entry = Wire(Bool()) 162 val s0_rsIdx = Wire(UInt(log2Up(MemIQSizeMax).W)) 163 val s0_mshrid = Wire(UInt()) 164 val s0_try_l2l = Wire(Bool()) 165 val s0_rep_carry = Wire(new ReplayCarry(nWays)) 166 val s0_isFirstIssue = Wire(Bool()) 167 val s0_fast_rep = Wire(Bool()) 168 val s0_ld_rep = Wire(Bool()) 169 val s0_l2l_fwd = Wire(Bool()) 170 val s0_sched_idx = Wire(UInt()) 171 val s0_deqPortIdx = Wire(UInt(log2Ceil(LoadPipelineWidth).W)) 172 val s0_can_go = s1_ready 173 val s0_fire = s0_valid && s0_dcache_ready && s0_can_go 174 val s0_out = Wire(new LqWriteBundle) 175 // vector 176 val s0_isvec = WireInit(false.B) 177 val s0_exp = WireInit(true.B) 178 val s0_flowPtr = WireInit(0.U.asTypeOf(new VsFlowPtr)) 179 180 // load flow select/gen 181 // src0: super load replayed by LSQ (cache miss replay) (io.ldu_io.replay) 182 // src1: fast load replay (io.ldu_io.fast_rep_in) 183 // src2: load replayed by LSQ (io.ldu_io.replay) 184 // src3: hardware prefetch from prefetchor (high confidence) (io.prefetch) 185 // src4: int read / software prefetch first issue from RS (io.in) 186 // src5: vec read first issue from RS (TODO) 187 // src6: load try pointchaising when no issued or replayed load (io.fastpath) 188 // src7: hardware prefetch from prefetchor (high confidence) (io.prefetch) 189 // priority: high to low 190 val s0_ld_flow = FuType.isLoad(s0_uop.fuType) || FuType.isVLoad(s0_uop.fuType) 191 val s0_rep_stall = io.lsin.valid && isAfter(io.ldu_io.replay.bits.uop.robIdx, io.lsin.bits.uop.robIdx) 192 val s0_super_ld_rep_valid = io.ldu_io.replay.valid && io.ldu_io.replay.bits.forward_tlDchannel 193 val s0_ld_fast_rep_valid = io.ldu_io.fast_rep_in.valid 194 val s0_ld_rep_valid = io.ldu_io.replay.valid && !io.ldu_io.replay.bits.forward_tlDchannel && !s0_rep_stall 195 val s0_high_conf_prf_valid = io.ldu_io.prefetch_req.valid && io.ldu_io.prefetch_req.bits.confidence > 0.U 196 val s0_int_iss_valid = io.lsin.valid // int flow first issue or software prefetch 197 val s0_vec_iss_valid = io.vec_stu_io.in.valid 198 val s0_l2l_fwd_valid = io.ldu_io.l2l_fwd_in.valid && io.ldu_io.ld_fast_match 199 val s0_low_conf_prf_valid = io.ldu_io.prefetch_req.valid && io.ldu_io.prefetch_req.bits.confidence === 0.U 200 dontTouch(s0_super_ld_rep_valid) 201 dontTouch(s0_ld_fast_rep_valid) 202 dontTouch(s0_ld_rep_valid) 203 dontTouch(s0_high_conf_prf_valid) 204 dontTouch(s0_int_iss_valid) 205 dontTouch(s0_vec_iss_valid) 206 dontTouch(s0_l2l_fwd_valid) 207 dontTouch(s0_low_conf_prf_valid) 208 209 // load flow source ready 210 val s0_super_ld_rep_ready = WireInit(true.B) 211 val s0_ld_fast_rep_ready = !s0_super_ld_rep_valid 212 val s0_ld_rep_ready = !s0_super_ld_rep_valid && 213 !s0_ld_fast_rep_valid 214 val s0_high_conf_prf_ready = !s0_super_ld_rep_valid && 215 !s0_ld_fast_rep_valid && 216 !s0_ld_rep_valid 217 218 val s0_int_iss_ready = !s0_super_ld_rep_valid && 219 !s0_ld_fast_rep_valid && 220 !s0_ld_rep_valid && 221 !s0_high_conf_prf_valid 222 223 val s0_vec_iss_ready = !s0_super_ld_rep_valid && 224 !s0_ld_fast_rep_valid && 225 !s0_ld_rep_valid && 226 !s0_high_conf_prf_valid && 227 !s0_int_iss_valid 228 229 val s0_l2l_fwd_ready = !s0_super_ld_rep_valid && 230 !s0_ld_fast_rep_valid && 231 !s0_ld_rep_valid && 232 !s0_high_conf_prf_valid && 233 !s0_int_iss_valid && 234 !s0_vec_iss_valid 235 236 val s0_low_conf_prf_ready = !s0_super_ld_rep_valid && 237 !s0_ld_fast_rep_valid && 238 !s0_ld_rep_valid && 239 !s0_high_conf_prf_valid && 240 !s0_int_iss_valid && 241 !s0_vec_iss_valid && 242 !s0_l2l_fwd_valid 243 dontTouch(s0_super_ld_rep_ready) 244 dontTouch(s0_ld_fast_rep_ready) 245 dontTouch(s0_ld_rep_ready) 246 dontTouch(s0_high_conf_prf_ready) 247 dontTouch(s0_int_iss_ready) 248 dontTouch(s0_vec_iss_ready) 249 dontTouch(s0_l2l_fwd_ready) 250 dontTouch(s0_low_conf_prf_ready) 251 252 // load flow source select (OH) 253 val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready 254 val s0_ld_fast_rep_select = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready 255 val s0_ld_rep_select = s0_ld_rep_valid && s0_ld_rep_ready 256 val s0_hw_prf_select = s0_high_conf_prf_ready && s0_high_conf_prf_valid || 257 s0_low_conf_prf_ready && s0_low_conf_prf_valid 258 val s0_int_iss_select = s0_int_iss_ready && s0_int_iss_valid 259 val s0_vec_iss_select = s0_vec_iss_ready && s0_vec_iss_valid 260 val s0_l2l_fwd_select = s0_l2l_fwd_ready && s0_l2l_fwd_valid 261 dontTouch(s0_super_ld_rep_select) 262 dontTouch(s0_ld_fast_rep_select) 263 dontTouch(s0_ld_rep_select) 264 dontTouch(s0_hw_prf_select) 265 dontTouch(s0_int_iss_select) 266 dontTouch(s0_vec_iss_select) 267 dontTouch(s0_l2l_fwd_select) 268 269 s0_valid := (s0_super_ld_rep_valid || 270 s0_ld_fast_rep_valid || 271 s0_ld_rep_valid || 272 s0_high_conf_prf_valid || 273 s0_int_iss_valid || 274 s0_vec_iss_valid || 275 s0_l2l_fwd_valid || 276 s0_low_conf_prf_valid) && !s0_kill 277 278 // which is S0's out is ready and dcache is ready 279 val s0_try_ptr_chasing = s0_l2l_fwd_select 280 val s0_do_try_ptr_chasing = s0_try_ptr_chasing && s0_can_go && io.ldu_io.dcache.req.ready 281 val s0_ptr_chasing_vaddr = io.ldu_io.l2l_fwd_in.data(5, 0) +& io.ldu_io.ld_fast_imm(5, 0) 282 val s0_ptr_chasing_canceled = WireInit(false.B) 283 s0_kill := s0_ptr_chasing_canceled || (s0_out.uop.robIdx.needFlush(io.redirect) && !s0_try_ptr_chasing) 284 285 // prefetch related ctrl signal 286 val s0_prf = Wire(Bool()) 287 val s0_prf_rd = Wire(Bool()) 288 val s0_prf_wr = Wire(Bool()) 289 val s0_hw_prf = s0_hw_prf_select 290 291 io.canAcceptLowConfPrefetch := s0_low_conf_prf_ready 292 io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready 293 294 if (StorePrefetchL1Enabled) { 295 s0_dcache_ready := Mux(s0_ld_flow, io.ldu_io.dcache.req.ready, io.stu_io.dcache.req.ready) 296 } else { 297 s0_dcache_ready := Mux(s0_ld_flow, io.ldu_io.dcache.req.ready, true.B) 298 } 299 300 // query DTLB 301 io.tlb.req.valid := s0_valid && s0_dcache_ready 302 io.tlb.req.bits.cmd := Mux(s0_prf, 303 Mux(s0_prf_wr, TlbCmd.write, TlbCmd.read), 304 Mux(s0_ld_flow, TlbCmd.read, TlbCmd.write) 305 ) 306 io.tlb.req.bits.vaddr := Mux(s0_hw_prf_select, io.ldu_io.prefetch_req.bits.paddr, s0_vaddr) 307 io.tlb.req.bits.size := Mux(s0_isvec, io.vec_stu_io.in.bits.alignedType, LSUOpType.size(s0_uop.fuOpType)) 308 io.tlb.req.bits.kill := s0_kill 309 io.tlb.req.bits.memidx.is_ld := s0_ld_flow 310 io.tlb.req.bits.memidx.is_st := !s0_ld_flow 311 io.tlb.req.bits.memidx.idx := s0_uop.lqIdx.value 312 io.tlb.req.bits.debug.robIdx := s0_uop.robIdx 313 io.tlb.req.bits.no_translate := s0_hw_prf_select // hw b.reqetch addr does not need to be translated 314 io.tlb.req.bits.debug.pc := s0_uop.pc 315 io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue 316 317 // query DCache 318 // for load 319 io.ldu_io.dcache.req.valid := s0_valid && s0_dcache_ready && s0_ld_flow 320 io.ldu_io.dcache.req.bits.cmd := Mux(s0_prf_rd, MemoryOpConstants.M_PFR, 321 Mux(s0_prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)) 322 io.ldu_io.dcache.req.bits.vaddr := s0_vaddr 323 io.ldu_io.dcache.req.bits.mask := s0_mask 324 io.ldu_io.dcache.req.bits.data := DontCare 325 io.ldu_io.dcache.req.bits.isFirstIssue := s0_isFirstIssue 326 io.ldu_io.dcache.req.bits.instrtype := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U) 327 io.ldu_io.dcache.req.bits.debug_robIdx := s0_uop.robIdx.value 328 io.ldu_io.dcache.req.bits.replayCarry := s0_rep_carry 329 io.ldu_io.dcache.req.bits.id := DontCare // TODO: update cache meta 330 io.ldu_io.dcache.pf_source := Mux(s0_hw_prf_select, io.ldu_io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL) 331 332 // for store 333 io.stu_io.dcache.req.valid := s0_valid && s0_dcache_ready && !s0_ld_flow && !s0_prf 334 io.stu_io.dcache.req.bits.cmd := MemoryOpConstants.M_PFW 335 io.stu_io.dcache.req.bits.vaddr := s0_vaddr 336 io.stu_io.dcache.req.bits.instrtype := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, STORE_SOURCE.U) 337 338 // load flow priority mux 339 def fromNullSource() = { 340 s0_vaddr := 0.U 341 s0_mask := 0.U 342 s0_uop := 0.U.asTypeOf(new DynInst) 343 s0_try_l2l := false.B 344 s0_has_rob_entry := false.B 345 s0_rsIdx := 0.U 346 s0_rep_carry := 0.U.asTypeOf(s0_rep_carry.cloneType) 347 s0_mshrid := 0.U 348 s0_isFirstIssue := false.B 349 s0_fast_rep := false.B 350 s0_ld_rep := false.B 351 s0_l2l_fwd := false.B 352 s0_prf := false.B 353 s0_prf_rd := false.B 354 s0_prf_wr := false.B 355 s0_sched_idx := 0.U 356 s0_deqPortIdx := 0.U 357 } 358 359 def fromFastReplaySource(src: LqWriteBundle) = { 360 s0_vaddr := src.vaddr 361 s0_mask := src.mask 362 s0_uop := src.uop 363 s0_try_l2l := false.B 364 s0_has_rob_entry := src.hasROBEntry 365 s0_rep_carry := src.rep_info.rep_carry 366 s0_mshrid := src.rep_info.mshr_id 367 s0_rsIdx := src.rsIdx 368 s0_isFirstIssue := false.B 369 s0_fast_rep := true.B 370 s0_ld_rep := src.isLoadReplay 371 s0_l2l_fwd := false.B 372 s0_prf := LSUOpType.isPrefetch(src.uop.fuOpType) 373 s0_prf_rd := src.uop.fuOpType === LSUOpType.prefetch_r 374 s0_prf_wr := src.uop.fuOpType === LSUOpType.prefetch_w 375 s0_sched_idx := src.schedIndex 376 s0_deqPortIdx := src.deqPortIdx 377 } 378 379 def fromNormalReplaySource(src: LsPipelineBundle) = { 380 s0_vaddr := src.vaddr 381 s0_mask := genVWmask(src.vaddr, src.uop.fuOpType(1, 0)) 382 s0_uop := src.uop 383 s0_try_l2l := false.B 384 s0_has_rob_entry := true.B 385 s0_rsIdx := src.rsIdx 386 s0_rep_carry := src.replayCarry 387 s0_mshrid := src.mshrid 388 s0_isFirstIssue := false.B 389 s0_fast_rep := false.B 390 s0_ld_rep := true.B 391 s0_l2l_fwd := false.B 392 s0_prf := LSUOpType.isPrefetch(src.uop.fuOpType) 393 s0_prf_rd := src.uop.fuOpType === LSUOpType.prefetch_r 394 s0_prf_wr := src.uop.fuOpType === LSUOpType.prefetch_w 395 s0_sched_idx := src.schedIndex 396 s0_deqPortIdx := src.deqPortIdx 397 } 398 399 def fromPrefetchSource(src: L1PrefetchReq) = { 400 s0_vaddr := src.getVaddr() 401 s0_mask := 0.U 402 s0_uop := DontCare 403 s0_try_l2l := false.B 404 s0_has_rob_entry := false.B 405 s0_rsIdx := 0.U 406 s0_rep_carry := 0.U.asTypeOf(s0_rep_carry.cloneType) 407 s0_mshrid := 0.U 408 s0_isFirstIssue := false.B 409 s0_fast_rep := false.B 410 s0_ld_rep := false.B 411 s0_l2l_fwd := false.B 412 s0_prf := true.B 413 s0_prf_rd := !src.is_store 414 s0_prf_wr := src.is_store 415 s0_sched_idx := 0.U 416 s0_deqPortIdx := 0.U 417 } 418 419 def fromIntIssueSource(src: MemExuInput) = { 420 s0_vaddr := src.src(0) + SignExt(src.uop.imm(11, 0), VAddrBits) 421 s0_mask := genVWmask(s0_vaddr, src.uop.fuOpType(1,0)) 422 s0_uop := src.uop 423 s0_try_l2l := false.B 424 s0_has_rob_entry := true.B 425 s0_rsIdx := src.iqIdx 426 s0_rep_carry := 0.U.asTypeOf(s0_rep_carry.cloneType) 427 s0_mshrid := 0.U 428 s0_isFirstIssue := true.B 429 s0_fast_rep := false.B 430 s0_ld_rep := false.B 431 s0_l2l_fwd := false.B 432 s0_prf := LSUOpType.isPrefetch(src.uop.fuOpType) 433 s0_prf_rd := src.uop.fuOpType === LSUOpType.prefetch_r 434 s0_prf_wr := src.uop.fuOpType === LSUOpType.prefetch_w 435 s0_sched_idx := 0.U 436 s0_deqPortIdx := src.deqPortIdx 437 } 438 439 def fromVecIssueSource(src: VecStorePipeBundle) = { 440 // For now, vector port handles only vector store flows 441 s0_vaddr := src.vaddr 442 s0_mask := src.mask 443 s0_uop := src.uop 444 s0_try_l2l := false.B 445 s0_has_rob_entry := true.B 446 s0_rsIdx := 0.U 447 s0_rep_carry := 0.U.asTypeOf(s0_rep_carry.cloneType) 448 s0_mshrid := 0.U 449 s0_isFirstIssue := src.isFirstIssue 450 s0_fast_rep := false.B 451 s0_ld_rep := false.B 452 s0_l2l_fwd := false.B 453 s0_prf := false.B 454 s0_prf_rd := false.B 455 s0_prf_wr := false.B 456 s0_sched_idx := 0.U 457 458 s0_isvec := true.B 459 s0_exp := io.vec_stu_io.in.bits.exp 460 s0_flowPtr := io.vec_stu_io.in.bits.flowPtr 461 s0_deqPortIdx := 0.U 462 } 463 464 def fromLoadToLoadSource(src: LoadToLoadIO) = { 465 s0_vaddr := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0)) 466 s0_mask := genVWmask(s0_vaddr, io.ldu_io.ld_fast_fuOpType(1, 0)) 467 // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding. 468 // Assume the pointer chasing is always ld. 469 s0_uop.fuOpType := io.ldu_io.ld_fast_fuOpType 470 s0_try_l2l := true.B 471 // we dont care s0_isFirstIssue and s0_rsIdx and s0_sqIdx and s0_deqPortIdx in S0 when trying pointchasing 472 // because these signals will be updated in S1 473 s0_has_rob_entry := false.B 474 s0_rsIdx := 0.U 475 s0_mshrid := 0.U 476 s0_rep_carry := 0.U.asTypeOf(s0_rep_carry.cloneType) 477 s0_isFirstIssue := true.B 478 s0_fast_rep := false.B 479 s0_ld_rep := false.B 480 s0_l2l_fwd := true.B 481 s0_prf := false.B 482 s0_prf_rd := false.B 483 s0_prf_wr := false.B 484 s0_sched_idx := 0.U 485 s0_deqPortIdx := 0.U 486 } 487 488 // set default 489 s0_uop := DontCare 490 when (s0_super_ld_rep_select) { fromNormalReplaySource(io.ldu_io.replay.bits) } 491 .elsewhen (s0_ld_fast_rep_select) { fromFastReplaySource(io.ldu_io.fast_rep_in.bits) } 492 .elsewhen (s0_ld_rep_select) { fromNormalReplaySource(io.ldu_io.replay.bits) } 493 .elsewhen (s0_hw_prf_select) { fromPrefetchSource(io.ldu_io.prefetch_req.bits) } 494 .elsewhen (s0_int_iss_select) { fromIntIssueSource(io.lsin.bits) } 495 .elsewhen (s0_vec_iss_select) { fromVecIssueSource(io.vec_stu_io.in.bits) } 496 .otherwise { 497 if (EnableLoadToLoadForward) { 498 fromLoadToLoadSource(io.ldu_io.l2l_fwd_in) 499 } else { 500 fromNullSource() 501 } 502 } 503 504 // address align check 505 val s0_addr_aligned = LookupTree(Mux(s0_isvec, io.vec_stu_io.in.bits.alignedType, s0_uop.fuOpType(1, 0)), List( 506 "b00".U -> true.B, //b 507 "b01".U -> (s0_vaddr(0) === 0.U), //h 508 "b10".U -> (s0_vaddr(1, 0) === 0.U), //w 509 "b11".U -> (s0_vaddr(2, 0) === 0.U) //d 510 )) 511 512 // accept load flow if dcache ready (tlb is always ready) 513 // TODO: prefetch need writeback to loadQueueFlag 514 s0_out := DontCare 515 s0_out.rsIdx := s0_rsIdx 516 s0_out.vaddr := s0_vaddr 517 s0_out.mask := s0_mask 518 s0_out.uop := s0_uop 519 s0_out.isFirstIssue := s0_isFirstIssue 520 s0_out.hasROBEntry := s0_has_rob_entry 521 s0_out.isPrefetch := s0_prf 522 s0_out.isHWPrefetch := s0_hw_prf 523 s0_out.isFastReplay := s0_fast_rep 524 s0_out.isLoadReplay := s0_ld_rep 525 s0_out.isFastPath := s0_l2l_fwd 526 s0_out.mshrid := s0_mshrid 527 s0_out.isvec := s0_isvec 528 s0_out.exp := s0_exp 529 s0_out.sflowPtr := s0_flowPtr 530 s0_out.uop.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned && s0_ld_flow 531 s0_out.uop.exceptionVec(storeAddrMisaligned) := !s0_addr_aligned && !s0_ld_flow 532 s0_out.forward_tlDchannel := s0_super_ld_rep_select 533 when(io.tlb.req.valid && s0_isFirstIssue) { 534 s0_out.uop.debugInfo.tlbFirstReqTime := GTimer() 535 }.otherwise{ 536 s0_out.uop.debugInfo.tlbFirstReqTime := s0_uop.debugInfo.tlbFirstReqTime 537 } 538 s0_out.schedIndex := s0_sched_idx 539 s0_out.deqPortIdx := s0_deqPortIdx 540 541 // load fast replay 542 io.ldu_io.fast_rep_in.ready := (s0_can_go && io.ldu_io.dcache.req.ready && s0_ld_fast_rep_ready) 543 544 // load flow source ready 545 // cache missed load has highest priority 546 // always accept cache missed load flow from load replay queue 547 io.ldu_io.replay.ready := (s0_can_go && io.ldu_io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select)) 548 549 // accept load flow from rs when: 550 // 1) there is no lsq-replayed load 551 // 2) there is no fast replayed load 552 // 3) there is no high confidence prefetch request 553 io.lsin.ready := (s0_can_go && 554 Mux(FuType.isLoad(io.lsin.bits.uop.fuType), io.ldu_io.dcache.req.ready, 555 (if (StorePrefetchL1Enabled) io.stu_io.dcache.req.ready else true.B)) && s0_int_iss_ready) 556 io.vec_stu_io.in.ready := s0_can_go && io.ldu_io.dcache.req.ready && s0_vec_iss_ready 557 558 559 // for hw prefetch load flow feedback, to be added later 560 // io.prefetch_in.ready := s0_hw_prf_select 561 562 // dcache replacement extra info 563 // TODO: should prefetch load update replacement? 564 io.ldu_io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.ldu_io.replay.bits.replacementUpdated, false.B) 565 566 io.stu_io.prefetch_req.ready := s1_ready && io.stu_io.dcache.req.ready && !io.lsin.valid 567 568 // load debug 569 XSDebug(io.ldu_io.dcache.req.fire && s0_ld_flow, 570 p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n" 571 ) 572 XSDebug(s0_valid && s0_ld_flow, 573 p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, lqIdx ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " + 574 p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n") 575 576 // store debug 577 XSDebug(io.stu_io.dcache.req.fire && !s0_ld_flow, 578 p"[DCACHE STORE REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n" 579 ) 580 XSDebug(s0_valid && !s0_ld_flow, 581 p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, sqIdx ${Hexadecimal(s0_out.uop.sqIdx.asUInt)}, " + 582 p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n") 583 584 585 // Pipeline 586 // -------------------------------------------------------------------------------- 587 // stage 1 588 // -------------------------------------------------------------------------------- 589 // TLB resp (send paddr to dcache) 590 val s1_valid = RegInit(false.B) 591 val s1_in = Wire(new LqWriteBundle) 592 val s1_out = Wire(new LqWriteBundle) 593 val s1_kill = Wire(Bool()) 594 val s1_can_go = s2_ready 595 val s1_fire = s1_valid && !s1_kill && s1_can_go 596 val s1_ld_flow = RegNext(s0_ld_flow) 597 val s1_isvec = RegEnable(s0_out.isvec, false.B, s0_fire) 598 599 s1_ready := !s1_valid || s1_kill || s2_ready 600 when (s0_fire) { s1_valid := true.B } 601 .elsewhen (s1_fire) { s1_valid := false.B } 602 .elsewhen (s1_kill) { s1_valid := false.B } 603 s1_in := RegEnable(s0_out, s0_fire) 604 605 val s1_fast_rep_dly_err = RegNext(io.ldu_io.fast_rep_in.bits.delayedLoadError) 606 val s1_fast_rep_kill = s1_fast_rep_dly_err && s1_in.isFastReplay 607 val s1_l2l_fwd_dly_err = RegNext(io.ldu_io.l2l_fwd_in.dly_ld_err) 608 val s1_l2l_fwd_kill = s1_l2l_fwd_dly_err && s1_in.isFastPath 609 val s1_late_kill = s1_fast_rep_kill || s1_l2l_fwd_kill 610 val s1_vaddr_hi = Wire(UInt()) 611 val s1_vaddr_lo = Wire(UInt()) 612 val s1_vaddr = Wire(UInt()) 613 val s1_paddr_dup_lsu = Wire(UInt()) 614 val s1_paddr_dup_dcache = Wire(UInt()) 615 val s1_ld_exception = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, LduCfg).asUInt.orR // af & pf exception were modified below. 616 val s1_st_exception = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, StaCfg).asUInt.orR // af & pf exception were modified below. 617 val s1_exception = (s1_ld_flow && s1_ld_exception) || (!s1_ld_flow && s1_st_exception) 618 val s1_tlb_miss = io.tlb.resp.bits.miss 619 val s1_prf = s1_in.isPrefetch 620 val s1_hw_prf = s1_in.isHWPrefetch 621 val s1_sw_prf = s1_prf && !s1_hw_prf 622 val s1_tlb_memidx = io.tlb.resp.bits.memidx 623 624 // mmio cbo decoder 625 val s1_mmio_cbo = (s1_in.uop.fuOpType === LSUOpType.cbo_clean || 626 s1_in.uop.fuOpType === LSUOpType.cbo_flush || 627 s1_in.uop.fuOpType === LSUOpType.cbo_inval) && !s1_ld_flow && !s1_prf 628 val s1_mmio = s1_mmio_cbo 629 630 s1_vaddr_hi := s1_in.vaddr(VAddrBits - 1, 6) 631 s1_vaddr_lo := s1_in.vaddr(5, 0) 632 s1_vaddr := Cat(s1_vaddr_hi, s1_vaddr_lo) 633 s1_paddr_dup_lsu := io.tlb.resp.bits.paddr(0) 634 s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1) 635 636 when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && 637 s1_tlb_memidx.idx === s1_in.uop.lqIdx.value && s1_ld_flow) { 638 // printf("Load idx = %d\n", s1_tlb_memidx.idx) 639 s1_out.uop.debugInfo.tlbRespTime := GTimer() 640 } .elsewhen(s1_tlb_memidx.is_st && io.tlb.resp.valid && !s1_tlb_miss && 641 s1_tlb_memidx.idx === s1_out.uop.sqIdx.value && !s1_ld_flow) { 642 // printf("Store idx = %d\n", s1_tlb_memidx.idx) 643 s1_out.uop.debugInfo.tlbRespTime := GTimer() 644 } 645 646 io.tlb.req_kill := s1_kill 647 io.tlb.resp.ready := true.B 648 649 io.ldu_io.dcache.s1_paddr_dup_lsu <> s1_paddr_dup_lsu 650 io.ldu_io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache 651 io.ldu_io.dcache.s1_kill := s1_kill || s1_tlb_miss || s1_exception 652 653 // store to load forwarding 654 io.ldu_io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow) 655 io.ldu_io.sbuffer.vaddr := s1_vaddr 656 io.ldu_io.sbuffer.paddr := s1_paddr_dup_lsu 657 io.ldu_io.sbuffer.uop := s1_in.uop 658 io.ldu_io.sbuffer.sqIdx := s1_in.uop.sqIdx 659 io.ldu_io.sbuffer.mask := s1_in.mask 660 io.ldu_io.sbuffer.pc := s1_in.uop.pc // FIXME: remove it 661 662 io.ldu_io.vec_forward.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow) 663 io.ldu_io.vec_forward.vaddr := s1_vaddr 664 io.ldu_io.vec_forward.paddr := s1_paddr_dup_lsu 665 io.ldu_io.vec_forward.uop := s1_in.uop 666 io.ldu_io.vec_forward.sqIdx := s1_in.uop.sqIdx 667 io.ldu_io.vec_forward.mask := s1_in.mask 668 io.ldu_io.vec_forward.pc := s1_in.uop.pc // FIXME: remove it 669 670 io.ldu_io.lsq.forward.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow) 671 io.ldu_io.lsq.forward.vaddr := s1_vaddr 672 io.ldu_io.lsq.forward.paddr := s1_paddr_dup_lsu 673 io.ldu_io.lsq.forward.uop := s1_in.uop 674 io.ldu_io.lsq.forward.sqIdx := s1_in.uop.sqIdx 675 io.ldu_io.lsq.forward.sqIdxMask := 0.U 676 io.ldu_io.lsq.forward.mask := s1_in.mask 677 io.ldu_io.lsq.forward.pc := s1_in.uop.pc // FIXME: remove it 678 679 // st-ld violation query 680 val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => { 681 io.ldu_io.stld_nuke_query(w).valid && // query valid 682 isAfter(s1_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store 683 // TODO: Fix me when vector instruction 684 (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match 685 (s1_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data mask contain 686 })).asUInt.orR && !s1_tlb_miss && s1_ld_flow 687 688 s1_out := s1_in 689 s1_out.vaddr := s1_vaddr 690 s1_out.paddr := s1_paddr_dup_lsu 691 s1_out.tlbMiss := s1_tlb_miss 692 s1_out.ptwBack := io.tlb.resp.bits.ptwBack 693 s1_out.rsIdx := s1_in.rsIdx 694 s1_out.rep_info.debug := s1_in.uop.debugInfo 695 s1_out.rep_info.nuke := s1_nuke && !s1_sw_prf 696 s1_out.lateKill := s1_late_kill 697 698 when (s1_ld_flow) { 699 when (!s1_late_kill) { 700 // current ori test will cause the case of ldest == 0, below will be modifeid in the future. 701 // af & pf exception were modified 702 s1_out.uop.exceptionVec(loadPageFault) := io.tlb.resp.bits.excp(0).pf.ld 703 s1_out.uop.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld 704 } .otherwise { 705 s1_out.uop.exceptionVec(loadAddrMisaligned) := false.B 706 s1_out.uop.exceptionVec(loadAccessFault) := s1_late_kill 707 } 708 } .otherwise { 709 s1_out.uop.exceptionVec(storePageFault) := io.tlb.resp.bits.excp(0).pf.st 710 s1_out.uop.exceptionVec(storeAccessFault) := io.tlb.resp.bits.excp(0).af.st 711 } 712 713 // pointer chasing 714 val s1_try_ptr_chasing = RegNext(s0_do_try_ptr_chasing, false.B) 715 val s1_ptr_chasing_vaddr = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing) 716 val s1_fu_op_type_not_ld = WireInit(false.B) 717 val s1_not_fast_match = WireInit(false.B) 718 val s1_addr_mismatch = WireInit(false.B) 719 val s1_addr_misaligned = WireInit(false.B) 720 val s1_ptr_chasing_canceled = WireInit(false.B) 721 val s1_cancel_ptr_chasing = WireInit(false.B) 722 723 s1_kill := s1_late_kill || 724 s1_cancel_ptr_chasing || 725 s1_in.uop.robIdx.needFlush(io.redirect) || 726 RegEnable(s0_kill, false.B, io.lsin.valid || io.ldu_io.replay.valid || io.ldu_io.l2l_fwd_in.valid || io.ldu_io.fast_rep_in.valid || io.vec_stu_io.in.valid) 727 728 if (EnableLoadToLoadForward) { 729 // Sometimes, we need to cancel the load-load forwarding. 730 // These can be put at S0 if timing is bad at S1. 731 // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow) 732 s1_addr_mismatch := s1_ptr_chasing_vaddr(6) || RegEnable(io.ldu_io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing) 733 // Case 1: the address is misaligned, kill s1 734 s1_addr_misaligned := LookupTree(s1_in.uop.fuOpType(1, 0), List( 735 "b00".U -> false.B, //b 736 "b01".U -> (s1_vaddr(0) =/= 0.U), //h 737 "b10".U -> (s1_vaddr(1, 0) =/= 0.U), //w 738 "b11".U -> (s1_vaddr(2, 0) =/= 0.U) //d 739 )) 740 // Case 2: this load-load uop is cancelled 741 s1_ptr_chasing_canceled := !io.lsin.valid || FuType.isStore(io.lsin.bits.uop.fuType) 742 743 when (s1_try_ptr_chasing) { 744 s1_cancel_ptr_chasing := s1_addr_mismatch || s1_addr_misaligned || s1_ptr_chasing_canceled 745 746 s1_in.uop := io.lsin.bits.uop 747 s1_in.rsIdx := io.lsin.bits.iqIdx 748 s1_in.isFirstIssue := io.lsin.bits.isFirstIssue 749 s1_vaddr_lo := s1_ptr_chasing_vaddr(5, 0) 750 s1_paddr_dup_lsu := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo) 751 s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo) 752 753 // recored tlb time when get the data to ensure the correctness of the latency calculation (although it should not record in here, because it does not use tlb) 754 s1_in.uop.debugInfo.tlbFirstReqTime := GTimer() 755 s1_in.uop.debugInfo.tlbRespTime := GTimer() 756 } 757 when (!s1_cancel_ptr_chasing) { 758 s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.ldu_io.replay.fire && !io.ldu_io.fast_rep_in.fire 759 when (s1_try_ptr_chasing) { 760 io.lsin.ready := true.B 761 } 762 } 763 } 764 765 // pre-calcuate sqIdx mask in s0, then send it to lsq in s1 for forwarding 766 val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize)) 767 // to enable load-load, sqIdxMask must be calculated based on lsin.uop 768 // If the timing here is not OK, load-load forwarding has to be disabled. 769 // Or we calculate sqIdxMask at RS?? 770 io.ldu_io.lsq.forward.sqIdxMask := s1_sqIdx_mask 771 if (EnableLoadToLoadForward) { 772 when (s1_try_ptr_chasing) { 773 io.ldu_io.lsq.forward.sqIdxMask := UIntToMask(io.lsin.bits.uop.sqIdx.value, StoreQueueSize) 774 } 775 } 776 777 io.ldu_io.forward_mshr.valid := s1_valid && s1_out.forward_tlDchannel && s1_ld_flow 778 io.ldu_io.forward_mshr.mshrid := s1_out.mshrid 779 io.ldu_io.forward_mshr.paddr := s1_out.paddr 780 781 io.stu_io.dcache.s1_kill := s1_tlb_miss || s1_exception || s1_mmio || s1_in.uop.robIdx.needFlush(io.redirect) 782 io.stu_io.dcache.s1_paddr := s1_paddr_dup_dcache 783 784 785 // load debug 786 XSDebug(s1_valid && s1_ld_flow, 787 p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " + 788 p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n") 789 790 // store debug 791 XSDebug(s1_valid && !s1_ld_flow, 792 p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lId ${Hexadecimal(s1_out.uop.sqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " + 793 p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n") 794 795 // store out 796 io.stu_io.lsq.valid := s1_valid && !s1_ld_flow && !s1_prf && !s1_isvec 797 io.stu_io.lsq.bits := s1_out 798 io.stu_io.lsq.bits.miss := s1_tlb_miss 799 800 io.vec_stu_io.lsq.valid := s1_valid && !s1_ld_flow && !s1_prf && s1_isvec 801 io.vec_stu_io.lsq.bits := s1_out 802 io.vec_stu_io.lsq.bits.miss := s1_tlb_miss 803 804 io.stu_io.st_mask_out.valid := s1_valid && !s1_ld_flow && !s1_prf 805 io.stu_io.st_mask_out.bits.mask := s1_out.mask 806 io.stu_io.st_mask_out.bits.sqIdx := s1_out.uop.sqIdx 807 808 io.stu_io.issue.valid := s1_valid && !s1_tlb_miss && !s1_ld_flow && !s1_prf && !s1_isvec 809 io.stu_io.issue.bits := RegEnable(io.lsin.bits, io.lsin.fire) 810 811 // st-ld violation dectect request 812 io.stu_io.stld_nuke_query.valid := s1_valid && !s1_tlb_miss && !s1_ld_flow && !s1_prf 813 io.stu_io.stld_nuke_query.bits.robIdx := s1_in.uop.robIdx 814 io.stu_io.stld_nuke_query.bits.paddr := s1_paddr_dup_lsu 815 io.stu_io.stld_nuke_query.bits.mask := s1_in.mask 816 817 // Pipeline 818 // -------------------------------------------------------------------------------- 819 // stage 2 820 // -------------------------------------------------------------------------------- 821 // s2: DCache resp 822 val s2_valid = RegInit(false.B) 823 val s2_in = Wire(new LqWriteBundle) 824 val s2_out = Wire(new LqWriteBundle) 825 val s2_kill = Wire(Bool()) 826 val s2_can_go = s3_ready 827 val s2_fire = s2_valid && !s2_kill && s2_can_go 828 val s2_isvec = RegEnable(s1_isvec, false.B, s1_fire) 829 val s2_exp = RegEnable(s1_out.exp, true.B, s1_fire) 830 val s2_paddr = RegEnable(s1_paddr_dup_lsu, s1_fire) 831 832 s2_kill := s2_in.uop.robIdx.needFlush(io.redirect) 833 s2_ready := !s2_valid || s2_kill || s3_ready 834 when (s1_fire) { s2_valid := true.B } 835 .elsewhen (s2_fire) { s2_valid := false.B } 836 .elsewhen (s2_kill) { s2_valid := false.B } 837 s2_in := RegEnable(s1_out, s1_fire) 838 839 val s2_pmp = WireInit(io.pmp) 840 841 val s2_prf = s2_in.isPrefetch 842 val s2_hw_prf = s2_in.isHWPrefetch 843 val s2_ld_flow = RegEnable(s1_ld_flow, s1_fire) 844 845 // exception that may cause load addr to be invalid / illegal 846 // if such exception happen, that inst and its exception info 847 // will be force writebacked to rob 848 val s2_exception_vec = WireInit(s2_in.uop.exceptionVec) 849 when (s2_ld_flow) { 850 when (!s2_in.lateKill) { 851 s2_exception_vec(loadAccessFault) := (s2_in.uop.exceptionVec(loadAccessFault) || s2_pmp.ld) && s2_exp 852 // soft prefetch will not trigger any exception (but ecc error interrupt may be triggered) 853 when (s2_prf || s2_in.tlbMiss) { 854 s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType) 855 } 856 } 857 } .otherwise { 858 s2_exception_vec(storeAccessFault) := s2_in.uop.exceptionVec(storeAccessFault) || s2_pmp.st 859 when (s2_prf || s2_in.tlbMiss) { 860 s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType) 861 } 862 } 863 val s2_ld_exception = ExceptionNO.selectByFu(s2_exception_vec, LduCfg).asUInt.orR && s2_ld_flow 864 val s2_st_exception = ExceptionNO.selectByFu(s2_exception_vec, StaCfg).asUInt.orR && !s2_ld_flow 865 val s2_exception = s2_ld_exception || s2_st_exception 866 867 val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr) 868 val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.ldu_io.forward_mshr.forward() 869 val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr) 870 871 // writeback access fault caused by ecc error / bus error 872 // * ecc data error is slow to generate, so we will not use it until load stage 3 873 // * in load stage 3, an extra signal io.load_error will be used to 874 val s2_actually_mmio = s2_pmp.mmio 875 val s2_ld_mmio = !s2_prf && 876 s2_actually_mmio && 877 !s2_exception && 878 !s2_in.tlbMiss && 879 s2_ld_flow 880 val s2_st_mmio = !s2_prf && 881 (RegNext(s1_mmio) || s2_pmp.mmio) && 882 !s2_exception && 883 !s2_in.tlbMiss && 884 !s2_ld_flow 885 val s2_st_atomic = !s2_prf && 886 (RegNext(s1_mmio) || s2_pmp.atomic) && 887 !s2_exception && 888 !s2_in.tlbMiss && 889 !s2_ld_flow 890 val s2_full_fwd = Wire(Bool()) 891 val s2_mem_amb = s2_in.uop.storeSetHit && 892 io.ldu_io.lsq.forward.addrInvalid 893 894 val s2_tlb_miss = s2_in.tlbMiss 895 val s2_fwd_fail = io.ldu_io.lsq.forward.dataInvalid || io.ldu_io.vec_forward.dataInvalid 896 val s2_dcache_miss = io.ldu_io.dcache.resp.bits.miss && 897 !s2_fwd_frm_d_chan_or_mshr && 898 !s2_full_fwd 899 900 val s2_mq_nack = io.ldu_io.dcache.s2_mq_nack && 901 !s2_fwd_frm_d_chan_or_mshr && 902 !s2_full_fwd 903 904 val s2_bank_conflict = io.ldu_io.dcache.s2_bank_conflict && 905 !s2_fwd_frm_d_chan_or_mshr && 906 !s2_full_fwd 907 908 val s2_wpu_pred_fail = io.ldu_io.dcache.s2_wpu_pred_fail && 909 !s2_fwd_frm_d_chan_or_mshr && 910 !s2_full_fwd 911 912 val s2_rar_nack = io.ldu_io.lsq.ldld_nuke_query.req.valid && 913 !io.ldu_io.lsq.ldld_nuke_query.req.ready 914 915 val s2_raw_nack = io.ldu_io.lsq.stld_nuke_query.req.valid && 916 !io.ldu_io.lsq.stld_nuke_query.req.ready 917 918 // st-ld violation query 919 // NeedFastRecovery Valid when 920 // 1. Fast recovery query request Valid. 921 // 2. Load instruction is younger than requestors(store instructions). 922 // 3. Physical address match. 923 // 4. Data contains. 924 val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => { 925 io.ldu_io.stld_nuke_query(w).valid && // query valid 926 isAfter(s2_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store 927 // TODO: Fix me when vector instruction 928 (s2_in.paddr(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match 929 (s2_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data mask contain 930 })).asUInt.orR && s2_ld_flow || s2_in.rep_info.nuke 931 932 val s2_cache_handled = io.ldu_io.dcache.resp.bits.handled 933 val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) && 934 io.ldu_io.dcache.resp.bits.tag_error 935 936 val s2_troublem = !s2_exception && 937 !s2_ld_mmio && 938 !s2_prf && 939 !s2_in.lateKill && 940 s2_ld_flow 941 942 io.ldu_io.dcache.resp.ready := true.B 943 io.stu_io.dcache.resp.ready := true.B 944 val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_ld_mmio || s2_prf || s2_in.lateKill) && s2_ld_flow 945 assert(!(s2_valid && (s2_dcache_should_resp && !io.ldu_io.dcache.resp.valid)), "DCache response got lost") 946 947 // fast replay require 948 val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail)) 949 val s2_nuke_fast_rep = !s2_mq_nack && 950 !s2_dcache_miss && 951 !s2_bank_conflict && 952 !s2_wpu_pred_fail && 953 !s2_rar_nack && 954 !s2_raw_nack && 955 s2_nuke 956 957 val s2_fast_rep = !s2_mem_amb && 958 !s2_tlb_miss && 959 !s2_fwd_fail && 960 (s2_dcache_fast_rep || s2_nuke_fast_rep) && 961 s2_troublem 962 963 // need allocate new entry 964 val s2_can_query = !s2_mem_amb && 965 !s2_tlb_miss && 966 !s2_fwd_fail && 967 !s2_dcache_fast_rep && 968 s2_troublem 969 970 val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error) 971 972 // ld-ld violation require 973 io.ldu_io.lsq.ldld_nuke_query.req.valid := s2_valid && s2_can_query 974 io.ldu_io.lsq.ldld_nuke_query.req.bits.uop := s2_in.uop 975 io.ldu_io.lsq.ldld_nuke_query.req.bits.mask := s2_in.mask 976 io.ldu_io.lsq.ldld_nuke_query.req.bits.paddr := s2_in.paddr 977 io.ldu_io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss) 978 979 // st-ld violation require 980 io.ldu_io.lsq.stld_nuke_query.req.valid := s2_valid && s2_can_query 981 io.ldu_io.lsq.stld_nuke_query.req.bits.uop := s2_in.uop 982 io.ldu_io.lsq.stld_nuke_query.req.bits.mask := s2_in.mask 983 io.ldu_io.lsq.stld_nuke_query.req.bits.paddr := s2_in.paddr 984 io.ldu_io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss) 985 986 // merge forward result 987 // lsq has higher priority than sbuffer 988 val s2_fwd_mask = Wire(Vec((VLEN/8), Bool())) 989 val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W))) 990 s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.ldu_io.lsq.forward.dataInvalid && !io.ldu_io.vec_forward.dataInvalid 991 // generate XLEN/8 Muxs 992 for (i <- 0 until VLEN / 8) { 993 s2_fwd_mask(i) := io.ldu_io.lsq.forward.forwardMask(i) || io.ldu_io.sbuffer.forwardMask(i) || io.ldu_io.vec_forward.forwardMask(i) 994 s2_fwd_data(i) := Mux( 995 io.ldu_io.lsq.forward.forwardMask(i), 996 io.ldu_io.lsq.forward.forwardData(i), 997 Mux( 998 io.ldu_io.vec_forward.forwardMask(i), 999 io.ldu_io.vec_forward.forwardData(i), 1000 io.ldu_io.sbuffer.forwardData(i) 1001 ) 1002 ) 1003 } 1004 1005 XSDebug(s2_fire && s2_ld_flow, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n", 1006 s2_in.uop.pc, 1007 io.ldu_io.lsq.forward.forwardData.asUInt, io.ldu_io.lsq.forward.forwardMask.asUInt, 1008 s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt 1009 ) 1010 1011 // 1012 s2_out := s2_in 1013 s2_out.data := 0.U // data will be generated in load s3 1014 s2_out.uop.fpWen := s2_in.uop.fpWen && !s2_exception && s2_ld_flow 1015 s2_out.mmio := s2_ld_mmio || s2_st_mmio 1016 s2_out.atomic := s2_st_atomic 1017 s2_out.uop.flushPipe := false.B 1018 s2_out.uop.exceptionVec := s2_exception_vec 1019 s2_out.forwardMask := s2_fwd_mask 1020 s2_out.forwardData := s2_fwd_data 1021 s2_out.handledByMSHR := s2_cache_handled 1022 s2_out.miss := s2_dcache_miss && s2_troublem 1023 s2_out.feedbacked := io.feedback_fast.valid && !io.feedback_fast.bits.hit 1024 1025 // Generate replay signal caused by: 1026 // * st-ld violation check 1027 // * tlb miss 1028 // * dcache replay 1029 // * forward data invalid 1030 // * dcache miss 1031 s2_out.rep_info.mem_amb := s2_mem_amb && s2_troublem 1032 s2_out.rep_info.tlb_miss := s2_tlb_miss && s2_troublem 1033 s2_out.rep_info.fwd_fail := s2_fwd_fail && s2_troublem 1034 s2_out.rep_info.dcache_rep := s2_mq_nack && s2_troublem 1035 s2_out.rep_info.dcache_miss := s2_dcache_miss && s2_troublem 1036 s2_out.rep_info.bank_conflict := s2_bank_conflict && s2_troublem 1037 s2_out.rep_info.wpu_fail := s2_wpu_pred_fail && s2_troublem 1038 s2_out.rep_info.rar_nack := s2_rar_nack && s2_troublem 1039 s2_out.rep_info.raw_nack := s2_raw_nack && s2_troublem 1040 s2_out.rep_info.nuke := s2_nuke && s2_troublem 1041 s2_out.rep_info.full_fwd := s2_data_fwded 1042 s2_out.rep_info.data_inv_sq_idx := Mux(io.ldu_io.vec_forward.dataInvalid, s2_out.uop.sqIdx, io.ldu_io.lsq.forward.dataInvalidSqIdx) 1043 s2_out.rep_info.addr_inv_sq_idx := Mux(io.ldu_io.vec_forward.addrInvalid, s2_out.uop.sqIdx, io.ldu_io.lsq.forward.addrInvalidSqIdx) 1044 s2_out.rep_info.rep_carry := io.ldu_io.dcache.resp.bits.replayCarry 1045 s2_out.rep_info.mshr_id := io.ldu_io.dcache.resp.bits.mshr_id 1046 s2_out.rep_info.last_beat := s2_in.paddr(log2Up(refillBytes)) 1047 s2_out.rep_info.debug := s2_in.uop.debugInfo 1048 1049 // if forward fail, replay this inst from fetch 1050 val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss 1051 // if ld-ld violation is detected, replay from this inst from fetch 1052 val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_ld_mmio && !s2_is_prefetch && !s2_in.tlbMiss 1053 // io.out.bits.uop.replayInst := false.B 1054 1055 // to be removed 1056 val s2_ld_need_fb = !s2_in.isLoadReplay && // already feedbacked 1057 io.ldu_io.lq_rep_full && // LoadQueueReplay is full 1058 s2_out.rep_info.need_rep && // need replay 1059 !s2_exception && // no exception is triggered 1060 !s2_hw_prf && // not hardware prefetch 1061 !s2_isvec 1062 val s2_st_need_fb = !s2_ld_flow && !s2_hw_prf && !s2_isvec 1063 io.feedback_fast.valid := s2_valid && (s2_ld_need_fb || s2_st_need_fb) 1064 io.feedback_fast.bits.hit := Mux(s2_ld_flow, false.B, !s2_tlb_miss) 1065 io.feedback_fast.bits.flushState := s2_in.ptwBack 1066 io.feedback_fast.bits.robIdx := s2_in.uop.robIdx 1067 io.feedback_fast.bits.sourceType := Mux(s2_ld_flow, RSFeedbackType.lrqFull, RSFeedbackType.tlbMiss) 1068 io.feedback_fast.bits.dataInvalidSqIdx := DontCare 1069 1070 val s2_vec_feedback = Wire(Valid(new VSFQFeedback)) 1071 s2_vec_feedback.valid := s2_valid && !s2_ld_flow && !s2_hw_prf && s2_isvec 1072 s2_vec_feedback.bits.flowPtr := s2_out.sflowPtr 1073 s2_vec_feedback.bits.hit := !s2_tlb_miss 1074 s2_vec_feedback.bits.sourceType := RSFeedbackType.tlbMiss 1075 s2_vec_feedback.bits.paddr := s2_paddr 1076 1077 io.stu_io.lsq_replenish := s2_out 1078 io.stu_io.lsq_replenish.miss := io.ldu_io.dcache.resp.fire && io.ldu_io.dcache.resp.bits.miss 1079 1080 io.ldu_io.ldCancel.ld1Cancel.valid := s2_valid && ( 1081 (s2_out.rep_info.need_rep && s2_out.isFirstIssue) || // exe fail and issued from IQ 1082 s2_ld_mmio // is mmio 1083 ) && s2_ld_flow 1084 io.ldu_io.ldCancel.ld1Cancel.bits := s2_out.deqPortIdx 1085 1086 // fast wakeup 1087 io.ldu_io.fast_uop.valid := RegNext( 1088 !io.ldu_io.dcache.s1_disable_fast_wakeup && 1089 s1_valid && 1090 !s1_kill && 1091 !io.tlb.resp.bits.miss && 1092 !io.ldu_io.lsq.forward.dataInvalidFast 1093 ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_ld_mmio && s2_ld_flow) && !s2_isvec 1094 io.ldu_io.fast_uop.bits := RegNext(s1_out.uop) 1095 1096 // 1097 io.ldu_io.s2_ptr_chasing := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire) 1098 1099 // prefetch train 1100 io.prefetch_train.valid := s2_valid && !s2_actually_mmio && !s2_in.tlbMiss 1101 io.prefetch_train.bits.fromLsPipelineBundle(s2_in) 1102 io.prefetch_train.bits.miss := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.miss, io.stu_io.dcache.resp.bits.miss) // TODO: use trace with bank conflict? 1103 io.prefetch_train.bits.meta_prefetch := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.meta_prefetch, false.B) 1104 io.prefetch_train.bits.meta_access := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.meta_access, false.B) 1105 1106 io.prefetch_train_l1.valid := s2_valid && !s2_actually_mmio && s2_ld_flow 1107 io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in) 1108 io.prefetch_train_l1.bits.miss := io.ldu_io.dcache.resp.bits.miss 1109 io.prefetch_train_l1.bits.meta_prefetch := io.ldu_io.dcache.resp.bits.meta_prefetch 1110 io.prefetch_train_l1.bits.meta_access := io.ldu_io.dcache.resp.bits.meta_access 1111 if (env.FPGAPlatform){ 1112 io.ldu_io.dcache.s0_pc := DontCare 1113 io.ldu_io.dcache.s1_pc := DontCare 1114 io.ldu_io.dcache.s2_pc := DontCare 1115 }else{ 1116 io.ldu_io.dcache.s0_pc := s0_out.uop.pc 1117 io.ldu_io.dcache.s1_pc := s1_out.uop.pc 1118 io.ldu_io.dcache.s2_pc := s2_out.uop.pc 1119 } 1120 io.ldu_io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill 1121 io.stu_io.dcache.s2_kill := s2_pmp.st || s2_actually_mmio || s2_kill 1122 io.stu_io.dcache.s2_pc := s2_out.uop.pc 1123 1124 val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready && s1_ld_flow 1125 val s2_ld_valid_dup = RegInit(0.U(6.W)) 1126 s2_ld_valid_dup := 0x0.U(6.W) 1127 when (s1_ld_left_fire && !s1_out.isHWPrefetch && s1_ld_flow) { s2_ld_valid_dup := 0x3f.U(6.W) } 1128 when (s1_kill || s1_out.isHWPrefetch || !s1_ld_flow) { s2_ld_valid_dup := 0x0.U(6.W) } 1129 assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch) || RegNext(!s1_ld_flow))) 1130 1131 // Pipeline 1132 // -------------------------------------------------------------------------------- 1133 // stage 3 1134 // -------------------------------------------------------------------------------- 1135 // writeback and update load queue 1136 val s3_valid = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect)) 1137 val s3_in = RegEnable(s2_out, s2_fire) 1138 val s3_out = Wire(Valid(new MemExuOutput)) 1139 val s3_dcache_rep = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire) 1140 val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire) 1141 val s3_fast_rep = Wire(Bool()) 1142 val s3_ld_flow = RegNext(s2_ld_flow) 1143 val s3_troublem = RegNext(s2_troublem) 1144 val s3_kill = s3_in.uop.robIdx.needFlush(io.redirect) 1145 val s3_isvec = RegNext(s2_isvec) 1146 s3_ready := !s3_valid || s3_kill || sx_can_go 1147 1148 // forwrad last beat 1149 val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr) 1150 val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid) 1151 val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid) && s3_ld_flow 1152 1153 1154 // s3 load fast replay 1155 io.ldu_io.fast_rep_out.valid := s3_valid && 1156 s3_fast_rep && 1157 !s3_in.uop.robIdx.needFlush(io.redirect) && 1158 s3_ld_flow && 1159 !s3_isvec 1160 io.ldu_io.fast_rep_out.bits := s3_in 1161 1162 io.ldu_io.lsq.ldin.valid := s3_valid && 1163 (!s3_fast_rep || !io.ldu_io.fast_rep_out.ready) && 1164 !s3_in.feedbacked && 1165 !s3_in.lateKill && 1166 s3_ld_flow && 1167 !s3_isvec 1168 io.ldu_io.lsq.ldin.bits := s3_in 1169 io.ldu_io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid 1170 1171 /* <------- DANGEROUS: Don't change sequence here ! -------> */ 1172 io.ldu_io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools 1173 io.ldu_io.lsq.ldin.bits.replacementUpdated := io.ldu_io.dcache.resp.bits.replacementUpdated 1174 io.ldu_io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated) 1175 1176 val s3_dly_ld_err = 1177 if (EnableAccurateLoadError) { 1178 (s3_in.lateKill || io.ldu_io.dcache.resp.bits.error_delayed) && RegNext(io.csrCtrl.cache_error_enable) 1179 } else { 1180 WireInit(false.B) 1181 } 1182 io.ldu_io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid 1183 io.ldu_io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err 1184 io.ldu_io.lsq.ldin.bits.dcacheRequireReplay := s3_dcache_rep 1185 1186 val s3_vp_match_fail = RegNext(io.ldu_io.lsq.forward.matchInvalid || io.ldu_io.sbuffer.matchInvalid) && s3_troublem 1187 val s3_ldld_rep_inst = 1188 io.ldu_io.lsq.ldld_nuke_query.resp.valid && 1189 io.ldu_io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch && 1190 RegNext(io.csrCtrl.ldld_vio_check_enable) 1191 1192 val s3_rep_info = WireInit(s3_in.rep_info) 1193 s3_rep_info.dcache_miss := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid && s3_troublem 1194 val s3_rep_frm_fetch = s3_vp_match_fail || s3_ldld_rep_inst 1195 val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt) 1196 val s3_force_rep = s3_sel_rep_cause(LoadReplayCauses.C_TM) && 1197 !s3_in.uop.exceptionVec(loadAddrMisaligned) && 1198 s3_troublem 1199 1200 val s3_ld_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, LduCfg).asUInt.orR && s3_ld_flow 1201 val s3_st_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, StaCfg).asUInt.orR && !s3_ld_flow 1202 val s3_exception = s3_ld_exception || s3_st_exception 1203 when ((s3_ld_exception || s3_dly_ld_err || s3_rep_frm_fetch) && !s3_force_rep) { 1204 io.ldu_io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType) 1205 } .otherwise { 1206 io.ldu_io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools) 1207 } 1208 1209 // Int flow, if hit, will be writebacked at s3 1210 s3_out.valid := s3_valid && 1211 (!s3_ld_flow && !s3_in.feedbacked || !io.ldu_io.lsq.ldin.bits.rep_info.need_rep) && !s3_in.mmio 1212 s3_out.bits.uop := s3_in.uop 1213 s3_out.bits.uop.exceptionVec(loadAccessFault) := (s3_dly_ld_err || s3_in.uop.exceptionVec(loadAccessFault)) && s3_ld_flow 1214 s3_out.bits.uop.replayInst := s3_rep_frm_fetch 1215 s3_out.bits.data := s3_in.data 1216 s3_out.bits.debug.isMMIO := s3_in.mmio 1217 s3_out.bits.debug.isPerfCnt := false.B 1218 s3_out.bits.debug.paddr := s3_in.paddr 1219 s3_out.bits.debug.vaddr := s3_in.vaddr 1220 1221 when (s3_force_rep) { 1222 s3_out.bits.uop.exceptionVec := 0.U.asTypeOf(s3_in.uop.exceptionVec.cloneType) 1223 } 1224 1225 /* <------- DANGEROUS: Don't change sequence here ! -------> */ 1226 io.ldu_io.lsq.ldin.bits.uop := s3_out.bits.uop 1227 1228 val s3_revoke = s3_exception || io.ldu_io.lsq.ldin.bits.rep_info.need_rep 1229 io.ldu_io.lsq.ldld_nuke_query.revoke := s3_revoke 1230 io.ldu_io.lsq.stld_nuke_query.revoke := s3_revoke 1231 1232 // feedback slow 1233 s3_fast_rep := RegNext(s2_fast_rep) && 1234 !s3_in.feedbacked && 1235 !s3_in.lateKill && 1236 !s3_rep_frm_fetch && 1237 !s3_exception 1238 1239 val s3_fb_no_waiting = !s3_in.isLoadReplay && !(s3_fast_rep && io.ldu_io.fast_rep_out.ready) && !s3_in.feedbacked 1240 1241 // 1242 io.feedback_slow.valid := s3_valid && !s3_in.uop.robIdx.needFlush(io.redirect) && s3_fb_no_waiting && s3_ld_flow 1243 io.feedback_slow.bits.hit := !io.ldu_io.lsq.ldin.bits.rep_info.need_rep || io.ldu_io.lsq.ldin.ready 1244 io.feedback_slow.bits.flushState := s3_in.ptwBack 1245 io.feedback_slow.bits.robIdx := s3_in.uop.robIdx 1246 io.feedback_slow.bits.sourceType := RSFeedbackType.lrqFull 1247 io.feedback_slow.bits.dataInvalidSqIdx := DontCare 1248 1249 io.vec_stu_io.feedbackSlow.valid := RegNext(s2_vec_feedback.valid && !s2_out.uop.robIdx.needFlush(io.redirect)) 1250 io.vec_stu_io.feedbackSlow.bits := RegNext(s2_vec_feedback.bits) 1251 1252 io.ldu_io.ldCancel.ld2Cancel.valid := s3_valid && ( 1253 (io.ldu_io.lsq.ldin.bits.rep_info.need_rep && s3_in.isFirstIssue) || 1254 s3_in.mmio 1255 ) && s3_ld_flow 1256 io.ldu_io.ldCancel.ld2Cancel.bits := s3_in.deqPortIdx 1257 1258 // data from dcache hit 1259 val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle) 1260 s3_ld_raw_data_frm_cache.respDcacheData := io.ldu_io.dcache.resp.bits.data_delayed 1261 s3_ld_raw_data_frm_cache.forwardMask := RegEnable(s2_fwd_mask, s2_valid) 1262 s3_ld_raw_data_frm_cache.forwardData := RegEnable(s2_fwd_data, s2_valid) 1263 s3_ld_raw_data_frm_cache.uop := RegEnable(s2_out.uop, s2_valid) 1264 s3_ld_raw_data_frm_cache.addrOffset := RegEnable(s2_out.paddr(3, 0), s2_valid) 1265 s3_ld_raw_data_frm_cache.forward_D := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid 1266 s3_ld_raw_data_frm_cache.forwardData_D := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid)) 1267 s3_ld_raw_data_frm_cache.forward_mshr := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid) 1268 s3_ld_raw_data_frm_cache.forwardData_mshr := RegEnable(s2_fwd_data_frm_mshr, s2_valid) 1269 s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid) 1270 1271 val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData() 1272 val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List( 1273 "b0000".U -> s3_merged_data_frm_cache(63, 0), 1274 "b0001".U -> s3_merged_data_frm_cache(63, 8), 1275 "b0010".U -> s3_merged_data_frm_cache(63, 16), 1276 "b0011".U -> s3_merged_data_frm_cache(63, 24), 1277 "b0100".U -> s3_merged_data_frm_cache(63, 32), 1278 "b0101".U -> s3_merged_data_frm_cache(63, 40), 1279 "b0110".U -> s3_merged_data_frm_cache(63, 48), 1280 "b0111".U -> s3_merged_data_frm_cache(63, 56), 1281 "b1000".U -> s3_merged_data_frm_cache(127, 64), 1282 "b1001".U -> s3_merged_data_frm_cache(127, 72), 1283 "b1010".U -> s3_merged_data_frm_cache(127, 80), 1284 "b1011".U -> s3_merged_data_frm_cache(127, 88), 1285 "b1100".U -> s3_merged_data_frm_cache(127, 96), 1286 "b1101".U -> s3_merged_data_frm_cache(127, 104), 1287 "b1110".U -> s3_merged_data_frm_cache(127, 112), 1288 "b1111".U -> s3_merged_data_frm_cache(127, 120) 1289 )) 1290 val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache) 1291 1292 // FIXME: add 1 cycle delay ? 1293 io.ldout.bits := s3_out.bits 1294 io.ldout.bits.data := s3_ld_data_frm_cache 1295 io.ldout.valid := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) && s3_ld_flow && !s3_isvec 1296 1297 // for uncache 1298 io.ldu_io.lsq.uncache.ready := true.B 1299 1300 // fast load to load forward 1301 io.ldu_io.l2l_fwd_out.valid := s3_out.valid && !s3_in.lateKill && s3_ld_flow 1302 io.ldu_io.l2l_fwd_out.data := s3_ld_data_frm_cache 1303 io.ldu_io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err // ecc delayed error 1304 1305 // hybrid unit writeback to rob 1306 // delay params 1307 val SelectGroupSize = RollbackGroupSize 1308 val lgSelectGroupSize = log2Ceil(SelectGroupSize) 1309 val TotalSelectCycles = scala.math.ceil(log2Ceil(LoadQueueRAWSize).toFloat / lgSelectGroupSize).toInt + 1 1310 val TotalDelayCycles = TotalSelectCycles - 2 1311 1312 // writeback 1313 val sx_valid = Wire(Vec(TotalDelayCycles + 1, Bool())) 1314 val sx_ready = Wire(Vec(TotalDelayCycles + 1, Bool())) 1315 val sx_in = Wire(Vec(TotalDelayCycles + 1, new MemExuOutput)) 1316 1317 sx_can_go := sx_ready.head 1318 for (i <- 0 until TotalDelayCycles + 1) { 1319 if (i == 0) { 1320 sx_valid(i) := s3_valid && 1321 !s3_ld_flow && 1322 !s3_in.feedbacked && 1323 !s3_in.mmio 1324 sx_in(i) := s3_out.bits 1325 sx_ready(i) := !s3_valid(i) || sx_in(i).uop.robIdx.needFlush(io.redirect) || (if (TotalDelayCycles == 0) io.stout.ready else sx_ready(i+1)) 1326 } else { 1327 val cur_kill = sx_in(i).uop.robIdx.needFlush(io.redirect) 1328 val cur_can_go = (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1)) 1329 val cur_fire = sx_valid(i) && !cur_kill && cur_can_go 1330 val prev_fire = sx_valid(i-1) && !sx_in(i-1).uop.robIdx.needFlush(io.redirect) && sx_ready(i) 1331 1332 sx_ready(i) := !sx_valid(i) || cur_kill || (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1)) 1333 val sx_valid_can_go = prev_fire || cur_fire || cur_kill 1334 sx_valid(i) := RegEnable(Mux(prev_fire, true.B, false.B), sx_valid_can_go) 1335 sx_in(i) := RegEnable(sx_in(i-1), prev_fire) 1336 } 1337 } 1338 1339 val sx_last_valid = sx_valid.takeRight(1).head 1340 val sx_last_ready = sx_ready.takeRight(1).head 1341 val sx_last_in = sx_in.takeRight(1).head 1342 1343 sx_last_ready := !sx_last_valid || sx_last_in.uop.robIdx.needFlush(io.redirect) || io.stout.ready 1344 io.stout.valid := sx_last_valid && !sx_last_in.uop.robIdx.needFlush(io.redirect) && FuType.isStore(sx_last_in.uop.fuType) 1345 io.stout.bits := sx_last_in 1346 1347 // trigger 1348 val ld_trigger = FuType.isLoad(io.ldout.bits.uop.fuType) 1349 val last_valid_data = RegEnable(io.ldout.bits.data, io.stout.fire) 1350 val hit_ld_addr_trig_hit_vec = Wire(Vec(TriggerNum, Bool())) 1351 val lq_ld_addr_trig_hit_vec = RegNext(io.ldu_io.lsq.trigger.lqLoadAddrTriggerHitVec) 1352 (0 until TriggerNum).map{i => { 1353 val tdata2 = RegNext(RegNext(io.ldu_io.trigger(i).tdata2)) 1354 val matchType = RegNext(RegNext(io.ldu_io.trigger(i).matchType)) 1355 val tEnable = RegNext(RegNext(io.ldu_io.trigger(i).tEnable)) 1356 1357 hit_ld_addr_trig_hit_vec(i) := TriggerCmp(RegNext(s3_in.vaddr), tdata2, matchType, tEnable) 1358 io.ldu_io.trigger(i).addrHit := Mux(io.ldout.valid && ld_trigger, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i)) 1359 io.ldu_io.trigger(i).lastDataHit := TriggerCmp(last_valid_data, tdata2, matchType, tEnable) 1360 }} 1361 io.ldu_io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec 1362 1363 // FIXME: please move this part to LoadQueueReplay 1364 io.ldu_io.debug_ls := DontCare 1365 io.stu_io.debug_ls := DontCare 1366 io.stu_io.debug_ls.s1.isTlbFirstMiss := io.tlb.resp.valid && io.tlb.resp.bits.miss && io.tlb.resp.bits.debug.isFirstIssue && !s1_in.isHWPrefetch && !s1_ld_flow 1367 io.stu_io.debug_ls.s1_robIdx := s1_in.uop.robIdx.value 1368 1369 // Topdown 1370 io.ldu_io.lsTopdownInfo.s1.robIdx := s1_in.uop.robIdx.value 1371 io.ldu_io.lsTopdownInfo.s1.vaddr_valid := s1_valid && s1_in.hasROBEntry 1372 io.ldu_io.lsTopdownInfo.s1.vaddr_bits := s1_vaddr 1373 io.ldu_io.lsTopdownInfo.s2.robIdx := s2_in.uop.robIdx.value 1374 io.ldu_io.lsTopdownInfo.s2.paddr_valid := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss 1375 io.ldu_io.lsTopdownInfo.s2.paddr_bits := s2_in.paddr 1376 io.ldu_io.lsTopdownInfo.s2.first_real_miss := io.ldu_io.dcache.resp.bits.real_miss 1377 io.ldu_io.lsTopdownInfo.s2.cache_miss_en := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated 1378 1379 // perf cnt 1380 XSPerfAccumulate("s0_in_valid", io.lsin.valid) 1381 XSPerfAccumulate("s0_in_block", io.lsin.valid && !io.lsin.fire) 1382 XSPerfAccumulate("s0_in_fire_first_issue", s0_valid && s0_isFirstIssue) 1383 XSPerfAccumulate("s0_lsq_fire_first_issue", io.ldu_io.replay.fire) 1384 XSPerfAccumulate("s0_ldu_fire_first_issue", io.lsin.fire && s0_isFirstIssue) 1385 XSPerfAccumulate("s0_fast_replay_issue", io.ldu_io.fast_rep_in.fire) 1386 XSPerfAccumulate("s0_stall_out", s0_valid && !s0_can_go) 1387 XSPerfAccumulate("s0_stall_ld_dcache", s0_valid && !io.ldu_io.dcache.req.ready) 1388 XSPerfAccumulate("s0_stall_st_dcache", s0_valid && !io.stu_io.dcache.req.ready) 1389 XSPerfAccumulate("s0_addr_spec_success", s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12)) 1390 XSPerfAccumulate("s0_addr_spec_failed", s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12)) 1391 XSPerfAccumulate("s0_addr_spec_success_once", s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue) 1392 XSPerfAccumulate("s0_addr_spec_failed_once", s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue) 1393 XSPerfAccumulate("s0_forward_tl_d_channel", s0_out.forward_tlDchannel) 1394 XSPerfAccumulate("s0_hardware_prefetch_fire", s0_fire && s0_hw_prf_select) 1395 XSPerfAccumulate("s0_software_prefetch_fire", s0_fire && s0_prf && s0_int_iss_select) 1396 XSPerfAccumulate("s0_hardware_prefetch_blocked", io.ldu_io.prefetch_req.valid && !s0_hw_prf_select) 1397 XSPerfAccumulate("s0_hardware_prefetch_total", io.ldu_io.prefetch_req.valid) 1398 1399 XSPerfAccumulate("s1_in_valid", s1_valid) 1400 XSPerfAccumulate("s1_in_fire", s1_fire) 1401 XSPerfAccumulate("s1_in_fire_first_issue", s1_fire && s1_in.isFirstIssue) 1402 XSPerfAccumulate("s1_tlb_miss", s1_fire && s1_tlb_miss) 1403 XSPerfAccumulate("s1_tlb_miss_first_issue", s1_fire && s1_tlb_miss && s1_in.isFirstIssue) 1404 XSPerfAccumulate("s1_stall_out", s1_valid && !s1_can_go) 1405 XSPerfAccumulate("s1_late_kill", s1_valid && s1_fast_rep_kill) 1406 1407 XSPerfAccumulate("s2_in_valid", s2_valid) 1408 XSPerfAccumulate("s2_in_fire", s2_fire) 1409 XSPerfAccumulate("s2_in_fire_first_issue", s2_fire && s2_in.isFirstIssue) 1410 XSPerfAccumulate("s2_dcache_miss", s2_fire && io.ldu_io.dcache.resp.bits.miss) 1411 XSPerfAccumulate("s2_dcache_miss_first_issue", s2_fire && io.ldu_io.dcache.resp.bits.miss && s2_in.isFirstIssue) 1412 XSPerfAccumulate("s2_dcache_real_miss_first_issue", s2_fire && io.ldu_io.dcache.resp.bits.miss && s2_in.isFirstIssue) 1413 XSPerfAccumulate("s2_full_forward", s2_fire && s2_full_fwd) 1414 XSPerfAccumulate("s2_dcache_miss_full_forward", s2_fire && s2_dcache_miss) 1415 XSPerfAccumulate("s2_fwd_frm_d_can", s2_valid && s2_fwd_frm_d_chan) 1416 XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr", s2_valid && s2_fwd_frm_d_chan_or_mshr) 1417 XSPerfAccumulate("s2_stall_out", s2_fire && !s2_can_go) 1418 XSPerfAccumulate("s2_prefetch", s2_fire && s2_prf) 1419 XSPerfAccumulate("s2_prefetch_ignored", s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict 1420 XSPerfAccumulate("s2_prefetch_miss", s2_fire && s2_prf && io.ldu_io.dcache.resp.bits.miss) // prefetch req miss in l1 1421 XSPerfAccumulate("s2_prefetch_hit", s2_fire && s2_prf && !io.ldu_io.dcache.resp.bits.miss) // prefetch req hit in l1 1422 XSPerfAccumulate("s2_prefetch_accept", s2_fire && s2_prf && io.ldu_io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it 1423 XSPerfAccumulate("s2_forward_req", s2_fire && s2_in.forward_tlDchannel) 1424 XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid) 1425 XSPerfAccumulate("s2_successfully_forward_mshr", s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid) 1426 1427 XSPerfAccumulate("s3_fwd_frm_d_chan", s3_valid && s3_fwd_frm_d_chan_valid) 1428 1429 XSPerfAccumulate("load_to_load_forward", s1_try_ptr_chasing && !s1_ptr_chasing_canceled) 1430 XSPerfAccumulate("load_to_load_forward_try", s1_try_ptr_chasing) 1431 XSPerfAccumulate("load_to_load_forward_fail", s1_cancel_ptr_chasing) 1432 XSPerfAccumulate("load_to_load_forward_fail_cancelled", s1_cancel_ptr_chasing && s1_ptr_chasing_canceled) 1433 XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match) 1434 XSPerfAccumulate("load_to_load_forward_fail_op_not_ld", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld) 1435 XSPerfAccumulate("load_to_load_forward_fail_addr_align", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned) 1436 XSPerfAccumulate("load_to_load_forward_fail_set_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch) 1437 1438 // bug lyq: some signals in perfEvents are no longer suitable for the current MemBlock design 1439 // hardware performance counter 1440 val perfEvents = Seq( 1441 ("load_s0_in_fire ", s0_fire ), 1442 ("load_to_load_forward ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled ), 1443 ("stall_dcache ", s0_valid && s0_can_go && !io.ldu_io.dcache.req.ready ), 1444 ("load_s1_in_fire ", s0_fire ), 1445 ("load_s1_tlb_miss ", s1_fire && io.tlb.resp.bits.miss ), 1446 ("load_s2_in_fire ", s1_fire ), 1447 ("load_s2_dcache_miss ", s2_fire && io.ldu_io.dcache.resp.bits.miss ), 1448 ) 1449 generatePerfEvent() 1450}