/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput, MemExuOutput}
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.ctrlblock.{DebugLsInfoBundle, LsTopdownInfo}
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.fu.util.SdtrigExt

import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.cache.mmu._
import xiangshan.mem.mdp._

class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle
  with HasDCacheParameters
  with HasTlbConst
{
  // mshr refill index
  val mshr_id = UInt(log2Up(cfg.nMissEntries).W)
  // get full data from store queue and sbuffer
  val full_fwd = Bool()
  // wait for data from store inst's store queue index
  val data_inv_sq_idx = new SqPtr
  // wait for address from store queue index
  val addr_inv_sq_idx = new SqPtr
  // replay carry
  val rep_carry = new ReplayCarry(nWays)
  // data in last beat
  val last_beat = Bool()
  // replay cause
  val cause = Vec(LoadReplayCauses.allCauses, Bool())
  // performance debug information
  val debug = new PerfDebugInfo
  // tlb hint
  val tlb_id = UInt(log2Up(loadfiltersize).W)
  val tlb_full = Bool()

  // alias
  def mem_amb       = cause(LoadReplayCauses.C_MA)
  def tlb_miss      = cause(LoadReplayCauses.C_TM)
  def fwd_fail      = cause(LoadReplayCauses.C_FF)
  def dcache_rep    = cause(LoadReplayCauses.C_DR)
  def dcache_miss   = cause(LoadReplayCauses.C_DM)
  def wpu_fail      = cause(LoadReplayCauses.C_WF)
  def bank_conflict = cause(LoadReplayCauses.C_BC)
  def rar_nack      = cause(LoadReplayCauses.C_RAR)
  def raw_nack      = cause(LoadReplayCauses.C_RAW)
  def nuke          = cause(LoadReplayCauses.C_NK)
  def need_rep      = cause.asUInt.orR
}

class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val ldin            = DecoupledIO(new LqWriteBundle)
  val uncache         = Flipped(DecoupledIO(new MemExuOutput))
  val ld_raw_data     = Input(new LoadDataFromLQBundle)
  val forward         = new PipeLoadForwardQueryIO
  val stld_nuke_query = new LoadNukeQueryIO
  val ldld_nuke_query = new LoadNukeQueryIO
  val trigger         = Flipped(new LqTriggerIO)
}

class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
  val valid      = Bool()
  val data       = UInt(XLEN.W) // the load-to-load fast path is limited to ld (64 bit), used as vaddr src1 only
  val dly_ld_err = Bool()
}
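// Illustrative only: the pointer-chasing pattern this fast path targets is a
// chain of 64-bit loads in which each result feeds the base address of the
// next, e.g.
//   ld a0, 0(a0)   // load next node pointer
//   ld a0, 0(a0)   // chase it: a0 comes straight from the previous load
// so the dependent load can start in stage 0 with the forwarded data instead
// of waiting for writeback.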
class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle {
  val tdata2    = Input(UInt(64.W))
  val matchType = Input(UInt(2.W))
  val tEnable   = Input(Bool()) // timing is calculated before this
  val addrHit   = Output(Bool())
}

class LoadUnit(implicit p: Parameters) extends XSModule
  with HasLoadHelper
  with HasPerfEvents
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
  with HasVLSUParameters
  with SdtrigExt
{
  val io = IO(new Bundle() {
    // control
    val redirect = Flipped(ValidIO(new Redirect))
    val csrCtrl  = Flipped(new CustomCSRCtrlIO)

    // int issue path
    val ldin  = Flipped(Decoupled(new MemExuInput))
    val ldout = Decoupled(new MemExuOutput)

    // vec issue path
    val vecldin   = Flipped(Decoupled(new VecLoadPipeBundle))
    val vecldout  = Decoupled(new VecExuOutput)
    val vecReplay = Decoupled(new LsPipelineBundle)

    // data path
    val tlb          = new TlbRequestIO(2)
    val pmp          = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb response now
    val dcache       = new DCacheLoadIO
    val sbuffer      = new LoadForwardQueryIO
    val vec_forward  = new LoadForwardQueryIO // forward from vec store flow queue
    val lsq          = new LoadToLsqIO
    val tl_d_channel = Input(new DcacheToLduForwardIO)
    val forward_mshr = Flipped(new LduToMissqueueForwardIO)
    // val refill    = Flipped(ValidIO(new Refill))
    val l2_hint      = Input(Valid(new L2ToL1Hint))
    val tlb_hint     = Flipped(new TlbHintReq)
    // fast wakeup
    // TODO: implement vector fast wakeup
    val fast_uop = ValidIO(new DynInst) // early wakeup signal generated in load_s1, sent to RS in load_s2

    // trigger
    val trigger = Vec(TriggerNum, new LoadUnitTriggerIO)

    // prefetch
    val prefetch_train    = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
    val prefetch_train_l1 = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
    val prefetch_req      = Flipped(ValidIO(new L1PrefetchReq))  // hardware prefetch to l1 cache req
    val canAcceptLowConfPrefetch  = Output(Bool())
    val canAcceptHighConfPrefetch = Output(Bool())

    // load to load fast path
    val l2l_fwd_in  = Input(new LoadToLoadIO)
    val l2l_fwd_out = Output(new LoadToLoadIO)

    val ld_fast_match    = Input(Bool())
    val ld_fast_fuOpType = Input(UInt())
    val ld_fast_imm      = Input(UInt(12.W))

    // rs feedback
    val wakeup        = ValidIO(new DynInst)
    val feedback_fast = ValidIO(new RSFeedback) // stage 2
    val feedback_slow = ValidIO(new RSFeedback) // stage 3
    val ldCancel      = Output(new LoadCancelIO()) // used to cancel the uops woken up by this load, and to cancel the load itself

    // load ecc error
    val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different

    // schedule error query
    val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))

    // queue-based replay
    val replay      = Flipped(Decoupled(new LsPipelineBundle))
    val lq_rep_full = Input(Bool())

    // misc
    val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch

    // Load fast replay path
    val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
    val fast_rep_out = Decoupled(new LqWriteBundle)

    // Load RAR rollback
    val rollback = Valid(new Redirect)

    // perf
    val debug_ls         = Output(new DebugLsInfoBundle)
    val lsTopdownInfo    = Output(new LsTopdownInfo)
    val correctMissTrain = Input(Bool())
  })

  val s1_ready, s2_ready, s3_ready = WireInit(false.B)
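  // Pipeline overview (per the stage banners below): s0 selects a flow source
  // and queries DTLB/DCache with the generated address; s1 consumes the TLB
  // response and issues forwarding/nuke queries; s2 consumes the DCache
  // response and collects replay causes; s3 writes back and updates the LSQ.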
  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_valid       = Wire(Bool())
  val s0_mmio_select = Wire(Bool())
  val s0_kill        = Wire(Bool())
  val s0_can_go      = s1_ready
  val s0_fire        = s0_valid && s0_can_go
  val s0_mmio_fire   = s0_mmio_select && s0_can_go
  val s0_out         = Wire(new LqWriteBundle)

  // flow source bundle
  class FlowSource extends Bundle {
    val vaddr         = UInt(VAddrBits.W)
    val mask          = UInt((VLEN/8).W)
    val uop           = new DynInst
    val try_l2l       = Bool()
    val has_rob_entry = Bool()
    val rsIdx         = UInt(log2Up(MemIQSizeMax).W)
    val rep_carry     = new ReplayCarry(nWays)
    val mshrid        = UInt(log2Up(cfg.nMissEntries).W)
    val isFirstIssue  = Bool()
    val fast_rep      = Bool()
    val ld_rep        = Bool()
    val l2l_fwd       = Bool()
    val prf           = Bool()
    val prf_rd        = Bool()
    val prf_wr        = Bool()
    val sched_idx     = UInt(log2Up(LoadQueueReplaySize+1).W)
    val hlv           = Bool()
    val hlvx          = Bool()
    // Record the issue port idx of the load issue queue. This signal is used by load cancel.
    val deqPortIdx    = UInt(log2Ceil(LoadPipelineWidth).W)
    // vec only
    val isvec         = Bool()
    val is128bit      = Bool()
    val uop_unit_stride_fof = Bool()
    val reg_offset    = UInt(vOffsetBits.W)
    val vecActive     = Bool() // 1: vector active element or scalar mem operation, 0: vector inactive element
    val is_first_ele  = Bool()
    val flowPtr       = new VlflowPtr
  }
  val s0_sel_src = Wire(new FlowSource)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.replay)
  // src1: fast load replay (io.fast_rep_in)
  // src2: mmio (io.lsq.uncache)
  // src3: load replayed by LSQ (io.replay)
  // src4: hardware prefetch from prefetcher (high confidence) (io.prefetch)
  // src5: int read / software prefetch first issue from RS (io.in)
  // src6: vec read from RS (io.vecldin)
  // src7: load tries pointer chasing when there is no issued or replayed load (io.fastpath)
  // src8: hardware prefetch from prefetcher (low confidence) (io.prefetch)
  // priority: high to low
  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
  val s0_super_ld_rep_valid  = io.replay.valid && io.replay.bits.forward_tlDchannel
  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
  val s0_ld_mmio_valid       = io.lsq.uncache.valid
  val s0_ld_rep_valid        = io.replay.valid && !io.replay.bits.forward_tlDchannel && !s0_rep_stall
  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
  val s0_vec_iss_valid       = io.vecldin.valid
  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid
  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
  dontTouch(s0_super_ld_rep_valid)
  dontTouch(s0_ld_fast_rep_valid)
  dontTouch(s0_ld_mmio_valid)
  dontTouch(s0_ld_rep_valid)
  dontTouch(s0_high_conf_prf_valid)
  dontTouch(s0_int_iss_valid)
  dontTouch(s0_vec_iss_valid)
  dontTouch(s0_l2l_fwd_valid)
  dontTouch(s0_low_conf_prf_valid)
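  // The ready chain below is a fixed-priority arbiter: a source is ready only
  // when every higher-priority source is invalid. A minimal equivalent sketch
  // (illustrative only, with `valids` ordered from highest priority down):
  //   val readys = valids.scanLeft(true.B)((noHigher, v) => noHigher && !v)
  //   // readys(i) is true iff none of valids(0 until i) is asserted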
  // load flow source ready
  val s0_super_ld_rep_ready  = WireInit(true.B)
  val s0_ld_fast_rep_ready   = !s0_super_ld_rep_valid
  val s0_ld_mmio_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid
  val s0_ld_rep_ready        = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid
  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid &&
                               !s0_ld_rep_valid

  val s0_int_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid

  val s0_vec_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid

  val s0_l2l_fwd_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid

  val s0_low_conf_prf_ready  = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_mmio_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid &&
                               !s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_ready)
  dontTouch(s0_ld_fast_rep_ready)
  dontTouch(s0_ld_mmio_ready)
  dontTouch(s0_ld_rep_ready)
  dontTouch(s0_high_conf_prf_ready)
  dontTouch(s0_int_iss_ready)
  dontTouch(s0_vec_iss_ready)
  dontTouch(s0_l2l_fwd_ready)
  dontTouch(s0_low_conf_prf_ready)

  // load flow source select (OH)
  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
  val s0_ld_mmio_select      = s0_ld_mmio_valid && s0_ld_mmio_ready
  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_select)
  dontTouch(s0_ld_fast_rep_select)
  dontTouch(s0_ld_mmio_select)
  dontTouch(s0_ld_rep_select)
  dontTouch(s0_hw_prf_select)
  dontTouch(s0_int_iss_select)
  dontTouch(s0_vec_iss_select)
  dontTouch(s0_l2l_fwd_select)

  s0_valid := (s0_super_ld_rep_valid ||
               s0_ld_fast_rep_valid ||
               s0_ld_rep_valid ||
               s0_high_conf_prf_valid ||
               s0_int_iss_valid ||
               s0_vec_iss_valid ||
               s0_l2l_fwd_valid ||
               s0_low_conf_prf_valid) && !s0_ld_mmio_select && io.dcache.req.ready && !s0_kill

  s0_mmio_select := s0_ld_mmio_select && !s0_kill

  // pointer chasing is tried only when S0 can go (s1 is ready) and dcache is ready
  val s0_try_ptr_chasing      = s0_l2l_fwd_select
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled

  // prefetch related ctrl signal
  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready

  // query DTLB
  io.tlb.req.valid                   := s0_valid
  io.tlb.req.bits.cmd                := Mux(s0_sel_src.prf,
                                          Mux(s0_sel_src.prf_wr, TlbCmd.write, TlbCmd.read),
                                          TlbCmd.read
                                        )
  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_sel_src.vaddr)
  io.tlb.req.bits.hyperinst          := s0_sel_src.hlv
  io.tlb.req.bits.hlvx               := s0_sel_src.hlvx
  io.tlb.req.bits.size               := Mux(s0_sel_src.isvec, io.vecldin.bits.alignedType, LSUOpType.size(s0_sel_src.uop.fuOpType))
  io.tlb.req.bits.kill               := s0_kill
  io.tlb.req.bits.memidx.is_ld       := true.B
  io.tlb.req.bits.memidx.is_st       := false.B
  io.tlb.req.bits.memidx.idx         := s0_sel_src.uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx       := s0_sel_src.uop.robIdx
  io.tlb.req.bits.no_translate       := s0_hw_prf_select // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc           := s0_sel_src.uop.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_sel_src.isFirstIssue
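  // Note: a hardware prefetch request already carries a physical address; it
  // is fed through the vaddr port above and no_translate is asserted, so the
  // DTLB passes it through without performing a lookup.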
  // query DCache
  io.dcache.req.valid             := s0_valid
  io.dcache.req.bits.cmd          := Mux(s0_sel_src.prf_rd,
                                       MemoryOpConstants.M_PFR,
                                       Mux(s0_sel_src.prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
                                     )
  io.dcache.req.bits.vaddr        := s0_sel_src.vaddr
  io.dcache.req.bits.mask         := s0_sel_src.mask
  io.dcache.req.bits.data         := DontCare
  io.dcache.req.bits.isFirstIssue := s0_sel_src.isFirstIssue
  io.dcache.req.bits.instrtype    := Mux(s0_sel_src.prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
  io.dcache.req.bits.debug_robIdx := s0_sel_src.uop.robIdx.value
  io.dcache.req.bits.replayCarry  := s0_sel_src.rep_carry
  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
  io.dcache.pf_source             := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)
  io.dcache.req.bits.lqIdx        := s0_sel_src.uop.lqIdx

  // load flow priority mux
  def fromNullSource(): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out
  }

  def fromFastReplaySource(src: LqWriteBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.vaddr
    out.mask          := src.mask
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := src.hasROBEntry
    out.rep_carry     := src.rep_info.rep_carry
    out.mshrid        := src.rep_info.mshr_id
    out.rsIdx         := src.rsIdx
    out.isFirstIssue  := false.B
    out.fast_rep      := true.B
    out.ld_rep        := src.isLoadReplay
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    out.prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := src.schedIndex
    out.hlv           := LSUOpType.isHlv(src.uop.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.fuOpType)
    out.vecActive     := true.B // true for scalar load
    out
  }

  def fromMmioSource(src: MemExuOutput) = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := 0.U
    out.mask          := 0.U
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := false.B
    out.rsIdx         := 0.U
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry)
    out.mshrid        := 0.U
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := false.B
    out.prf_rd        := false.B
    out.prf_wr        := false.B
    out.sched_idx     := 0.U
    out.hlv           := LSUOpType.isHlv(src.uop.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.fuOpType)
    out.vecActive     := true.B
    out
  }

  def fromNormalReplaySource(src: LsPipelineBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.vaddr
    out.mask          := genVWmask(src.vaddr, src.uop.fuOpType(1, 0))
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := true.B
    out.rsIdx         := src.rsIdx
    out.rep_carry     := src.replayCarry
    out.mshrid        := src.mshrid
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := true.B
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    out.prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := src.schedIndex
    out.hlv           := LSUOpType.isHlv(src.uop.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.fuOpType)
    out.vecActive     := true.B // true for scalar load
    out
  }
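  // genVWmask above builds the byte-enable mask for the (VLEN/8)-byte access
  // window. Assuming the usual genWmask convention (a size-encoded run of
  // ones shifted left by the low address bits), a 4-byte load whose vaddr
  // ends in 0b0100 would yield the mask 0b0000_0000_1111_0000.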
  def fromPrefetchSource(src: L1PrefetchReq): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.getVaddr()
    out.mask          := 0.U
    out.uop           := DontCare
    out.try_l2l       := false.B
    out.has_rob_entry := false.B
    out.rsIdx         := 0.U
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry)
    out.mshrid        := 0.U
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := true.B
    out.prf_rd        := !src.is_store
    out.prf_wr        := src.is_store
    out.sched_idx     := 0.U
    out.hlv           := false.B
    out.hlvx          := false.B
    out.vecActive     := true.B // true for scalar load
    out
  }

  def fromIntIssueSource(src: MemExuInput): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.src(0) + SignExt(src.uop.imm(11, 0), VAddrBits)
    out.mask          := genVWmask(out.vaddr, src.uop.fuOpType(1, 0))
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := true.B
    out.rsIdx         := src.iqIdx
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry)
    out.mshrid        := 0.U
    out.isFirstIssue  := true.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    out.prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := 0.U
    out.hlv           := LSUOpType.isHlv(src.uop.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.fuOpType)
    out.vecActive     := true.B // true for scalar load
    out
  }

  def fromVecIssueSource(src: VecLoadPipeBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.vaddr
    out.mask          := src.mask
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := true.B
    // TODO: VLSU, implement vector feedback
    out.rsIdx         := 0.U
    // TODO: VLSU, implement replay carry
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry)
    out.mshrid        := 0.U
    // TODO: VLSU, implement first issue
    out.isFirstIssue  := src.isFirstIssue
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := false.B
    out.prf_rd        := false.B
    out.prf_wr        := false.B
    out.sched_idx     := 0.U
    out.hlv           := false.B
    out.hlvx          := false.B
    // Vector load interface
    out.isvec         := true.B
    // vector loads only access a single element at a time, so the 128-bit path is not used for now
    out.is128bit      := false.B
    out.uop_unit_stride_fof := src.uop_unit_stride_fof
    // out.rob_idx_valid := src.rob_idx_valid
    // out.inner_idx     := src.inner_idx
    // out.rob_idx       := src.rob_idx
    out.reg_offset    := src.reg_offset
    // out.offset        := src.offset
    out.vecActive     := src.vecActive
    out.is_first_ele  := src.is_first_ele
    out.flowPtr       := src.flowPtr
    out
  }
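  // Every issue source above is normalized into a FlowSource so the stage-0
  // datapath stays source-agnostic; fromLoadToLoadSource below completes the
  // set, and s0_sel_src then picks one entry with ParallelPriorityMux using
  // the one-hot selects computed earlier.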
  def fromLoadToLoadSource(src: LoadToLoadIO): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5, 0))
    out.mask  := genVWmask(0.U, LSUOpType.ld)
    // When there's no valid instruction from RS and LSQ, we try load-to-load forwarding.
    // Assume the pointer-chasing load is always a ld.
    out.uop.fuOpType  := LSUOpType.ld
    out.try_l2l       := true.B
    // we don't care about out.isFirstIssue, out.rsIdx and s0_sqIdx in S0 when trying pointer chasing,
    // because these signals will be updated in S1
    out.has_rob_entry := false.B
    out.rsIdx         := 0.U
    out.mshrid        := 0.U
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry)
    out.isFirstIssue  := true.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := true.B
    out.prf           := false.B
    out.prf_rd        := false.B
    out.prf_wr        := false.B
    out.sched_idx     := 0.U
    out.hlv           := LSUOpType.isHlv(out.uop.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(out.uop.fuOpType)
    out.vecActive     := true.B // true for scalar load
    out
  }

  // set default
  val s0_src_selector = Seq(
    s0_super_ld_rep_select,
    s0_ld_fast_rep_select,
    s0_ld_mmio_select,
    s0_ld_rep_select,
    s0_hw_prf_select,
    s0_int_iss_select,
    s0_vec_iss_select,
    (if (EnableLoadToLoadForward) s0_l2l_fwd_select else true.B)
  )
  val s0_src_format = Seq(
    fromNormalReplaySource(io.replay.bits),
    fromFastReplaySource(io.fast_rep_in.bits),
    fromMmioSource(io.lsq.uncache.bits),
    fromNormalReplaySource(io.replay.bits),
    fromPrefetchSource(io.prefetch_req.bits),
    fromIntIssueSource(io.ldin.bits),
    fromVecIssueSource(io.vecldin.bits),
    (if (EnableLoadToLoadForward) fromLoadToLoadSource(io.l2l_fwd_in) else fromNullSource())
  )
  s0_sel_src := ParallelPriorityMux(s0_src_selector, s0_src_format)

  // address align check
  val s0_addr_aligned = LookupTree(Mux(s0_sel_src.isvec, io.vecldin.bits.alignedType, s0_sel_src.uop.fuOpType(1, 0)), List(
    "b00".U -> true.B,                           // b
    "b01".U -> (s0_sel_src.vaddr(0)    === 0.U), // h
    "b10".U -> (s0_sel_src.vaddr(1, 0) === 0.U), // w
    "b11".U -> (s0_sel_src.vaddr(2, 0) === 0.U)  // d
  ))
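  // e.g. a word access ("b10") whose vaddr ends in 0b10 has vaddr(1,0) =/= 0,
  // so s0_addr_aligned is false and loadAddrMisaligned is raised below for an
  // active element.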
  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetch needs writeback to loadQueueFlag
  s0_out               := DontCare
  s0_out.rsIdx         := s0_sel_src.rsIdx
  s0_out.vaddr         := s0_sel_src.vaddr
  s0_out.mask          := s0_sel_src.mask
  s0_out.uop           := s0_sel_src.uop
  s0_out.isFirstIssue  := s0_sel_src.isFirstIssue
  s0_out.hasROBEntry   := s0_sel_src.has_rob_entry
  s0_out.isPrefetch    := s0_sel_src.prf
  s0_out.isHWPrefetch  := s0_hw_prf_select
  s0_out.isFastReplay  := s0_sel_src.fast_rep
  s0_out.isLoadReplay  := s0_sel_src.ld_rep
  s0_out.isFastPath    := s0_sel_src.l2l_fwd
  s0_out.mshrid        := s0_sel_src.mshrid
  s0_out.isvec         := s0_sel_src.isvec
  s0_out.is128bit      := s0_sel_src.is128bit
  s0_out.uop_unit_stride_fof := s0_sel_src.uop_unit_stride_fof
  // s0_out.rob_idx_valid := s0_rob_idx_valid
  // s0_out.inner_idx     := s0_inner_idx
  // s0_out.rob_idx       := s0_rob_idx
  s0_out.reg_offset    := s0_sel_src.reg_offset
  // s0_out.offset        := s0_offset
  s0_out.vecActive     := s0_sel_src.vecActive
  s0_out.is_first_ele  := s0_sel_src.is_first_ele
  s0_out.flowPtr       := s0_sel_src.flowPtr
  s0_out.uop.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned && s0_sel_src.vecActive
  s0_out.forward_tlDchannel := s0_super_ld_rep_select
  when(io.tlb.req.valid && s0_sel_src.isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }.otherwise{
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_sel_src.uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex := s0_sel_src.sched_idx

  // load fast replay
  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)

  // mmio
  io.lsq.uncache.ready := s0_mmio_fire

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.ldin.ready := s0_can_go && io.dcache.req.ready && s0_int_iss_ready
  io.vecldin.ready := s0_can_go && io.dcache.req.ready && s0_vec_iss_ready

  // for hw prefetch load flow feedback, to be added later
  // io.prefetch_in.ready := s0_hw_prf_select

  // dcache replacement extra info
  // TODO: should prefetch load update replacement?
  io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.replay.bits.replacementUpdated, false.B)

  // load wakeup
  io.wakeup.valid := s0_fire && (s0_super_ld_rep_select || s0_ld_fast_rep_select || s0_ld_rep_select || s0_int_iss_select) || s0_mmio_fire
  io.wakeup.bits := s0_out.uop

  XSDebug(io.dcache.req.fire,
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_sel_src.uop.pc)}, vaddr ${Hexadecimal(s0_sel_src.vaddr)}\n"
  )
  XSDebug(s0_valid,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid      = RegInit(false.B)
  val s1_in         = Wire(new LqWriteBundle)
  val s1_out        = Wire(new LqWriteBundle)
  val s1_kill       = Wire(Bool())
  val s1_can_go     = s2_ready
  val s1_fire       = s1_valid && !s1_kill && s1_can_go
  val s1_vecActive  = RegEnable(s0_out.vecActive, true.B, s0_fire)
  val s1_vec_alignedType = RegEnable(io.vecldin.bits.alignedType, s0_fire)

  s1_ready := !s1_valid || s1_kill || s2_ready
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
  s1_in := RegEnable(s0_out, s0_fire)

  val s1_fast_rep_dly_kill = RegNext(io.fast_rep_in.bits.lateKill) && s1_in.isFastReplay
  val s1_fast_rep_dly_err  = RegNext(io.fast_rep_in.bits.delayedLoadError) && s1_in.isFastReplay
  val s1_l2l_fwd_dly_err   = RegNext(io.l2l_fwd_in.dly_ld_err) && s1_in.isFastPath
  val s1_dly_err           = s1_fast_rep_dly_err || s1_l2l_fwd_dly_err
  val s1_vaddr_hi          = Wire(UInt())
  val s1_vaddr_lo          = Wire(UInt())
  val s1_vaddr             = Wire(UInt())
  val s1_paddr_dup_lsu     = Wire(UInt())
  val s1_gpaddr_dup_lsu    = Wire(UInt())
  val s1_paddr_dup_dcache  = Wire(UInt())
  val s1_exception         = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, LduCfg).asUInt.orR // af & pf exceptions are modified below
  val s1_tlb_miss          = io.tlb.resp.bits.miss
  val s1_prf               = s1_in.isPrefetch
  val s1_hw_prf            = s1_in.isHWPrefetch
  val s1_sw_prf            = s1_prf && !s1_hw_prf
  val s1_tlb_memidx        = io.tlb.resp.bits.memidx
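  // The TLB response provides two copies of the same physical address
  // (paddr(0)/paddr(1), assigned below); one feeds the LSU-side logic and
  // one the dcache, presumably duplicated to keep the fanout of this
  // timing-critical bus low.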
  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo         := s1_in.vaddr(5, 0)
  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)
  s1_gpaddr_dup_lsu   := io.tlb.resp.bits.gpaddr(0)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
    // printf("load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill   := s1_kill || s1_dly_err
  io.tlb.resp.ready := true.B

  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.dcache.s1_kill             := s1_kill || s1_dly_err || s1_tlb_miss || s1_exception

  // store to load forwarding
  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.sbuffer.vaddr := s1_vaddr
  io.sbuffer.paddr := s1_paddr_dup_lsu
  io.sbuffer.gpaddr:= s1_gpaddr_dup_lsu
  io.sbuffer.uop   := s1_in.uop
  io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.sbuffer.mask  := s1_in.mask
  io.sbuffer.pc    := s1_in.uop.pc // FIXME: remove it

  io.vec_forward.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_prf)
  io.vec_forward.vaddr := s1_vaddr
  io.vec_forward.paddr := s1_paddr_dup_lsu
  io.vec_forward.gpaddr:= s1_gpaddr_dup_lsu
  io.vec_forward.uop   := s1_in.uop
  io.vec_forward.sqIdx := s1_in.uop.sqIdx
  io.vec_forward.mask  := s1_in.mask
  io.vec_forward.pc    := s1_in.uop.pc // FIXME: remove it

  io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.lsq.forward.vaddr     := s1_vaddr
  io.lsq.forward.paddr     := s1_paddr_dup_lsu
  io.lsq.forward.gpaddr    := s1_gpaddr_dup_lsu
  io.lsq.forward.uop       := s1_in.uop
  io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
  io.lsq.forward.sqIdxMask := 0.U
  io.lsq.forward.mask      := s1_in.mask
  io.lsq.forward.pc        := s1_in.uop.pc // FIXME: remove it

  // st-ld violation query
  // val s1_nuke_paddr_match = VecInit((0 until StorePipelineWidth).map(w => {Mux(s1_isvec && s1_in.is128bit,
  //   s1_paddr_dup_lsu(PAddrBits-1, 4) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 4),
  //   s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3))}))
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                  io.stld_nuke_query(w).valid && // query valid
                  isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                  (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                  (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask overlaps
                })).asUInt.orR && !s1_tlb_miss
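  // Nuke check above in words: if an older in-flight store writes the same
  // 8-byte-aligned physical double word (paddr compared down to bit 3) and
  // its byte mask overlaps this load's mask, the load may have read stale
  // data and must be replayed.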
  s1_out                  := s1_in
  s1_out.vaddr            := s1_vaddr
  s1_out.paddr            := s1_paddr_dup_lsu
  s1_out.gpaddr           := s1_gpaddr_dup_lsu
  s1_out.tlbMiss          := s1_tlb_miss
  s1_out.ptwBack          := io.tlb.resp.bits.ptwBack
  s1_out.rsIdx            := s1_in.rsIdx
  s1_out.rep_info.debug   := s1_in.uop.debugInfo
  s1_out.rep_info.nuke    := s1_nuke && !s1_sw_prf
  s1_out.delayedLoadError := s1_dly_err

  when (!s1_dly_err) {
    // the current ori test will cause the case of ldest == 0; the code below will be modified in the future
    // af & pf exceptions are modified here
    s1_out.uop.exceptionVec(loadPageFault)      := io.tlb.resp.bits.excp(0).pf.ld && s1_vecActive && !s1_tlb_miss
    s1_out.uop.exceptionVec(loadGuestPageFault) := io.tlb.resp.bits.excp(0).gpf.ld && !s1_tlb_miss
    s1_out.uop.exceptionVec(loadAccessFault)    := io.tlb.resp.bits.excp(0).af.ld && s1_vecActive && !s1_tlb_miss
  } .otherwise {
    s1_out.uop.exceptionVec(loadPageFault)      := false.B
    s1_out.uop.exceptionVec(loadGuestPageFault) := false.B
    s1_out.uop.exceptionVec(loadAddrMisaligned) := false.B
    s1_out.uop.exceptionVec(loadAccessFault)    := s1_dly_err && s1_vecActive
  }

  // pointer chasing
  val s1_try_ptr_chasing      = RegNext(s0_do_try_ptr_chasing, false.B)
  val s1_ptr_chasing_vaddr    = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
  val s1_fu_op_type_not_ld    = WireInit(false.B)
  val s1_not_fast_match       = WireInit(false.B)
  val s1_addr_mismatch        = WireInit(false.B)
  val s1_addr_misaligned      = WireInit(false.B)
  val s1_fast_mismatch        = WireInit(false.B)
  val s1_ptr_chasing_canceled = WireInit(false.B)
  val s1_cancel_ptr_chasing   = WireInit(false.B)

  s1_kill := s1_fast_rep_dly_kill ||
             s1_cancel_ptr_chasing ||
             s1_in.uop.robIdx.needFlush(io.redirect) ||
             (s1_in.uop.robIdx.needFlush(RegNext(io.redirect)) && !RegNext(s0_try_ptr_chasing)) ||
             RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid || io.vecldin.valid)

  if (EnableLoadToLoadForward) {
    // Sometimes, we need to cancel the load-load forwarding.
    // These checks can be moved to S0 if timing is bad at S1.
    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (the lowest 6-bit addition has an overflow)
    s1_addr_mismatch := s1_ptr_chasing_vaddr(6) ||
                        RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
    // Case 1: the address is not 64-bit aligned or the fuOpType is not LD
    s1_addr_misaligned := s1_ptr_chasing_vaddr(2, 0).orR
    s1_fu_op_type_not_ld := io.ldin.bits.uop.fuOpType =/= LSUOpType.ld
    // Case 2: this load-load uop is cancelled
    s1_ptr_chasing_canceled := !io.ldin.valid
    // Case 3: fast mismatch
    s1_fast_mismatch := RegEnable(!io.ld_fast_match, s0_do_try_ptr_chasing)

    when (s1_try_ptr_chasing) {
      s1_cancel_ptr_chasing := s1_addr_mismatch ||
                               s1_addr_misaligned ||
                               s1_fu_op_type_not_ld ||
                               s1_ptr_chasing_canceled ||
                               s1_fast_mismatch

      s1_in.uop           := io.ldin.bits.uop
      s1_in.rsIdx         := io.ldin.bits.iqIdx
      s1_in.isFirstIssue  := io.ldin.bits.isFirstIssue
      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)

      // record the tlb time when getting the data, to ensure the correctness of the latency calculation
      // (although it should not be recorded here, because the fast path does not use the tlb)
      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
    }
    when (!s1_cancel_ptr_chasing) {
      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire
      when (s1_try_ptr_chasing) {
        io.ldin.ready := true.B
      }
    }
  }
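  // Case 0 above in numbers: with 64 B cache lines the set index starts at
  // vaddr(6), so if base(5,0) + imm(5,0) carries into bit 6 (e.g. 0x3e + 0x04)
  // the line actually touched differs from the speculated one, and chasing is
  // cancelled in favor of the normal path.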
  // pre-calculate the sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
  // to enable load-load forwarding, sqIdxMask must be calculated based on ldin.uop
  // If the timing here is not OK, load-load forwarding has to be disabled.
  // Or should we calculate sqIdxMask at RS?
  io.lsq.forward.sqIdxMask := s1_sqIdx_mask
  if (EnableLoadToLoadForward) {
    when (s1_try_ptr_chasing) {
      io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize)
    }
  }

  io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel
  io.forward_mshr.mshrid := s1_out.mshrid
  io.forward_mshr.paddr  := s1_out.paddr

  XSDebug(s1_valid,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // s2: DCache resp
  val s2_valid  = RegInit(false.B)
  val s2_in     = Wire(new LqWriteBundle)
  val s2_out    = Wire(new LqWriteBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go
  val s2_vecActive = RegEnable(s1_out.vecActive, true.B, s1_fire)
  val s2_isvec  = RegEnable(s1_out.isvec, false.B, s1_fire)
  val s2_vec_alignedType = RegEnable(s1_vec_alignedType, s1_fire)

  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }
  s2_in := RegEnable(s1_out, s1_fire)

  val s2_pmp = WireInit(io.pmp)

  val s2_prf    = s2_in.isPrefetch
  val s2_hw_prf = s2_in.isHWPrefetch

  // exception that may cause the load addr to be invalid / illegal
  // if such an exception happens, that inst and its exception info
  // will be force-written back to the rob
  val s2_exception_vec = WireInit(s2_in.uop.exceptionVec)
  when (!s2_in.delayedLoadError) {
    s2_exception_vec(loadAccessFault) := (s2_in.uop.exceptionVec(loadAccessFault) || s2_pmp.ld ||
                                         (io.dcache.resp.bits.tag_error && RegNext(io.csrCtrl.cache_error_enable))) && s2_vecActive
  }

  // soft prefetch will not trigger any exception (but ecc error interrupt may
  // be triggered)
  when (!s2_in.delayedLoadError && (s2_prf || s2_in.tlbMiss)) {
    s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
  }
  val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, LduCfg).asUInt.orR && s2_vecActive

  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward()
  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)
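  // A load that misses in the dcache can still be completed here by snooping
  // refill data in flight: either from the TileLink D channel or from the
  // matching MSHR. When such forwarding succeeds, the miss-related replay
  // causes below are suppressed.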
  // writeback access fault caused by ecc error / bus error
  // * ecc data error is slow to generate, so we will not use it until load stage 3
  // * in load stage 3, an extra signal io.load_error will be used to
  val s2_actually_mmio = s2_pmp.mmio
  val s2_mmio          = !s2_prf &&
                          s2_actually_mmio &&
                         !s2_exception &&
                         !s2_in.tlbMiss

  val s2_full_fwd = Wire(Bool())
  val s2_mem_amb  = s2_in.uop.storeSetHit &&
                    io.lsq.forward.addrInvalid

  val s2_tlb_miss    = s2_in.tlbMiss
  val s2_fwd_fail    = io.lsq.forward.dataInvalid || io.vec_forward.dataInvalid
  val s2_dcache_miss = io.dcache.resp.bits.miss &&
                       !s2_fwd_frm_d_chan_or_mshr &&
                       !s2_full_fwd

  val s2_mq_nack = io.dcache.s2_mq_nack &&
                   !s2_fwd_frm_d_chan_or_mshr &&
                   !s2_full_fwd

  val s2_bank_conflict = io.dcache.s2_bank_conflict &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_rar_nack = io.lsq.ldld_nuke_query.req.valid &&
                    !io.lsq.ldld_nuke_query.req.ready

  val s2_raw_nack = io.lsq.stld_nuke_query.req.valid &&
                    !io.lsq.stld_nuke_query.req.ready

  // st-ld violation query
  // NeedFastRecovery is valid when:
  // 1. the fast recovery query request is valid
  // 2. the load instruction is younger than the requesting store instructions
  // 3. the physical addresses match
  // 4. the data masks overlap
  val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                  io.stld_nuke_query(w).valid && // query valid
                  isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                  // TODO: Fix me when vector instruction
                  (s2_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                  (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask overlaps
                })).asUInt.orR && !s2_tlb_miss || s2_in.rep_info.nuke

  val s2_cache_handled   = io.dcache.resp.bits.handled
  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
                           io.dcache.resp.bits.tag_error

  val s2_troublem = !s2_exception &&
                    !s2_mmio &&
                    !s2_prf &&
                    !s2_in.delayedLoadError

  io.dcache.resp.ready := true.B
  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_in.delayedLoadError || s2_mmio || s2_prf)
  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")

  // fast replay require
  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
  val s2_nuke_fast_rep   = !s2_mq_nack &&
                           !s2_dcache_miss &&
                           !s2_bank_conflict &&
                           !s2_wpu_pred_fail &&
                           !s2_rar_nack &&
                           !s2_raw_nack &&
                           s2_nuke

  val s2_fast_rep = !s2_mem_amb &&
                    !s2_tlb_miss &&
                    !s2_fwd_fail &&
                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
                    s2_troublem

  // need to allocate a new entry
  val s2_can_query = !s2_mem_amb &&
                     !s2_tlb_miss &&
                     !s2_fwd_fail &&
                     s2_troublem

  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)

  // ld-ld violation require
  io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // st-ld violation require
  io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)
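  // Replay routing in short: transient dcache-side rejections (miss queue
  // nack, bank conflict, way-predictor miss) and nukes take the one-cycle
  // fast replay path when nothing else blocks them; structural or
  // longer-latency causes (memory ambiguity, tlb miss, forward fail, dcache
  // miss) go back through the load replay queue instead.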
  // merge forward result
  // lsq has higher priority than sbuffer
  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid && !io.vec_forward.dataInvalid
  // generate VLEN/8 Muxes
  for (i <- 0 until VLEN / 8) {
    s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i) || io.vec_forward.forwardMask(i)
    s2_fwd_data(i) := Mux(
      io.lsq.forward.forwardMask(i),
      io.lsq.forward.forwardData(i),
      Mux(
        io.vec_forward.forwardMask(i),
        io.vec_forward.forwardData(i),
        io.sbuffer.forwardData(i)
      )
    )
  }

  XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_in.uop.pc,
    io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt,
    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
  )
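  // Per-byte forwarding priority above: a byte supplied by the store queue
  // wins over the vec store flow queue, which wins over the sbuffer. E.g. if
  // the LSQ forwards byte 0 and the sbuffer byte 1, the merged data takes
  // byte 0 from the LSQ and byte 1 from the sbuffer.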
  //
  s2_out                  := s2_in
  s2_out.data             := 0.U // data will be generated in load s3
  s2_out.uop.fpWen        := s2_in.uop.fpWen && !s2_exception
  s2_out.mmio             := s2_mmio
  s2_out.uop.flushPipe    := false.B
  s2_out.uop.exceptionVec := s2_exception_vec
  s2_out.forwardMask      := s2_fwd_mask
  s2_out.forwardData      := s2_fwd_data
  s2_out.handledByMSHR    := s2_cache_handled
  s2_out.miss             := s2_dcache_miss && s2_troublem
  s2_out.feedbacked       := io.feedback_fast.valid

  // Generate replay signal caused by:
  // * st-ld violation check
  // * tlb miss
  // * dcache replay
  // * forward data invalid
  // * dcache miss
  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
  s2_out.rep_info.full_fwd        := s2_data_fwded
  s2_out.rep_info.data_inv_sq_idx := Mux(io.vec_forward.dataInvalid, s2_out.uop.sqIdx, io.lsq.forward.dataInvalidSqIdx)
  s2_out.rep_info.addr_inv_sq_idx := Mux(io.vec_forward.addrInvalid, s2_out.uop.sqIdx, io.lsq.forward.addrInvalidSqIdx)
  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
  s2_out.rep_info.debug           := s2_in.uop.debugInfo
  s2_out.rep_info.tlb_id          := io.tlb_hint.id
  s2_out.rep_info.tlb_full        := io.tlb_hint.full

  // if forwarding fails, replay this inst from fetch
  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
  // if an ld-ld violation is detected, replay this inst from fetch
  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss

  // to be removed
  io.feedback_fast.valid                 := false.B
  io.feedback_fast.bits.hit              := false.B
  io.feedback_fast.bits.flushState       := s2_in.ptwBack
  io.feedback_fast.bits.robIdx           := s2_in.uop.robIdx
  io.feedback_fast.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_fast.bits.dataInvalidSqIdx := DontCare

  io.ldCancel.ld1Cancel := false.B

  // fast wakeup
  io.fast_uop.valid := RegNext(
    !io.dcache.s1_disable_fast_wakeup &&
    s1_valid &&
    !s1_kill &&
    !io.tlb.resp.bits.miss &&
    !io.lsq.forward.dataInvalidFast
  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio) && !s2_isvec
  io.fast_uop.bits := RegNext(s1_out.uop)

  //
  io.s2_ptr_chasing := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)

  // RegNext prefetch train for better timing
  // ** Now, prefetch train is valid at load s3 **
  io.prefetch_train.valid              := RegNext(s2_valid && !s2_actually_mmio && !s2_in.tlbMiss)
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in, latch = true)
  io.prefetch_train.bits.miss          := RegNext(io.dcache.resp.bits.miss) // TODO: use trace with bank conflict?
  io.prefetch_train.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
  io.prefetch_train.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)

  io.prefetch_train_l1.valid              := RegNext(s2_valid && !s2_actually_mmio)
  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in, latch = true)
  io.prefetch_train_l1.bits.miss          := RegNext(io.dcache.resp.bits.miss)
  io.prefetch_train_l1.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
  io.prefetch_train_l1.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)
  if (env.FPGAPlatform){
    io.dcache.s0_pc := DontCare
    io.dcache.s1_pc := DontCare
    io.dcache.s2_pc := DontCare
  } else {
    io.dcache.s0_pc := s0_out.uop.pc
    io.dcache.s1_pc := s1_out.uop.pc
    io.dcache.s2_pc := s2_out.uop.pc
  }
  io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill

  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready
  val s2_ld_valid_dup = RegInit(0.U(6.W))
  s2_ld_valid_dup := 0x0.U(6.W)
  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
  when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) }
  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))
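  // s2_ld_valid_dup above keeps six identical copies of the stage-2 valid
  // bit; they later drive the LSQ data write enables (data_wen_dup in stage
  // 3), presumably to split the fanout of a timing-critical control signal.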
  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 3
  // --------------------------------------------------------------------------------
  // writeback and update load queue
  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
  val s3_in           = RegEnable(s2_out, s2_fire)
  val s3_out          = Wire(Valid(new MemExuOutput))
  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
  val s3_fast_rep     = Wire(Bool())
  val s3_troublem     = RegNext(s2_troublem)
  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
  val s3_vecout       = Wire(new OnlyVecExuOutput)
  val s3_vecActive    = RegEnable(s2_out.vecActive, true.B, s2_fire)
  val s3_isvec        = RegEnable(s2_out.isvec, false.B, s2_fire)
  val s3_vec_alignedType = RegEnable(s2_vec_alignedType, s2_fire)
  val s3_mmio         = Wire(chiselTypeOf(io.lsq.uncache))
  s3_ready := !s3_valid || s3_kill || io.ldout.ready
  s3_mmio.valid := RegNextN(io.lsq.uncache.valid, 3, Some(false.B))
  s3_mmio.ready := RegNextN(io.lsq.uncache.ready, 3, Some(false.B))
  s3_mmio.bits  := RegNextN(io.lsq.uncache.bits, 3)

  // forward last beat
  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid && s3_in.handledByMSHR)
  val s3_fast_rep_canceled = io.replay.valid && io.replay.bits.forward_tlDchannel || !io.dcache.req.ready && !s3_isvec

  // s3 load fast replay
  io.fast_rep_out.valid := s3_valid && s3_fast_rep && !s3_in.uop.robIdx.needFlush(io.redirect) && !s3_isvec
  io.fast_rep_out.bits := s3_in

  io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || s3_fast_rep_canceled) && !s3_in.feedbacked
  // TODO: check this --by hx
  // io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || !io.fast_rep_out.ready) && !s3_in.feedbacked && !s3_in.lateKill
  io.lsq.ldin.bits := s3_in
  io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid

  /* <------- DANGEROUS: Don't change sequence here ! -------> */
  io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
  io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
  io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)

  val s3_dly_ld_err =
    if (EnableAccurateLoadError) {
      io.dcache.resp.bits.error_delayed && RegNext(io.csrCtrl.cache_error_enable) && s3_troublem
    } else {
      WireInit(false.B)
    }
  io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
  io.lsq.ldin.bits.dcacheRequireReplay := s3_dcache_rep
  io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err

  val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid) && s3_troublem
  val s3_rep_frm_fetch = s3_vp_match_fail
  val s3_ldld_rep_inst =
      io.lsq.ldld_nuke_query.resp.valid &&
      io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
      RegNext(io.csrCtrl.ldld_vio_check_enable)
  val s3_flushPipe = s3_ldld_rep_inst

  val s3_rep_info = WireInit(s3_in.rep_info)
  s3_rep_info.dcache_miss := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid
  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)

  val s3_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, LduCfg).asUInt.orR && s3_vecActive
  when (s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) {
    io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
  } .otherwise {
    io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
  }
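  // Only one replay cause is reported per load: PriorityEncoderOH above keeps
  // the highest-priority (lowest-index) pending cause, so e.g. a memory
  // ambiguity wait takes precedence over a concurrent dcache miss for this
  // replay round.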
  // Int load, if hit, will be written back at s3
  s3_out.valid                := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio
  s3_out.bits.uop             := s3_in.uop
  s3_out.bits.uop.exceptionVec(loadAccessFault) := (s3_dly_ld_err || s3_in.uop.exceptionVec(loadAccessFault)) && s3_vecActive
  s3_out.bits.uop.flushPipe   := false.B
  s3_out.bits.uop.replayInst  := s3_rep_frm_fetch || s3_flushPipe
  s3_out.bits.data            := s3_in.data
  s3_out.bits.debug.isMMIO    := s3_in.mmio
  s3_out.bits.debug.isPerfCnt := false.B
  s3_out.bits.debug.paddr     := s3_in.paddr
  s3_out.bits.debug.vaddr     := s3_in.vaddr
  // Vector load
  s3_vecout.isvec             := s3_isvec
  s3_vecout.vecdata           := 0.U // Data will be assigned later
  s3_vecout.mask              := s3_in.mask
  // s3_vecout.rob_idx_valid  := s3_in.rob_idx_valid
  // s3_vecout.inner_idx      := s3_in.inner_idx
  // s3_vecout.rob_idx        := s3_in.rob_idx
  // s3_vecout.offset         := s3_in.offset
  s3_vecout.reg_offset        := s3_in.reg_offset
  s3_vecout.vecActive         := s3_vecActive
  s3_vecout.is_first_ele      := s3_in.is_first_ele
  s3_vecout.uopQueuePtr       := DontCare // uopQueuePtr is already saved in flow queue
  s3_vecout.flowPtr           := s3_in.flowPtr
  s3_vecout.elemIdx           := DontCare // elemIdx is already saved in flow queue
  s3_vecout.elemIdxInsideVd   := DontCare

  io.rollback.valid := s3_valid && (s3_rep_frm_fetch || s3_flushPipe) && !s3_exception
  io.rollback.bits             := DontCare
  io.rollback.bits.isRVC       := s3_out.bits.uop.preDecodeInfo.isRVC
  io.rollback.bits.robIdx      := s3_out.bits.uop.robIdx
  io.rollback.bits.ftqIdx      := s3_out.bits.uop.ftqPtr
  io.rollback.bits.ftqOffset   := s3_out.bits.uop.ftqOffset
  io.rollback.bits.level       := Mux(s3_rep_frm_fetch, RedirectLevel.flush, RedirectLevel.flushAfter)
  io.rollback.bits.cfiUpdate.target := s3_out.bits.uop.pc
  io.rollback.bits.debug_runahead_checkpoint_id := s3_out.bits.uop.debugInfo.runahead_checkpoint_id
  /* <------- DANGEROUS: Don't change sequence here ! -------> */

  io.lsq.ldin.bits.uop := s3_out.bits.uop

  val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep
  io.lsq.ldld_nuke_query.revoke := s3_revoke
  io.lsq.stld_nuke_query.revoke := s3_revoke

  // feedback slow
  s3_fast_rep := RegNext(s2_fast_rep)

  val s3_fb_no_waiting = !s3_in.isLoadReplay &&
                         (!(s3_fast_rep && !s3_fast_rep_canceled)) &&
                         !s3_in.feedbacked

  //
  io.feedback_slow.valid                 := s3_valid && s3_fb_no_waiting
  io.feedback_slow.bits.hit              := !s3_rep_info.need_rep || io.lsq.ldin.ready
  io.feedback_slow.bits.flushState       := s3_in.ptwBack
  io.feedback_slow.bits.robIdx           := s3_in.uop.robIdx
  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_slow.bits.dataInvalidSqIdx := DontCare

  io.ldCancel.ld2Cancel := s3_valid && (
    io.lsq.ldin.bits.rep_info.need_rep || // exe fail or
    s3_in.mmio                            // is mmio
  )

  val s3_ld_wb_meta = Mux(s3_valid, s3_out.bits, s3_mmio.bits)

  // data from load queue refill
  val s3_ld_raw_data_frm_uncache = RegNextN(io.lsq.ld_raw_data, 3)
  val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData()
  val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List(
    "b000".U -> s3_merged_data_frm_uncache(63,  0),
    "b001".U -> s3_merged_data_frm_uncache(63,  8),
    "b010".U -> s3_merged_data_frm_uncache(63, 16),
    "b011".U -> s3_merged_data_frm_uncache(63, 24),
    "b100".U -> s3_merged_data_frm_uncache(63, 32),
    "b101".U -> s3_merged_data_frm_uncache(63, 40),
    "b110".U -> s3_merged_data_frm_uncache(63, 48),
    "b111".U -> s3_merged_data_frm_uncache(63, 56)
  ))
  val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache)

  // data from dcache hit
  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)
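  // The 16-way LookupTree below slices the merged 128-bit line by paddr(3,0):
  // offsets 0..7 pick from the low 64-bit half and offsets 8..15 from the
  // high half (e.g. "b1001" selects bits (127, 72)); rdataHelper then
  // extracts and extends the addressed element.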
  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
    "b0000".U -> s3_merged_data_frm_cache(63,    0),
    "b0001".U -> s3_merged_data_frm_cache(63,    8),
    "b0010".U -> s3_merged_data_frm_cache(63,   16),
    "b0011".U -> s3_merged_data_frm_cache(63,   24),
    "b0100".U -> s3_merged_data_frm_cache(63,   32),
    "b0101".U -> s3_merged_data_frm_cache(63,   40),
    "b0110".U -> s3_merged_data_frm_cache(63,   48),
    "b0111".U -> s3_merged_data_frm_cache(63,   56),
    "b1000".U -> s3_merged_data_frm_cache(127,  64),
    "b1001".U -> s3_merged_data_frm_cache(127,  72),
    "b1010".U -> s3_merged_data_frm_cache(127,  80),
    "b1011".U -> s3_merged_data_frm_cache(127,  88),
    "b1100".U -> s3_merged_data_frm_cache(127,  96),
    "b1101".U -> s3_merged_data_frm_cache(127, 104),
    "b1110".U -> s3_merged_data_frm_cache(127, 112),
    "b1111".U -> s3_merged_data_frm_cache(127, 120)
  ))
  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)

  // FIXME: add 1 cycle delay ?
  // io.lsq.uncache.ready := !s3_valid
  val s3_outexception = ExceptionNO.selectByFu(s3_out.bits.uop.exceptionVec, LduCfg).asUInt.orR && s3_vecActive
  io.ldout.bits      := s3_ld_wb_meta
  io.ldout.bits.data := Mux(s3_valid, Mux(!s3_outexception, s3_ld_data_frm_cache, 0.U), s3_ld_data_frm_uncache)
  io.ldout.valid     := (s3_out.valid || (s3_mmio.valid && !s3_valid)) && !s3_vecout.isvec

  // TODO: check this --hx
  // io.ldout.valid := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) && !s3_vecout.isvec ||
  //   io.lsq.uncache.valid && !io.lsq.uncache.bits.uop.robIdx.needFlush(io.redirect) && !s3_out.valid && !io.lsq.uncache.bits.isVls
  // io.ldout.bits.data := Mux(s3_out.valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache)
  // io.ldout.valid := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) ||
  //   s3_mmio.valid && !s3_mmio.bits.uop.robIdx.needFlush(io.redirect) && !s3_out.valid

  // s3 load fast replay
  io.fast_rep_out.valid := s3_valid && s3_fast_rep && !s3_isvec
  io.fast_rep_out.bits := s3_in
  io.fast_rep_out.bits.lateKill := s3_rep_frm_fetch

  // vector output
  io.vecldout.bits.vec := s3_vecout
  // FIXME
  io.vecldout.bits.isPackage         := DontCare
  io.vecldout.bits.packageNum        := DontCare
  io.vecldout.bits.originAlignedType := DontCare
  io.vecldout.bits.alignedType       := DontCare
  // TODO: VLSU, uncache data logic
  val vecdata = rdataVecHelper(s3_vec_alignedType, s3_picked_data_frm_cache)
  io.vecldout.bits.vec.vecdata := vecdata
  io.vecldout.bits.data := 0.U
  // io.vecldout.bits.fflags := s3_out.bits.fflags
  // io.vecldout.bits.redirectValid := s3_out.bits.redirectValid
  // io.vecldout.bits.redirect := s3_out.bits.redirect
  io.vecldout.bits.debug := s3_out.bits.debug
  io.vecldout.bits.uop := s3_out.bits.uop
  io.vecldout.valid := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) && s3_vecout.isvec ||
    io.lsq.uncache.valid && !io.lsq.uncache.bits.uop.robIdx.needFlush(io.redirect) && !s3_out.valid && !io.lsq.uncache.bits.isVls

  io.vecReplay.valid := s3_vecout.isvec && s3_valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) &&
    io.lsq.ldin.bits.rep_info.need_rep
  io.vecReplay.bits := DontCare
  io.vecReplay.bits.uop := s3_in.uop
  io.vecReplay.bits.vaddr := s3_in.vaddr
  io.vecReplay.bits.paddr := s3_in.paddr
  io.vecReplay.bits.mask := s3_in.mask
  io.vecReplay.bits.isvec := true.B
  io.vecReplay.bits.uop_unit_stride_fof := s3_in.uop_unit_stride_fof
  io.vecReplay.bits.reg_offset := s3_in.reg_offset
  io.vecReplay.bits.vecActive := s3_in.vecActive
  io.vecReplay.bits.is_first_ele := s3_in.is_first_ele
  io.vecReplay.bits.flowPtr := s3_in.flowPtr
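  // Load-to-load forwarding source below: only a load that completed cleanly
  // (no mmio, no pending replay) may feed the fast path, and the forwarded
  // 64 bits are selected by vaddr(3) from the merged 128-bit data.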
  io.vecReplay.bits := DontCare
  io.vecReplay.bits.uop := s3_in.uop
  io.vecReplay.bits.vaddr := s3_in.vaddr
  io.vecReplay.bits.paddr := s3_in.paddr
  io.vecReplay.bits.mask := s3_in.mask
  io.vecReplay.bits.isvec := true.B
  io.vecReplay.bits.uop_unit_stride_fof := s3_in.uop_unit_stride_fof
  io.vecReplay.bits.reg_offset := s3_in.reg_offset
  io.vecReplay.bits.vecActive := s3_in.vecActive
  io.vecReplay.bits.is_first_ele := s3_in.is_first_ele
  io.vecReplay.bits.flowPtr := s3_in.flowPtr

  // fast load to load forward
  if (EnableLoadToLoadForward) {
    io.l2l_fwd_out.valid := s3_valid && !s3_in.mmio && !s3_rep_info.need_rep
    io.l2l_fwd_out.data := Mux(s3_in.vaddr(3), s3_merged_data_frm_cache(127, 64), s3_merged_data_frm_cache(63, 0))
    io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err || // ecc delayed error
      s3_ldld_rep_inst ||
      s3_rep_frm_fetch
  } else {
    io.l2l_fwd_out.valid := false.B
    io.l2l_fwd_out.data := DontCare
    io.l2l_fwd_out.dly_ld_err := DontCare
  }

  // trigger
  val last_valid_data = RegNext(RegEnable(io.ldout.bits.data, io.ldout.fire))
  val hit_ld_addr_trig_hit_vec = Wire(Vec(TriggerNum, Bool()))
  val lq_ld_addr_trig_hit_vec = io.lsq.trigger.lqLoadAddrTriggerHitVec
  (0 until TriggerNum).foreach { i =>
    val tdata2 = RegNext(io.trigger(i).tdata2)
    val matchType = RegNext(io.trigger(i).matchType)
    val tEnable = RegNext(io.trigger(i).tEnable)

    hit_ld_addr_trig_hit_vec(i) := TriggerCmp(RegNext(s2_out.vaddr), tdata2, matchType, tEnable)
    io.trigger(i).addrHit := Mux(s3_out.valid, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i))
  }
  io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec

  // s1
  io.debug_ls.s1_robIdx := s1_in.uop.robIdx.value
  io.debug_ls.s1_isLoadToLoadForward := s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled
  io.debug_ls.s1_isTlbFirstMiss := s1_fire && s1_tlb_miss && s1_in.isFirstIssue
  // s2
  io.debug_ls.s2_robIdx := s2_in.uop.robIdx.value
  io.debug_ls.s2_isBankConflict := s2_fire && (!s2_kill && s2_bank_conflict)
  io.debug_ls.s2_isDcacheFirstMiss := s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue
  io.debug_ls.s2_isForwardFail := s2_fire && s2_fwd_fail
  // s3
  io.debug_ls.s3_robIdx := s3_in.uop.robIdx.value
  io.debug_ls.s3_isReplayFast := s3_valid && s3_fast_rep && !s3_fast_rep_canceled
  io.debug_ls.s3_isReplayRS := RegNext(io.feedback_fast.valid && !io.feedback_fast.bits.hit) || (io.feedback_slow.valid && !io.feedback_slow.bits.hit)
  io.debug_ls.s3_isReplaySlow := io.lsq.ldin.valid && io.lsq.ldin.bits.rep_info.need_rep
  io.debug_ls.s3_isReplay := s3_valid && s3_rep_info.need_rep // include fast+slow+rs replay
  io.debug_ls.replayCause := s3_rep_info.cause
  io.debug_ls.replayCnt := 1.U

  // Topdown
  io.lsTopdownInfo.s1.robIdx := s1_in.uop.robIdx.value
  io.lsTopdownInfo.s1.vaddr_valid := s1_valid && s1_in.hasROBEntry
  io.lsTopdownInfo.s1.vaddr_bits := s1_vaddr
  io.lsTopdownInfo.s2.robIdx := s2_in.uop.robIdx.value
  io.lsTopdownInfo.s2.paddr_valid := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
  io.lsTopdownInfo.s2.paddr_bits := s2_in.paddr
  io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss
  io.lsTopdownInfo.s2.cache_miss_en := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated

  // perf cnt
  XSPerfAccumulate("s0_in_valid", io.ldin.valid)
  XSPerfAccumulate("s0_in_block", io.ldin.valid && !io.ldin.fire)
  XSPerfAccumulate("s0_in_fire_first_issue", s0_valid && s0_sel_src.isFirstIssue)
  XSPerfAccumulate("s0_lsq_fire_first_issue", io.replay.fire)
  XSPerfAccumulate("s0_ldu_fire_first_issue", io.ldin.fire && s0_sel_src.isFirstIssue)
  XSPerfAccumulate("s0_fast_replay_issue", io.fast_rep_in.fire)
  XSPerfAccumulate("s0_stall_out", s0_valid && !s0_can_go)
  XSPerfAccumulate("s0_stall_dcache", s0_valid && !io.dcache.req.ready)
  XSPerfAccumulate("s0_addr_spec_success", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("s0_addr_spec_failed", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("s0_addr_spec_success_once", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
  XSPerfAccumulate("s0_addr_spec_failed_once", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
  XSPerfAccumulate("s0_forward_tl_d_channel", s0_out.forward_tlDchannel)
  XSPerfAccumulate("s0_hardware_prefetch_fire", s0_fire && s0_hw_prf_select)
  XSPerfAccumulate("s0_software_prefetch_fire", s0_fire && s0_sel_src.prf && s0_int_iss_select)
  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
  XSPerfAccumulate("s0_hardware_prefetch_total", io.prefetch_req.valid)

  XSPerfAccumulate("s1_in_valid", s1_valid)
  XSPerfAccumulate("s1_in_fire", s1_fire)
  XSPerfAccumulate("s1_in_fire_first_issue", s1_fire && s1_in.isFirstIssue)
  XSPerfAccumulate("s1_tlb_miss", s1_fire && s1_tlb_miss)
  XSPerfAccumulate("s1_tlb_miss_first_issue", s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
  XSPerfAccumulate("s1_stall_out", s1_valid && !s1_can_go)
  XSPerfAccumulate("s1_dly_err", s1_valid && s1_fast_rep_dly_err)

  XSPerfAccumulate("s2_in_valid", s2_valid)
  XSPerfAccumulate("s2_in_fire", s2_fire)
  XSPerfAccumulate("s2_in_fire_first_issue", s2_fire && s2_in.isFirstIssue)
  XSPerfAccumulate("s2_dcache_miss", s2_fire && io.dcache.resp.bits.miss)
  XSPerfAccumulate("s2_dcache_miss_first_issue", s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
  XSPerfAccumulate("s2_dcache_real_miss_first_issue", s2_fire && io.dcache.resp.bits.real_miss && s2_in.isFirstIssue)
  XSPerfAccumulate("s2_full_forward", s2_fire && s2_full_fwd)
  XSPerfAccumulate("s2_dcache_miss_full_forward", s2_fire && s2_dcache_miss)
  XSPerfAccumulate("s2_fwd_frm_d_chan", s2_valid && s2_fwd_frm_d_chan)
  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr", s2_valid && s2_fwd_frm_d_chan_or_mshr)
  XSPerfAccumulate("s2_stall_out", s2_valid && !s2_can_go)
  XSPerfAccumulate("s2_prefetch", s2_fire && s2_prf)
  XSPerfAccumulate("s2_prefetch_ignored", s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
  XSPerfAccumulate("s2_prefetch_miss", s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req misses in l1
  XSPerfAccumulate("s2_prefetch_hit", s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hits in l1
  XSPerfAccumulate("s2_prefetch_accept", s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
  XSPerfAccumulate("s2_forward_req", s2_fire && s2_in.forward_tlDchannel)
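  // of the forward requests counted above, the two counters below record how
  // many actually received valid data from the D channel or from an MSHR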
  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
  XSPerfAccumulate("s2_successfully_forward_mshr", s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)

  XSPerfAccumulate("s3_fwd_frm_d_chan", s3_valid && s3_fwd_frm_d_chan_valid)

  XSPerfAccumulate("load_to_load_forward", s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
  XSPerfAccumulate("load_to_load_forward_try", s1_try_ptr_chasing)
  XSPerfAccumulate("load_to_load_forward_fail", s1_cancel_ptr_chasing)
  XSPerfAccumulate("load_to_load_forward_fail_cancelled", s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
  XSPerfAccumulate("load_to_load_forward_fail_addr_align", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)

  // bug lyq: some signals in perfEvents are no longer suitable for the current MemBlock design
  // hardware performance counter
  val perfEvents = Seq(
    ("load_s0_in_fire         ", s0_fire                                                  ),
    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled),
    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready            ),
    ("load_s1_in_fire         ", s0_fire                                                  ),
    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                         ),
    ("load_s2_in_fire         ", s1_fire                                                  ),
    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                      )
  )
  generatePerfEvent()

  when(io.ldout.fire) {
    XSDebug("ldout %x\n", io.ldout.bits.uop.pc)
  }
  // end
}
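
// The 16-entry LookupTree used in s3 above (s3_picked_data_frm_cache) encodes
// a simple rule that is easy to lose in the bit indices. The object below is a
// minimal, software-only sketch of that rule for documentation purposes: it is
// not part of the elaborated LoadUnit hardware, and `LoadDataPickModel` is a
// hypothetical name introduced here, not an XiangShan API.
// Rule: addrOffset = paddr(3, 0) selects the 64-bit beat containing the access
// and drops the bytes of that beat below the offset; rdataHelper /
// rdataVecHelper then extend or slice the picked value according to fuOpType.
object LoadDataPickModel {
  def pick(mergedData: BigInt, addrOffset: Int): BigInt = {
    require(addrOffset >= 0 && addrOffset < 16, "addrOffset is paddr(3, 0)")
    val beat = addrOffset / 8                   // which 64-bit half of the line
    val lo   = 64 * beat + 8 * (addrOffset % 8) // lowest kept bit
    val hi   = 64 * beat + 63                   // top bit of the selected beat
    (mergedData >> lo) & ((BigInt(1) << (hi - lo + 1)) - 1)
  }
  // e.g. pick(data, 3)  keeps bits (63, 24),  matching the "b0011".U entry above;
  //      pick(data, 11) keeps bits (127, 88), matching the "b1011".U entry above.
}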