1/*************************************************************************************** 2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 3* Copyright (c) 2020-2021 Peng Cheng Laboratory 4* 5* XiangShan is licensed under Mulan PSL v2. 6* You can use this software according to the terms and conditions of the Mulan PSL v2. 7* You may obtain a copy of Mulan PSL v2 at: 8* http://license.coscl.org.cn/MulanPSL2 9* 10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 13* 14* See the Mulan PSL v2 for more details. 15***************************************************************************************/ 16 17package xiangshan.mem 18 19import org.chipsalliance.cde.config.Parameters 20import chisel3._ 21import chisel3.util._ 22import utils._ 23import utility._ 24import xiangshan.ExceptionNO._ 25import xiangshan._ 26import xiangshan.backend.fu.PMPRespBundle 27import xiangshan.backend.rob.{DebugLsInfoBundle, LsTopdownInfo, RobPtr} 28import xiangshan.cache._ 29import xiangshan.cache.wpu.ReplayCarry 30import xiangshan.cache.mmu._ 31import xiangshan.mem.mdp._ 32 33class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle 34 with HasDCacheParameters 35 with HasTlbConst 36{ 37 // mshr refill index 38 val mshr_id = UInt(log2Up(cfg.nMissEntries).W) 39 // get full data from store queue and sbuffer 40 val full_fwd = Bool() 41 // wait for data from store inst's store queue index 42 val data_inv_sq_idx = new SqPtr 43 // wait for address from store queue index 44 val addr_inv_sq_idx = new SqPtr 45 // replay carry 46 val rep_carry = new ReplayCarry(nWays) 47 // data in last beat 48 val last_beat = Bool() 49 // replay cause 50 val cause = Vec(LoadReplayCauses.allCauses, Bool()) 51 // performance debug information 52 val debug = new PerfDebugInfo 53 // tlb hint 54 val tlb_id = UInt(log2Up(loadfiltersize).W) 55 val tlb_full = Bool() 56 57 // alias 58 def mem_amb = cause(LoadReplayCauses.C_MA) 59 def tlb_miss = cause(LoadReplayCauses.C_TM) 60 def fwd_fail = cause(LoadReplayCauses.C_FF) 61 def dcache_rep = cause(LoadReplayCauses.C_DR) 62 def dcache_miss = cause(LoadReplayCauses.C_DM) 63 def wpu_fail = cause(LoadReplayCauses.C_WF) 64 def bank_conflict = cause(LoadReplayCauses.C_BC) 65 def rar_nack = cause(LoadReplayCauses.C_RAR) 66 def raw_nack = cause(LoadReplayCauses.C_RAW) 67 def nuke = cause(LoadReplayCauses.C_NK) 68 def need_rep = cause.asUInt.orR 69} 70 71 72class LoadToLsqIO(implicit p: Parameters) extends XSBundle { 73 val ldin = DecoupledIO(new LqWriteBundle) 74 val uncache = Flipped(DecoupledIO(new ExuOutput)) 75 val ld_raw_data = Input(new LoadDataFromLQBundle) 76 val forward = new PipeLoadForwardQueryIO 77 val stld_nuke_query = new LoadNukeQueryIO 78 val ldld_nuke_query = new LoadNukeQueryIO 79} 80 81class LoadToLoadIO(implicit p: Parameters) extends XSBundle { 82 val valid = Bool() 83 val data = UInt(XLEN.W) // load to load fast path is limited to ld (64 bit) used as vaddr src1 only 84 val dly_ld_err = Bool() 85} 86 87class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle { 88 val tdata2 = Input(UInt(64.W)) 89 val matchType = Input(UInt(2.W)) 90 val tEnable = Input(Bool()) // timing is calculated before this 91 val addrHit = Output(Bool()) 92 val lastDataHit = Output(Bool()) 93} 94 95class LoadUnit(implicit p: Parameters) extends XSModule 96 with HasLoadHelper 97 with HasPerfEvents 98 with 
HasDCacheParameters 99 with HasCircularQueuePtrHelper 100{ 101 val io = IO(new Bundle() { 102 // control 103 val redirect = Flipped(ValidIO(new Redirect)) 104 val csrCtrl = Flipped(new CustomCSRCtrlIO) 105 106 // int issue path 107 val ldin = Flipped(Decoupled(new ExuInput)) 108 val ldout = Decoupled(new ExuOutput) 109 val rsIdx = Input(UInt()) 110 val isFirstIssue = Input(Bool()) 111 112 // data path 113 val tlb = new TlbRequestIO(2) 114 val pmp = Flipped(new PMPRespBundle()) // arrive same to tlb now 115 val dcache = new DCacheLoadIO 116 val sbuffer = new LoadForwardQueryIO 117 val lsq = new LoadToLsqIO 118 val tl_d_channel = Input(new DcacheToLduForwardIO) 119 val forward_mshr = Flipped(new LduToMissqueueForwardIO) 120 // val refill = Flipped(ValidIO(new Refill)) 121 val l2_hint = Input(Valid(new L2ToL1Hint)) 122 val tlb_hint = Flipped(new TlbHintReq) 123 // fast wakeup 124 val fast_uop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1, send to RS in load_s2 125 126 // prefetch 127 val prefetch_train = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms 128 val prefetch_train_l1 = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride 129 val prefetch_req = Flipped(ValidIO(new L1PrefetchReq)) // hardware prefetch to l1 cache req 130 val canAcceptLowConfPrefetch = Output(Bool()) 131 val canAcceptHighConfPrefetch = Output(Bool()) 132 133 // load to load fast path 134 val l2l_fwd_in = Input(new LoadToLoadIO) 135 val l2l_fwd_out = Output(new LoadToLoadIO) 136 137 val ld_fast_match = Input(Bool()) 138 val ld_fast_fuOpType = Input(UInt()) 139 val ld_fast_imm = Input(UInt(12.W)) 140 141 // rs feedback 142 val feedback_fast = ValidIO(new RSFeedback) // stage 2 143 val feedback_slow = ValidIO(new RSFeedback) // stage 3 144 145 // load ecc error 146 val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err is different 147 148 // schedule error query 149 val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO))) 150 151 // queue-based replay 152 val replay = Flipped(Decoupled(new LsPipelineBundle)) 153 val lq_rep_full = Input(Bool()) 154 155 // misc 156 val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch 157 158 // Load fast replay path 159 val fast_rep_in = Flipped(Decoupled(new LqWriteBundle)) 160 val fast_rep_out = Decoupled(new LqWriteBundle) 161 162 // Load RAR rollback 163 val rollback = Valid(new Redirect) 164 165 // perf 166 val debug_ls = Output(new DebugLsInfoBundle) 167 val lsTopdownInfo = Output(new LsTopdownInfo) 168 val correctMissTrain = Input(Bool()) 169 }) 170 171 val s1_ready, s2_ready, s3_ready = WireInit(false.B) 172 173 // Pipeline 174 // -------------------------------------------------------------------------------- 175 // stage 0 176 // -------------------------------------------------------------------------------- 177 // generate addr, use addr to query DCache and DTLB 178 val s0_valid = Wire(Bool()) 179 val s0_kill = Wire(Bool()) 180 val s0_can_go = s1_ready 181 val s0_fire = s0_valid && s0_can_go 182 val s0_out = Wire(new LqWriteBundle) 183 184 // flow source bundle 185 class FlowSource extends Bundle { 186 val vaddr = UInt(VAddrBits.W) 187 val mask = UInt((VLEN/8).W) 188 val uop = new MicroOp 189 val try_l2l = Bool() 190 val has_rob_entry = Bool() 191 val rsIdx = UInt(log2Up(IssQueSize).W) 192 val rep_carry = new ReplayCarry(nWays) 193 val mshrid = UInt(log2Up(cfg.nMissEntries).W) 194 val isFirstIssue = Bool() 195 val fast_rep = 
    Bool()
    val ld_rep = Bool()
    val l2l_fwd = Bool()
    val prf = Bool()
    val prf_rd = Bool()
    val prf_wr = Bool()
    val sched_idx = UInt(log2Up(LoadQueueReplaySize+1).W)
  }
  val s0_sel_src = Wire(new FlowSource)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.replay)
  // src1: fast load replay (io.fast_rep_in)
  // src2: load replayed by LSQ (io.replay)
  // src3: hardware prefetch from prefetcher (high confidence) (io.prefetch)
  // src4: int read / software prefetch first issue from RS (io.in)
  // src5: vec read first issue from RS (TODO)
  // src6: load try pointer chasing when there is no issued or replayed load (io.fastpath)
  // src7: hardware prefetch from prefetcher (low confidence) (io.prefetch)
  // priority: high to low
  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
  val s0_super_ld_rep_valid  = io.replay.valid && io.replay.bits.forward_tlDchannel
  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
  val s0_ld_rep_valid        = io.replay.valid && !io.replay.bits.forward_tlDchannel && !s0_rep_stall
  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
  val s0_vec_iss_valid       = WireInit(false.B) // TODO
  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid
  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
  dontTouch(s0_super_ld_rep_valid)
  dontTouch(s0_ld_fast_rep_valid)
  dontTouch(s0_ld_rep_valid)
  dontTouch(s0_high_conf_prf_valid)
  dontTouch(s0_int_iss_valid)
  dontTouch(s0_vec_iss_valid)
  dontTouch(s0_l2l_fwd_valid)
  dontTouch(s0_low_conf_prf_valid)

  // load flow source ready
  val s0_super_ld_rep_ready = WireInit(true.B)
  val s0_ld_fast_rep_ready  = !s0_super_ld_rep_valid
  val s0_ld_rep_ready       = !s0_super_ld_rep_valid &&
                              !s0_ld_fast_rep_valid
  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid

  val s0_int_iss_ready = !s0_super_ld_rep_valid &&
                         !s0_ld_fast_rep_valid &&
                         !s0_ld_rep_valid &&
                         !s0_high_conf_prf_valid

  val s0_vec_iss_ready = !s0_super_ld_rep_valid &&
                         !s0_ld_fast_rep_valid &&
                         !s0_ld_rep_valid &&
                         !s0_high_conf_prf_valid &&
                         !s0_int_iss_valid

  val s0_l2l_fwd_ready = !s0_super_ld_rep_valid &&
                         !s0_ld_fast_rep_valid &&
                         !s0_ld_rep_valid &&
                         !s0_high_conf_prf_valid &&
                         !s0_int_iss_valid &&
                         !s0_vec_iss_valid

  val s0_low_conf_prf_ready = !s0_super_ld_rep_valid &&
                              !s0_ld_fast_rep_valid &&
                              !s0_ld_rep_valid &&
                              !s0_high_conf_prf_valid &&
                              !s0_int_iss_valid &&
                              !s0_vec_iss_valid &&
                              !s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_ready)
  dontTouch(s0_ld_fast_rep_ready)
  dontTouch(s0_ld_rep_ready)
  dontTouch(s0_high_conf_prf_ready)
  dontTouch(s0_int_iss_ready)
  dontTouch(s0_vec_iss_ready)
  dontTouch(s0_l2l_fwd_ready)
  dontTouch(s0_low_conf_prf_ready)

  // load flow source select (OH)
  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
  assert(!s0_vec_iss_select) // to be added
  dontTouch(s0_super_ld_rep_select)
  dontTouch(s0_ld_fast_rep_select)
  dontTouch(s0_ld_rep_select)
  dontTouch(s0_hw_prf_select)
  dontTouch(s0_int_iss_select)
  dontTouch(s0_vec_iss_select)
  dontTouch(s0_l2l_fwd_select)

  s0_valid := (s0_super_ld_rep_valid ||
               s0_ld_fast_rep_valid ||
               s0_ld_rep_valid ||
               s0_high_conf_prf_valid ||
               s0_int_iss_valid ||
               s0_vec_iss_valid ||
               s0_l2l_fwd_valid ||
               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill

  // pointer chasing is tried only when S0 can go (i.e. S1 is ready) and dcache can accept the request
  val s0_try_ptr_chasing      = s0_l2l_fwd_select
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled

  // prefetch related ctrl signal
  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready
  val isHlv  = WireInit(LSUOpType.isHlv(s0_sel_src.uop.ctrl.fuOpType))
  val isHlvx = WireInit(LSUOpType.isHlvx(s0_sel_src.uop.ctrl.fuOpType))

  // query DTLB
  io.tlb.req.valid := s0_valid
  io.tlb.req.bits.cmd := Mux(s0_sel_src.prf,
    Mux(s0_sel_src.prf_wr, TlbCmd.write, TlbCmd.read),
    TlbCmd.read
  )
  io.tlb.req.bits.vaddr := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_sel_src.vaddr)
  io.tlb.req.bits.size := LSUOpType.size(s0_sel_src.uop.ctrl.fuOpType)
  io.tlb.req.bits.kill := s0_kill
  io.tlb.req.bits.memidx.is_ld := true.B
  io.tlb.req.bits.memidx.is_st := false.B
  io.tlb.req.bits.memidx.idx := s0_sel_src.uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx := s0_sel_src.uop.robIdx
  io.tlb.req.bits.no_translate := s0_hw_prf_select // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc := s0_sel_src.uop.cf.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_sel_src.isFirstIssue

  // query DCache
  io.dcache.req.valid := s0_valid
  io.dcache.req.bits.cmd := Mux(s0_sel_src.prf_rd,
    MemoryOpConstants.M_PFR,
    Mux(s0_sel_src.prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
  )
  io.dcache.req.bits.vaddr := s0_sel_src.vaddr
  io.dcache.req.bits.mask := s0_sel_src.mask
  io.dcache.req.bits.data := DontCare
  io.dcache.req.bits.isFirstIssue := s0_sel_src.isFirstIssue
  io.dcache.req.bits.instrtype := Mux(s0_sel_src.prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
  io.dcache.req.bits.debug_robIdx := s0_sel_src.uop.robIdx.value
  io.dcache.req.bits.replayCarry := s0_sel_src.rep_carry
  io.dcache.req.bits.id := DontCare // TODO: update cache meta
  io.dcache.pf_source := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)
  io.dcache.req.bits.lqIdx := s0_sel_src.uop.lqIdx

  // load flow priority mux
  def fromNullSource(): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out
  }

  def fromFastReplaySource(src: LqWriteBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := src.vaddr
    out.mask := src.mask
    out.uop := src.uop
    out.try_l2l := false.B
    out.has_rob_entry := src.hasROBEntry
    out.rep_carry := src.rep_info.rep_carry
    out.mshrid := src.rep_info.mshr_id
    out.rsIdx := src.rsIdx
    out.isFirstIssue := false.B
    out.fast_rep := true.B
    out.ld_rep := src.isLoadReplay
    out.l2l_fwd := false.B
    out.prf := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx := src.schedIndex
    out
  }

  def fromNormalReplaySource(src: LsPipelineBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := src.vaddr
    out.mask := genVWmask(src.vaddr, src.uop.ctrl.fuOpType(1, 0))
    out.uop := src.uop
    out.try_l2l := false.B
    out.has_rob_entry := true.B
    out.rsIdx := src.rsIdx
    out.rep_carry := src.replayCarry
    out.mshrid := src.mshrid
    out.isFirstIssue := false.B
    out.fast_rep := false.B
    out.ld_rep := true.B
    out.l2l_fwd := false.B
    out.prf := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx := src.schedIndex
    out
  }

  def fromPrefetchSource(src: L1PrefetchReq): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := src.getVaddr()
    out.mask := 0.U
    out.uop := DontCare
    out.try_l2l := false.B
    out.has_rob_entry := false.B
    out.rsIdx := 0.U
    out.rep_carry := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid := 0.U
    out.isFirstIssue := false.B
    out.fast_rep := false.B
    out.ld_rep := false.B
    out.l2l_fwd := false.B
    out.prf := true.B
    out.prf_rd := !src.is_store
    out.prf_wr := src.is_store
    out.sched_idx := 0.U
    out
  }

  def fromIntIssueSource(src: ExuInput): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := src.src(0) + SignExt(src.uop.ctrl.imm(11, 0), VAddrBits)
    out.mask := genVWmask(out.vaddr, src.uop.ctrl.fuOpType(1, 0))
    out.uop := src.uop
    out.try_l2l := false.B
    out.has_rob_entry := true.B
    out.rsIdx := io.rsIdx
    out.rep_carry := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid := 0.U
    out.isFirstIssue := true.B
    out.fast_rep := false.B
    out.ld_rep := false.B
    out.l2l_fwd := false.B
    out.prf := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx := 0.U
    out
  }

  def fromVecIssueSource(): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := 0.U
    out.mask := 0.U
    out.uop := 0.U.asTypeOf(new MicroOp)
    out.try_l2l := false.B
    out.has_rob_entry := false.B
    out.rsIdx := 0.U
    out.rep_carry := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid := 0.U
    out.isFirstIssue := false.B
    out.fast_rep := false.B
    out.ld_rep := false.B
    out.l2l_fwd := false.B
    out.prf := false.B
    out.prf_rd := false.B
    out.prf_wr := false.B
    out.sched_idx := 0.U
    out
  }

  def fromLoadToLoadSource(src: LoadToLoadIO): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5, 0))
    out.mask := genVWmask(0.U, LSUOpType.ld)
    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
    // Assume the pointer chasing is always ld.
    out.uop.ctrl.fuOpType := LSUOpType.ld
    out.try_l2l := true.B
    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer chasing
    // because these signals will be updated in S1
    out.has_rob_entry := false.B
    out.rsIdx := 0.U
    out.mshrid := 0.U
    out.rep_carry := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.isFirstIssue := true.B
    out.fast_rep := false.B
    out.ld_rep := false.B
    out.l2l_fwd := true.B
    out.prf := false.B
    out.prf_rd := false.B
    out.prf_wr := false.B
    out.sched_idx := 0.U
    out
  }

  // set default
  val s0_src_selector = Seq(
    s0_super_ld_rep_select,
    s0_ld_fast_rep_select,
    s0_ld_rep_select,
    s0_hw_prf_select,
    s0_int_iss_select,
    s0_vec_iss_select,
    (if (EnableLoadToLoadForward) s0_l2l_fwd_select else true.B)
  )
  val s0_src_format = Seq(
    fromNormalReplaySource(io.replay.bits),
    fromFastReplaySource(io.fast_rep_in.bits),
    fromNormalReplaySource(io.replay.bits),
    fromPrefetchSource(io.prefetch_req.bits),
    fromIntIssueSource(io.ldin.bits),
    fromVecIssueSource(),
    (if (EnableLoadToLoadForward) fromLoadToLoadSource(io.l2l_fwd_in) else fromNullSource())
  )
  s0_sel_src := ParallelPriorityMux(s0_src_selector, s0_src_format)

  // address align check
  val s0_addr_aligned = LookupTree(s0_sel_src.uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                           //b
    "b01".U -> (s0_sel_src.vaddr(0)    === 0.U), //h
    "b10".U -> (s0_sel_src.vaddr(1, 0) === 0.U), //w
    "b11".U -> (s0_sel_src.vaddr(2, 0) === 0.U)  //d
  ))

  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetch need writeback to loadQueueFlag
  s0_out := DontCare
  s0_out.rsIdx := s0_sel_src.rsIdx
  s0_out.vaddr := s0_sel_src.vaddr
  s0_out.mask := s0_sel_src.mask
  s0_out.uop := s0_sel_src.uop
  s0_out.isFirstIssue := s0_sel_src.isFirstIssue
  s0_out.hasROBEntry := s0_sel_src.has_rob_entry
  s0_out.isPrefetch := s0_sel_src.prf
  s0_out.isHWPrefetch := s0_hw_prf_select
  s0_out.isFastReplay := s0_sel_src.fast_rep
  s0_out.isLoadReplay := s0_sel_src.ld_rep
  s0_out.isFastPath := s0_sel_src.l2l_fwd
  s0_out.mshrid := s0_sel_src.mshrid
  s0_out.uop.cf.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned
  s0_out.forward_tlDchannel := s0_super_ld_rep_select
  when (io.tlb.req.valid && s0_sel_src.isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  } .otherwise {
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_sel_src.uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex := s0_sel_src.sched_idx

  // load fast replay
  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.ldin.ready := (s0_can_go && io.dcache.req.ready && s0_int_iss_ready)
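
  // Note (editorial summary of the arbitration above): the s0 sources are served with a fixed
  // priority (super replay > fast replay > normal replay > high-conf prefetch > int issue >
  // vec issue > load-to-load fastpath > low-conf prefetch). Each source's ready signal is the
  // NOR of all higher-priority valids, so at most one *_select signal is high in a cycle and
  // ParallelPriorityMux picks the matching FlowSource (both prefetch confidence levels share
  // the s0_hw_prf_select entry).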
// for hw prefetch load flow feedback, to be added later 563 // io.prefetch_in.ready := s0_hw_prf_select 564 565 // dcache replacement extra info 566 // TODO: should prefetch load update replacement? 567 io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.replay.bits.replacementUpdated, false.B) 568 569 XSDebug(io.dcache.req.fire, 570 p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_sel_src.uop.cf.pc)}, vaddr ${Hexadecimal(s0_sel_src.vaddr)}\n" 571 ) 572 XSDebug(s0_valid, 573 p"S0: pc ${Hexadecimal(s0_out.uop.cf.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " + 574 p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n") 575 576 // Pipeline 577 // -------------------------------------------------------------------------------- 578 // stage 1 579 // -------------------------------------------------------------------------------- 580 // TLB resp (send paddr to dcache) 581 val s1_valid = RegInit(false.B) 582 val s1_in = Wire(new LqWriteBundle) 583 val s1_out = Wire(new LqWriteBundle) 584 val s1_kill = Wire(Bool()) 585 val s1_can_go = s2_ready 586 val s1_fire = s1_valid && !s1_kill && s1_can_go 587 588 s1_ready := !s1_valid || s1_kill || s2_ready 589 when (s0_fire) { s1_valid := true.B } 590 .elsewhen (s1_fire) { s1_valid := false.B } 591 .elsewhen (s1_kill) { s1_valid := false.B } 592 s1_in := RegEnable(s0_out, s0_fire) 593 594 val s1_fast_rep_dly_kill = RegNext(io.fast_rep_in.bits.lateKill) && s1_in.isFastReplay 595 val s1_fast_rep_dly_err = RegNext(io.fast_rep_in.bits.delayedLoadError) && s1_in.isFastReplay 596 val s1_l2l_fwd_dly_err = RegNext(io.l2l_fwd_in.dly_ld_err) && s1_in.isFastPath 597 val s1_dly_err = s1_fast_rep_dly_err || s1_l2l_fwd_dly_err 598 val s1_vaddr_hi = Wire(UInt()) 599 val s1_vaddr_lo = Wire(UInt()) 600 val s1_vaddr = Wire(UInt()) 601 val s1_paddr_dup_lsu = Wire(UInt()) 602 val s1_gpaddr_dup_lsu = Wire(UInt()) 603 val s1_paddr_dup_dcache = Wire(UInt()) 604 val s1_exception = ExceptionNO.selectByFu(s1_out.uop.cf.exceptionVec, lduCfg).asUInt.orR // af & pf exception were modified below. 
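
  // Note: the DTLB response below carries two copies of the translated paddr: paddr(0)
  // (s1_paddr_dup_lsu) feeds the LSQ/sbuffer forward queries and s1_out, while paddr(1)
  // (s1_paddr_dup_dcache) goes to the dcache tag compare, presumably duplicated to ease
  // fanout/timing.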
  val s1_tlb_miss = io.tlb.resp.bits.miss
  val s1_prf = s1_in.isPrefetch
  val s1_hw_prf = s1_in.isHWPrefetch
  val s1_sw_prf = s1_prf && !s1_hw_prf
  val s1_tlb_memidx = io.tlb.resp.bits.memidx

  s1_vaddr_hi := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo := s1_in.vaddr(5, 0)
  s1_vaddr := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)
  s1_gpaddr_dup_lsu := io.tlb.resp.bits.gpaddr(0)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
    // printf("load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill := s1_kill || s1_dly_err
  io.tlb.resp.ready := true.B

  io.dcache.s1_paddr_dup_lsu <> s1_paddr_dup_lsu
  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.dcache.s1_kill := s1_kill || s1_dly_err || s1_tlb_miss || s1_exception

  // store to load forwarding
  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.sbuffer.vaddr := s1_vaddr
  io.sbuffer.paddr := s1_paddr_dup_lsu
  io.sbuffer.gpaddr := s1_gpaddr_dup_lsu
  io.sbuffer.uop := s1_in.uop
  io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.sbuffer.mask := s1_in.mask
  io.sbuffer.pc := s1_in.uop.cf.pc // FIXME: remove it

  io.lsq.forward.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.lsq.forward.vaddr := s1_vaddr
  io.lsq.forward.paddr := s1_paddr_dup_lsu
  io.lsq.forward.gpaddr := s1_gpaddr_dup_lsu
  io.lsq.forward.uop := s1_in.uop
  io.lsq.forward.sqIdx := s1_in.uop.sqIdx
  io.lsq.forward.sqIdxMask := 0.U
  io.lsq.forward.mask := s1_in.mask
  io.lsq.forward.pc := s1_in.uop.cf.pc // FIXME: remove it

  // st-ld violation query
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                  io.stld_nuke_query(w).valid && // query valid
                  isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                  // TODO: Fix me when vector instruction
                  (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                  (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain
                })).asUInt.orR && !s1_tlb_miss

  s1_out := s1_in
  s1_out.vaddr := s1_vaddr
  s1_out.paddr := s1_paddr_dup_lsu
  s1_out.tlbMiss := s1_tlb_miss
  s1_out.ptwBack := io.tlb.resp.bits.ptwBack
  s1_out.rsIdx := s1_in.rsIdx
  s1_out.rep_info.debug := s1_in.uop.debugInfo
  s1_out.rep_info.nuke := s1_nuke && !s1_sw_prf
  s1_out.delayedLoadError := s1_dly_err

  when (!s1_dly_err) {
    // current ori test will cause the case of ldest == 0, below will be modified in the future.
672 // af & pf exception were modified 673 s1_out.uop.cf.exceptionVec(loadPageFault) := io.tlb.resp.bits.excp(0).pf.ld && !s1_tlb_miss 674 s1_out.uop.cf.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld && !s1_tlb_miss 675 } .otherwise { 676 s1_out.uop.cf.exceptionVec(loadPageFault) := false.B 677 s1_out.uop.cf.exceptionVec(loadAddrMisaligned) := false.B 678 s1_out.uop.cf.exceptionVec(loadAccessFault) := s1_dly_err 679 } 680 681 // pointer chasing 682 val s1_try_ptr_chasing = RegNext(s0_do_try_ptr_chasing, false.B) 683 val s1_ptr_chasing_vaddr = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing) 684 val s1_fu_op_type_not_ld = WireInit(false.B) 685 val s1_not_fast_match = WireInit(false.B) 686 val s1_addr_mismatch = WireInit(false.B) 687 val s1_addr_misaligned = WireInit(false.B) 688 val s1_fast_mismatch = WireInit(false.B) 689 val s1_ptr_chasing_canceled = WireInit(false.B) 690 val s1_cancel_ptr_chasing = WireInit(false.B) 691 692 s1_kill := s1_fast_rep_dly_kill || 693 s1_cancel_ptr_chasing || 694 s1_in.uop.robIdx.needFlush(io.redirect) || 695 (s1_in.uop.robIdx.needFlush(RegNext(io.redirect)) && !RegNext(s0_try_ptr_chasing)) || 696 RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid) 697 698 if (EnableLoadToLoadForward) { 699 // Sometimes, we need to cancel the load-load forwarding. 700 // These can be put at S0 if timing is bad at S1. 701 // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow) 702 s1_addr_mismatch := s1_ptr_chasing_vaddr(6) || 703 RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing) 704 // Case 1: the address is not 64-bit aligned or the fuOpType is not LD 705 s1_addr_misaligned := s1_ptr_chasing_vaddr(2, 0).orR 706 s1_fu_op_type_not_ld := io.ldin.bits.uop.ctrl.fuOpType =/= LSUOpType.ld 707 // Case 2: this load-load uop is cancelled 708 s1_ptr_chasing_canceled := !io.ldin.valid 709 // Case 3: fast mismatch 710 s1_fast_mismatch := RegEnable(!io.ld_fast_match, s0_do_try_ptr_chasing) 711 712 when (s1_try_ptr_chasing) { 713 s1_cancel_ptr_chasing := s1_addr_mismatch || 714 s1_addr_misaligned || 715 s1_fu_op_type_not_ld || 716 s1_ptr_chasing_canceled || 717 s1_fast_mismatch 718 719 s1_in.uop := io.ldin.bits.uop 720 s1_in.rsIdx := io.rsIdx 721 s1_in.isFirstIssue := io.isFirstIssue 722 s1_vaddr_lo := s1_ptr_chasing_vaddr(5, 0) 723 s1_paddr_dup_lsu := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo) 724 s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo) 725 726 // recored tlb time when get the data to ensure the correctness of the latency calculation (although it should not record in here, because it does not use tlb) 727 s1_in.uop.debugInfo.tlbFirstReqTime := GTimer() 728 s1_in.uop.debugInfo.tlbRespTime := GTimer() 729 } 730 when (!s1_cancel_ptr_chasing) { 731 s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire 732 when (s1_try_ptr_chasing) { 733 io.ldin.ready := true.B 734 } 735 } 736 } 737 738 // pre-calcuate sqIdx mask in s0, then send it to lsq in s1 for forwarding 739 val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize)) 740 // to enable load-load, sqIdxMask must be calculated based on ldin.uop 741 // If the timing here is not OK, load-load forwarding has to be disabled. 742 // Or we calculate sqIdxMask at RS?? 
743 io.lsq.forward.sqIdxMask := s1_sqIdx_mask 744 if (EnableLoadToLoadForward) { 745 when (s1_try_ptr_chasing) { 746 io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize) 747 } 748 } 749 750 io.forward_mshr.valid := s1_valid && s1_out.forward_tlDchannel 751 io.forward_mshr.mshrid := s1_out.mshrid 752 io.forward_mshr.paddr := s1_out.paddr 753 754 XSDebug(s1_valid, 755 p"S1: pc ${Hexadecimal(s1_out.uop.cf.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " + 756 p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n") 757 758 // Pipeline 759 // -------------------------------------------------------------------------------- 760 // stage 2 761 // -------------------------------------------------------------------------------- 762 // s2: DCache resp 763 val s2_valid = RegInit(false.B) 764 val s2_in = Wire(new LqWriteBundle) 765 val s2_out = Wire(new LqWriteBundle) 766 val s2_kill = Wire(Bool()) 767 val s2_can_go = s3_ready 768 val s2_fire = s2_valid && !s2_kill && s2_can_go 769 770 s2_kill := s2_in.uop.robIdx.needFlush(io.redirect) 771 s2_ready := !s2_valid || s2_kill || s3_ready 772 when (s1_fire) { s2_valid := true.B } 773 .elsewhen (s2_fire) { s2_valid := false.B } 774 .elsewhen (s2_kill) { s2_valid := false.B } 775 s2_in := RegEnable(s1_out, s1_fire) 776 777 val s2_pmp = WireInit(io.pmp) 778 779 val s2_prf = s2_in.isPrefetch 780 val s2_hw_prf = s2_in.isHWPrefetch 781 782 // exception that may cause load addr to be invalid / illegal 783 // if such exception happen, that inst and its exception info 784 // will be force writebacked to rob 785 val s2_exception_vec = WireInit(s2_in.uop.cf.exceptionVec) 786 when (!s2_in.delayedLoadError) { 787 s2_exception_vec(loadAccessFault) := s2_in.uop.cf.exceptionVec(loadAccessFault) || s2_pmp.ld || 788 (io.dcache.resp.bits.tag_error && RegNext(io.csrCtrl.cache_error_enable)) 789 } 790 791 // soft prefetch will not trigger any exception (but ecc error interrupt may 792 // be triggered) 793 when (!s2_in.delayedLoadError && (s2_prf || s2_in.tlbMiss)) { 794 s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType) 795 } 796 val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, lduCfg).asUInt.orR 797 798 val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr) 799 val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward() 800 val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr) 801 802 // writeback access fault caused by ecc error / bus error 803 // * ecc data error is slow to generate, so we will not use it until load stage 3 804 // * in load stage 3, an extra signal io.load_error will be used to 805 val s2_actually_mmio = s2_pmp.mmio 806 val s2_mmio = !s2_prf && 807 s2_actually_mmio && 808 !s2_exception && 809 !s2_in.tlbMiss 810 811 val s2_full_fwd = Wire(Bool()) 812 val s2_mem_amb = s2_in.uop.cf.storeSetHit && 813 io.lsq.forward.addrInvalid 814 815 val s2_tlb_miss = s2_in.tlbMiss 816 val s2_fwd_fail = io.lsq.forward.dataInvalid 817 val s2_dcache_miss = io.dcache.resp.bits.miss && 818 !s2_fwd_frm_d_chan_or_mshr && 819 !s2_full_fwd 820 821 val s2_mq_nack = io.dcache.s2_mq_nack && 822 !s2_fwd_frm_d_chan_or_mshr && 823 !s2_full_fwd 824 825 val s2_bank_conflict = io.dcache.s2_bank_conflict && 826 !s2_fwd_frm_d_chan_or_mshr && 827 !s2_full_fwd 828 829 val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail && 830 
!s2_fwd_frm_d_chan_or_mshr && 831 !s2_full_fwd 832 833 val s2_rar_nack = io.lsq.ldld_nuke_query.req.valid && 834 !io.lsq.ldld_nuke_query.req.ready 835 836 val s2_raw_nack = io.lsq.stld_nuke_query.req.valid && 837 !io.lsq.stld_nuke_query.req.ready 838 // st-ld violation query 839 // NeedFastRecovery Valid when 840 // 1. Fast recovery query request Valid. 841 // 2. Load instruction is younger than requestors(store instructions). 842 // 3. Physical address match. 843 // 4. Data contains. 844 val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => { 845 io.stld_nuke_query(w).valid && // query valid 846 isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store 847 // TODO: Fix me when vector instruction 848 (s2_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match 849 (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain 850 })).asUInt.orR && !s2_tlb_miss || s2_in.rep_info.nuke 851 852 val s2_cache_handled = io.dcache.resp.bits.handled 853 val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) && 854 io.dcache.resp.bits.tag_error 855 856 val s2_troublem = !s2_exception && 857 !s2_mmio && 858 !s2_prf && 859 !s2_in.delayedLoadError 860 861 io.dcache.resp.ready := true.B 862 val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_in.delayedLoadError || s2_mmio || s2_prf) 863 assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost") 864 865 // fast replay require 866 val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail)) 867 val s2_nuke_fast_rep = !s2_mq_nack && 868 !s2_dcache_miss && 869 !s2_bank_conflict && 870 !s2_wpu_pred_fail && 871 !s2_rar_nack && 872 !s2_raw_nack && 873 s2_nuke 874 875 val s2_fast_rep = !s2_mem_amb && 876 !s2_tlb_miss && 877 !s2_fwd_fail && 878 (s2_dcache_fast_rep || s2_nuke_fast_rep) && 879 s2_troublem 880 881 // need allocate new entry 882 val s2_can_query = !s2_mem_amb && 883 !s2_tlb_miss && 884 !s2_fwd_fail && 885 s2_troublem 886 887 val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error) 888 889 // ld-ld violation require 890 io.lsq.ldld_nuke_query.req.valid := s2_valid && s2_can_query 891 io.lsq.ldld_nuke_query.req.bits.uop := s2_in.uop 892 io.lsq.ldld_nuke_query.req.bits.mask := s2_in.mask 893 io.lsq.ldld_nuke_query.req.bits.paddr := s2_in.paddr 894 io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss) 895 896 // st-ld violation require 897 io.lsq.stld_nuke_query.req.valid := s2_valid && s2_can_query 898 io.lsq.stld_nuke_query.req.bits.uop := s2_in.uop 899 io.lsq.stld_nuke_query.req.bits.mask := s2_in.mask 900 io.lsq.stld_nuke_query.req.bits.paddr := s2_in.paddr 901 io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss) 902 903 // merge forward result 904 // lsq has higher priority than sbuffer 905 val s2_fwd_mask = Wire(Vec((VLEN/8), Bool())) 906 val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W))) 907 s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid 908 // generate XLEN/8 Muxs 909 for (i <- 0 until VLEN / 8) { 910 s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i) 911 s2_fwd_data(i) := Mux(io.lsq.forward.forwardMask(i), io.lsq.forward.forwardData(i), io.sbuffer.forwardData(i)) 912 } 913 914 XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n", 915 s2_in.uop.cf.pc, 916 
io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt, 917 s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt 918 ) 919 920 // 921 s2_out := s2_in 922 s2_out.data := 0.U // data will be generated in load s3 923 s2_out.uop.ctrl.fpWen := s2_in.uop.ctrl.fpWen && !s2_exception 924 s2_out.mmio := s2_mmio 925 s2_out.uop.ctrl.flushPipe := false.B 926 s2_out.uop.cf.exceptionVec := s2_exception_vec 927 s2_out.forwardMask := s2_fwd_mask 928 s2_out.forwardData := s2_fwd_data 929 s2_out.handledByMSHR := s2_cache_handled 930 s2_out.miss := s2_dcache_miss && s2_troublem 931 s2_out.feedbacked := io.feedback_fast.valid 932 933 // Generate replay signal caused by: 934 // * st-ld violation check 935 // * tlb miss 936 // * dcache replay 937 // * forward data invalid 938 // * dcache miss 939 s2_out.rep_info.mem_amb := s2_mem_amb && s2_troublem 940 s2_out.rep_info.tlb_miss := s2_tlb_miss && s2_troublem 941 s2_out.rep_info.fwd_fail := s2_fwd_fail && s2_troublem 942 s2_out.rep_info.dcache_rep := s2_mq_nack && s2_troublem 943 s2_out.rep_info.dcache_miss := s2_dcache_miss && s2_troublem 944 s2_out.rep_info.bank_conflict := s2_bank_conflict && s2_troublem 945 s2_out.rep_info.wpu_fail := s2_wpu_pred_fail && s2_troublem 946 s2_out.rep_info.rar_nack := s2_rar_nack && s2_troublem 947 s2_out.rep_info.raw_nack := s2_raw_nack && s2_troublem 948 s2_out.rep_info.nuke := s2_nuke && s2_troublem 949 s2_out.rep_info.full_fwd := s2_data_fwded 950 s2_out.rep_info.data_inv_sq_idx := io.lsq.forward.dataInvalidSqIdx 951 s2_out.rep_info.addr_inv_sq_idx := io.lsq.forward.addrInvalidSqIdx 952 s2_out.rep_info.rep_carry := io.dcache.resp.bits.replayCarry 953 s2_out.rep_info.mshr_id := io.dcache.resp.bits.mshr_id 954 s2_out.rep_info.last_beat := s2_in.paddr(log2Up(refillBytes)) 955 s2_out.rep_info.debug := s2_in.uop.debugInfo 956 s2_out.rep_info.tlb_id := io.tlb_hint.id 957 s2_out.rep_info.tlb_full := io.tlb_hint.full 958 959 // if forward fail, replay this inst from fetch 960 val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss 961 // if ld-ld violation is detected, replay from this inst from fetch 962 val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss 963 // io.out.bits.uop.ctrl.replayInst := false.B 964 965 // to be removed 966 io.feedback_fast.valid := false.B 967 io.feedback_fast.bits.hit := false.B 968 io.feedback_fast.bits.flushState := s2_in.ptwBack 969 io.feedback_fast.bits.rsIdx := s2_in.rsIdx 970 io.feedback_fast.bits.sourceType := RSFeedbackType.lrqFull 971 io.feedback_fast.bits.dataInvalidSqIdx := DontCare 972 973 // fast wakeup 974 io.fast_uop.valid := RegNext( 975 !io.dcache.s1_disable_fast_wakeup && 976 s1_valid && 977 !s1_kill && 978 !io.tlb.resp.bits.miss && 979 !io.lsq.forward.dataInvalidFast 980 ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio) 981 io.fast_uop.bits := RegNext(s1_out.uop) 982 983 // 984 io.s2_ptr_chasing := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire) 985 986 // RegNext prefetch train for better timing 987 // ** Now, prefetch train is valid at load s3 ** 988 io.prefetch_train.valid := RegNext(s2_valid && !s2_actually_mmio && !s2_in.tlbMiss) 989 io.prefetch_train.bits.fromLsPipelineBundle(s2_in, latch = true) 990 io.prefetch_train.bits.miss := RegNext(io.dcache.resp.bits.miss) // TODO: use trace with bank conflict? 
991 io.prefetch_train.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch) 992 io.prefetch_train.bits.meta_access := RegNext(io.dcache.resp.bits.meta_access) 993 994 io.prefetch_train_l1.valid := RegNext(s2_valid && !s2_actually_mmio) 995 io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in, latch = true) 996 io.prefetch_train_l1.bits.miss := RegNext(io.dcache.resp.bits.miss) 997 io.prefetch_train_l1.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch) 998 io.prefetch_train_l1.bits.meta_access := RegNext(io.dcache.resp.bits.meta_access) 999 if (env.FPGAPlatform){ 1000 io.dcache.s0_pc := DontCare 1001 io.dcache.s1_pc := DontCare 1002 io.dcache.s2_pc := DontCare 1003 }else{ 1004 io.dcache.s0_pc := s0_out.uop.cf.pc 1005 io.dcache.s1_pc := s1_out.uop.cf.pc 1006 io.dcache.s2_pc := s2_out.uop.cf.pc 1007 } 1008 io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill 1009 1010 val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready 1011 val s2_ld_valid_dup = RegInit(0.U(6.W)) 1012 s2_ld_valid_dup := 0x0.U(6.W) 1013 when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) } 1014 when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) } 1015 assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch))) 1016 1017 // Pipeline 1018 // -------------------------------------------------------------------------------- 1019 // stage 3 1020 // -------------------------------------------------------------------------------- 1021 // writeback and update load queue 1022 val s3_valid = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect)) 1023 val s3_in = RegEnable(s2_out, s2_fire) 1024 val s3_out = Wire(Valid(new ExuOutput)) 1025 val s3_dcache_rep = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire) 1026 val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire) 1027 val s3_fast_rep = Wire(Bool()) 1028 val s3_troublem = RegNext(s2_troublem) 1029 val s3_kill = s3_in.uop.robIdx.needFlush(io.redirect) 1030 s3_ready := !s3_valid || s3_kill || io.ldout.ready 1031 1032 // forwrad last beat 1033 val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr) 1034 val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid) 1035 val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid) 1036 1037 val s3_fast_rep_canceled = io.replay.valid && io.replay.bits.forward_tlDchannel || !io.dcache.req.ready 1038 io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || s3_fast_rep_canceled) && !s3_in.feedbacked 1039 io.lsq.ldin.bits := s3_in 1040 io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid 1041 1042 /* <------- DANGEROUS: Don't change sequence here ! 
-------> */ 1043 io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools 1044 io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated 1045 io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated) 1046 1047 val s3_dly_ld_err = 1048 if (EnableAccurateLoadError) { 1049 io.dcache.resp.bits.error_delayed && RegNext(io.csrCtrl.cache_error_enable) && s3_troublem 1050 } else { 1051 WireInit(false.B) 1052 } 1053 io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid 1054 io.lsq.ldin.bits.dcacheRequireReplay := s3_dcache_rep 1055 io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err 1056 1057 val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid) && s3_troublem 1058 val s3_rep_frm_fetch = s3_vp_match_fail 1059 val s3_ldld_rep_inst = 1060 io.lsq.ldld_nuke_query.resp.valid && 1061 io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch && 1062 RegNext(io.csrCtrl.ldld_vio_check_enable) 1063 val s3_flushPipe = s3_ldld_rep_inst 1064 1065 val s3_rep_info = WireInit(s3_in.rep_info) 1066 s3_rep_info.dcache_miss := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid 1067 val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt) 1068 1069 val s3_exception = ExceptionNO.selectByFu(s3_in.uop.cf.exceptionVec, lduCfg).asUInt.orR 1070 when (s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) { 1071 io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType) 1072 } .otherwise { 1073 io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools) 1074 } 1075 1076 // Int load, if hit, will be writebacked at s3 1077 s3_out.valid := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio 1078 s3_out.bits.uop := s3_in.uop 1079 s3_out.bits.uop.cf.exceptionVec(loadAccessFault) := s3_dly_ld_err || s3_in.uop.cf.exceptionVec(loadAccessFault) 1080 s3_out.bits.uop.ctrl.flushPipe := false.B 1081 s3_out.bits.uop.ctrl.replayInst := false.B 1082 s3_out.bits.data := s3_in.data 1083 s3_out.bits.redirectValid := false.B 1084 s3_out.bits.redirect := DontCare 1085 s3_out.bits.debug.isMMIO := s3_in.mmio 1086 s3_out.bits.debug.isPerfCnt := false.B 1087 s3_out.bits.debug.paddr := s3_in.paddr 1088 s3_out.bits.debug.vaddr := s3_in.vaddr 1089 s3_out.bits.fflags := DontCare 1090 1091 io.rollback.valid := s3_valid && (s3_rep_frm_fetch || s3_flushPipe) && !s3_exception 1092 io.rollback.bits := DontCare 1093 io.rollback.bits.isRVC := s3_out.bits.uop.cf.pd.isRVC 1094 io.rollback.bits.robIdx := s3_out.bits.uop.robIdx 1095 io.rollback.bits.ftqIdx := s3_out.bits.uop.cf.ftqPtr 1096 io.rollback.bits.ftqOffset := s3_out.bits.uop.cf.ftqOffset 1097 io.rollback.bits.level := Mux(s3_rep_frm_fetch, RedirectLevel.flush, RedirectLevel.flushAfter) 1098 io.rollback.bits.cfiUpdate.target := s3_out.bits.uop.cf.pc 1099 io.rollback.bits.debug_runahead_checkpoint_id := s3_out.bits.uop.debugInfo.runahead_checkpoint_id 1100 /* <------- DANGEROUS: Don't change sequence here ! 
-------> */ 1101 1102 io.lsq.ldin.bits.uop := s3_out.bits.uop 1103 1104 val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep 1105 io.lsq.ldld_nuke_query.revoke := s3_revoke 1106 io.lsq.stld_nuke_query.revoke := s3_revoke 1107 1108 // feedback slow 1109 s3_fast_rep := RegNext(s2_fast_rep) 1110 1111 val s3_fb_no_waiting = !s3_in.isLoadReplay && 1112 (!(s3_fast_rep && !s3_fast_rep_canceled)) && 1113 !s3_in.feedbacked 1114 1115 // 1116 io.feedback_slow.valid := s3_valid && s3_fb_no_waiting 1117 io.feedback_slow.bits.hit := !s3_rep_info.need_rep || io.lsq.ldin.ready 1118 io.feedback_slow.bits.flushState := s3_in.ptwBack 1119 io.feedback_slow.bits.rsIdx := s3_in.rsIdx 1120 io.feedback_slow.bits.sourceType := RSFeedbackType.lrqFull 1121 io.feedback_slow.bits.dataInvalidSqIdx := DontCare 1122 1123 val s3_ld_wb_meta = Mux(s3_valid, s3_out.bits, io.lsq.uncache.bits) 1124 1125 // data from load queue refill 1126 val s3_ld_raw_data_frm_uncache = io.lsq.ld_raw_data 1127 val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData() 1128 val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List( 1129 "b000".U -> s3_merged_data_frm_uncache(63, 0), 1130 "b001".U -> s3_merged_data_frm_uncache(63, 8), 1131 "b010".U -> s3_merged_data_frm_uncache(63, 16), 1132 "b011".U -> s3_merged_data_frm_uncache(63, 24), 1133 "b100".U -> s3_merged_data_frm_uncache(63, 32), 1134 "b101".U -> s3_merged_data_frm_uncache(63, 40), 1135 "b110".U -> s3_merged_data_frm_uncache(63, 48), 1136 "b111".U -> s3_merged_data_frm_uncache(63, 56) 1137 )) 1138 val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache) 1139 1140 // data from dcache hit 1141 val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle) 1142 s3_ld_raw_data_frm_cache.respDcacheData := io.dcache.resp.bits.data_delayed 1143 s3_ld_raw_data_frm_cache.forwardMask := RegEnable(s2_fwd_mask, s2_valid) 1144 s3_ld_raw_data_frm_cache.forwardData := RegEnable(s2_fwd_data, s2_valid) 1145 s3_ld_raw_data_frm_cache.uop := RegEnable(s2_out.uop, s2_valid) 1146 s3_ld_raw_data_frm_cache.addrOffset := RegEnable(s2_out.paddr(3, 0), s2_valid) 1147 s3_ld_raw_data_frm_cache.forward_D := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid 1148 s3_ld_raw_data_frm_cache.forwardData_D := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid)) 1149 s3_ld_raw_data_frm_cache.forward_mshr := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid) 1150 s3_ld_raw_data_frm_cache.forwardData_mshr := RegEnable(s2_fwd_data_frm_mshr, s2_valid) 1151 s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid) 1152 1153 val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData() 1154 val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List( 1155 "b0000".U -> s3_merged_data_frm_cache(63, 0), 1156 "b0001".U -> s3_merged_data_frm_cache(63, 8), 1157 "b0010".U -> s3_merged_data_frm_cache(63, 16), 1158 "b0011".U -> s3_merged_data_frm_cache(63, 24), 1159 "b0100".U -> s3_merged_data_frm_cache(63, 32), 1160 "b0101".U -> s3_merged_data_frm_cache(63, 40), 1161 "b0110".U -> s3_merged_data_frm_cache(63, 48), 1162 "b0111".U -> s3_merged_data_frm_cache(63, 56), 1163 "b1000".U -> s3_merged_data_frm_cache(127, 64), 1164 "b1001".U -> s3_merged_data_frm_cache(127, 72), 1165 "b1010".U -> s3_merged_data_frm_cache(127, 80), 1166 "b1011".U -> s3_merged_data_frm_cache(127, 88), 1167 "b1100".U -> 
s3_merged_data_frm_cache(127, 96), 1168 "b1101".U -> s3_merged_data_frm_cache(127, 104), 1169 "b1110".U -> s3_merged_data_frm_cache(127, 112), 1170 "b1111".U -> s3_merged_data_frm_cache(127, 120) 1171 )) 1172 val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache) 1173 1174 // FIXME: add 1 cycle delay ? 1175 io.lsq.uncache.ready := !s3_valid 1176 io.ldout.bits := s3_ld_wb_meta 1177 io.ldout.bits.data := Mux(s3_valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache) 1178 io.ldout.valid := s3_out.valid || (io.lsq.uncache.valid && !s3_valid) 1179 1180 // s3 load fast replay 1181 io.fast_rep_out.valid := s3_valid && s3_fast_rep 1182 io.fast_rep_out.bits := s3_in 1183 io.fast_rep_out.bits.lateKill := s3_rep_frm_fetch 1184 1185 1186 // fast load to load forward 1187 if (EnableLoadToLoadForward) { 1188 io.l2l_fwd_out.valid := s3_valid && !s3_in.mmio && !s3_rep_info.need_rep 1189 io.l2l_fwd_out.data := Mux(s3_in.vaddr(3), s3_merged_data_frm_cache(127, 64), s3_merged_data_frm_cache(63, 0)) 1190 io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err || // ecc delayed error 1191 s3_ldld_rep_inst || 1192 s3_rep_frm_fetch 1193 } else { 1194 io.l2l_fwd_out.valid := false.B 1195 io.l2l_fwd_out.data := DontCare 1196 io.l2l_fwd_out.dly_ld_err := DontCare 1197 } 1198 1199 1200 // FIXME: please move this part to LoadQueueReplay 1201 io.debug_ls := DontCare 1202 1203 1204 // Topdown 1205 io.lsTopdownInfo.s1.robIdx := s1_in.uop.robIdx.value 1206 io.lsTopdownInfo.s1.vaddr_valid := s1_valid && s1_in.hasROBEntry 1207 io.lsTopdownInfo.s1.vaddr_bits := s1_vaddr 1208 io.lsTopdownInfo.s2.robIdx := s2_in.uop.robIdx.value 1209 io.lsTopdownInfo.s2.paddr_valid := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss 1210 io.lsTopdownInfo.s2.paddr_bits := s2_in.paddr 1211 io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss 1212 io.lsTopdownInfo.s2.cache_miss_en := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated 1213 1214 // perf cnt 1215 XSPerfAccumulate("s0_in_valid", io.ldin.valid) 1216 XSPerfAccumulate("s0_in_block", io.ldin.valid && !io.ldin.fire) 1217 XSPerfAccumulate("s0_in_fire_first_issue", s0_valid && s0_sel_src.isFirstIssue) 1218 XSPerfAccumulate("s0_lsq_fire_first_issue", io.replay.fire) 1219 XSPerfAccumulate("s0_ldu_fire_first_issue", io.ldin.fire && s0_sel_src.isFirstIssue) 1220 XSPerfAccumulate("s0_fast_replay_issue", io.fast_rep_in.fire) 1221 XSPerfAccumulate("s0_stall_out", s0_valid && !s0_can_go) 1222 XSPerfAccumulate("s0_stall_dcache", s0_valid && !io.dcache.req.ready) 1223 XSPerfAccumulate("s0_addr_spec_success", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12)) 1224 XSPerfAccumulate("s0_addr_spec_failed", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12)) 1225 XSPerfAccumulate("s0_addr_spec_success_once", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue) 1226 XSPerfAccumulate("s0_addr_spec_failed_once", s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue) 1227 XSPerfAccumulate("s0_forward_tl_d_channel", s0_out.forward_tlDchannel) 1228 XSPerfAccumulate("s0_hardware_prefetch_fire", s0_fire && s0_hw_prf_select) 1229 XSPerfAccumulate("s0_software_prefetch_fire", s0_fire && s0_sel_src.prf && s0_int_iss_select) 1230 XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select) 1231 XSPerfAccumulate("s0_hardware_prefetch_total", 
io.prefetch_req.valid) 1232 1233 XSPerfAccumulate("s1_in_valid", s1_valid) 1234 XSPerfAccumulate("s1_in_fire", s1_fire) 1235 XSPerfAccumulate("s1_in_fire_first_issue", s1_fire && s1_in.isFirstIssue) 1236 XSPerfAccumulate("s1_tlb_miss", s1_fire && s1_tlb_miss) 1237 XSPerfAccumulate("s1_tlb_miss_first_issue", s1_fire && s1_tlb_miss && s1_in.isFirstIssue) 1238 XSPerfAccumulate("s1_stall_out", s1_valid && !s1_can_go) 1239 XSPerfAccumulate("s1_dly_err", s1_valid && s1_fast_rep_dly_err) 1240 1241 XSPerfAccumulate("s2_in_valid", s2_valid) 1242 XSPerfAccumulate("s2_in_fire", s2_fire) 1243 XSPerfAccumulate("s2_in_fire_first_issue", s2_fire && s2_in.isFirstIssue) 1244 XSPerfAccumulate("s2_dcache_miss", s2_fire && io.dcache.resp.bits.miss) 1245 XSPerfAccumulate("s2_dcache_miss_first_issue", s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue) 1246 XSPerfAccumulate("s2_dcache_real_miss_first_issue", s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue) 1247 XSPerfAccumulate("s2_full_forward", s2_fire && s2_full_fwd) 1248 XSPerfAccumulate("s2_dcache_miss_full_forward", s2_fire && s2_dcache_miss) 1249 XSPerfAccumulate("s2_fwd_frm_d_can", s2_valid && s2_fwd_frm_d_chan) 1250 XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr", s2_valid && s2_fwd_frm_d_chan_or_mshr) 1251 XSPerfAccumulate("s2_stall_out", s2_fire && !s2_can_go) 1252 XSPerfAccumulate("s2_prefetch", s2_fire && s2_prf) 1253 XSPerfAccumulate("s2_prefetch_ignored", s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict 1254 XSPerfAccumulate("s2_prefetch_miss", s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req miss in l1 1255 XSPerfAccumulate("s2_prefetch_hit", s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hit in l1 1256 XSPerfAccumulate("s2_prefetch_accept", s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it 1257 XSPerfAccumulate("s2_forward_req", s2_fire && s2_in.forward_tlDchannel) 1258 XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid) 1259 XSPerfAccumulate("s2_successfully_forward_mshr", s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid) 1260 1261 XSPerfAccumulate("s3_fwd_frm_d_chan", s3_valid && s3_fwd_frm_d_chan_valid) 1262 1263 XSPerfAccumulate("load_to_load_forward", s1_try_ptr_chasing && !s1_ptr_chasing_canceled) 1264 XSPerfAccumulate("load_to_load_forward_try", s1_try_ptr_chasing) 1265 XSPerfAccumulate("load_to_load_forward_fail", s1_cancel_ptr_chasing) 1266 XSPerfAccumulate("load_to_load_forward_fail_cancelled", s1_cancel_ptr_chasing && s1_ptr_chasing_canceled) 1267 XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match) 1268 XSPerfAccumulate("load_to_load_forward_fail_op_not_ld", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld) 1269 XSPerfAccumulate("load_to_load_forward_fail_addr_align", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned) 1270 XSPerfAccumulate("load_to_load_forward_fail_set_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch) 1271 1272 // bug lyq: some signals in perfEvents are no longer suitable for the current MemBlock design 1273 // hardware performance counter 1274 val perfEvents = Seq( 1275 ("load_s0_in_fire ", s0_fire ), 1276 
("load_to_load_forward ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled ), 1277 ("stall_dcache ", s0_valid && s0_can_go && !io.dcache.req.ready ), 1278 ("load_s1_in_fire ", s0_fire ), 1279 ("load_s1_tlb_miss ", s1_fire && io.tlb.resp.bits.miss ), 1280 ("load_s2_in_fire ", s1_fire ), 1281 ("load_s2_dcache_miss ", s2_fire && io.dcache.resp.bits.miss ), 1282 ) 1283 generatePerfEvent() 1284 1285 when(io.ldout.fire){ 1286 XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc) 1287 } 1288 // end 1289}