xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision b8b991d636e3eae0d6cc2e36846166652699f0c2)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.mem
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import utils._
23import utility._
24import xiangshan.ExceptionNO._
25import xiangshan._
26import xiangshan.backend.fu.PMPRespBundle
27import xiangshan.backend.rob.{DebugLsInfoBundle, LsTopdownInfo, RobPtr}
28import xiangshan.cache._
29import xiangshan.cache.dcache.ReplayCarry
30import xiangshan.cache.mmu.{TlbCmd, TlbReq, TlbRequestIO, TlbResp}
31import xiangshan.mem.mdp._
32
33class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle with HasDCacheParameters {
34  // mshr refill index
35  val mshr_id         = UInt(log2Up(cfg.nMissEntries).W)
36  // get full data from store queue and sbuffer
37  val full_fwd        = Bool()
38  // sqIdx of the store whose data this load is waiting for
39  val data_inv_sq_idx = new SqPtr
40  // sqIdx of the store whose address this load is waiting for
41  val addr_inv_sq_idx = new SqPtr
42  // replay carry
43  val rep_carry       = new ReplayCarry
44  // data in last beat
45  val last_beat       = Bool()
46  // replay cause
47  val cause           = Vec(LoadReplayCauses.allCauses, Bool())
48  // performance debug information
49  val debug           = new PerfDebugInfo
50
51  // alias
52  def tlb_miss      = cause(LoadReplayCauses.C_TM)
53  def nuke          = cause(LoadReplayCauses.C_NK)
54  def mem_amb       = cause(LoadReplayCauses.C_MA)
55  def fwd_fail      = cause(LoadReplayCauses.C_FF)
56  def dcache_miss   = cause(LoadReplayCauses.C_DM)
57  def bank_conflict = cause(LoadReplayCauses.C_BC)
58  def dcache_rep    = cause(LoadReplayCauses.C_DR)
59  def rar_nack      = cause(LoadReplayCauses.C_RAR)
60  def raw_nack      = cause(LoadReplayCauses.C_RAW)
61  def need_rep      = cause.asUInt.orR
62}
63
64
65class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
66  val ldin            = DecoupledIO(new LqWriteBundle)
67  val uncache         = Flipped(DecoupledIO(new ExuOutput))
68  val ld_raw_data     = Input(new LoadDataFromLQBundle)
69  val forward         = new PipeLoadForwardQueryIO
70  val stld_nuke_query = new LoadNukeQueryIO
71  val ldld_nuke_query = new LoadNukeQueryIO
72  val trigger         = Flipped(new LqTriggerIO)
73}
74
75class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
76  val valid      = Bool()
77  val data       = UInt(XLEN.W) // load to load fast path is limited to ld (64 bit) used as vaddr src1 only
78  val dly_ld_err = Bool()
79}
80
81class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle {
82  val tdata2      = Input(UInt(64.W))
83  val matchType   = Input(UInt(2.W))
84  val tEnable     = Input(Bool()) // timing is calculated before this
85  val addrHit     = Output(Bool())
86  val lastDataHit = Output(Bool())
87}
88
89class LoadUnit(implicit p: Parameters) extends XSModule
90  with HasLoadHelper
91  with HasPerfEvents
92  with HasDCacheParameters
93  with HasCircularQueuePtrHelper
94{
95  val io = IO(new Bundle() {
96    // control
97    val redirect      = Flipped(ValidIO(new Redirect))
98    val csrCtrl       = Flipped(new CustomCSRCtrlIO)
99
100    // int issue path
101    val ldin          = Flipped(Decoupled(new ExuInput))
102    val ldout         = Decoupled(new ExuOutput)
103    val rsIdx         = Input(UInt())
104    val isFirstIssue  = Input(Bool())
105
106    // data path
107    val tlb           = new TlbRequestIO(2)
108    val pmp           = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb resp now
109    val dcache        = new DCacheLoadIO
110    val sbuffer       = new LoadForwardQueryIO
111    val lsq           = new LoadToLsqIO
112    val tl_d_channel  = Input(new DcacheToLduForwardIO)
113    val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
114    val refill        = Flipped(ValidIO(new Refill))
115    val l2_hint       = Input(Valid(new L2ToL1Hint))
116
117    // fast wakeup
118    val fast_uop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1, sent to RS in load_s2
119
120    // trigger
121    val trigger = Vec(3, new LoadUnitTriggerIO)
122
123    // prefetch
124    val prefetch_train = ValidIO(new LdPrefetchTrainBundle())  // provide prefetch info
125    val prefetch_req   = Flipped(ValidIO(new L1PrefetchReq))  // hardware prefetch to l1 cache req
126
127    // load to load fast path
128    val l2l_fwd_in    = Input(new LoadToLoadIO)
129    val l2l_fwd_out   = Output(new LoadToLoadIO)
130    val ld_fast_match = Input(Bool())
131    val ld_fast_imm   = Input(UInt(12.W))
132
133    // rs feedback
134    val feedback_fast = ValidIO(new RSFeedback) // stage 2
135    val feedback_slow = ValidIO(new RSFeedback) // stage 3
136
137    // load ecc error
138    val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different
139
140    // schedule error query
141    val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))
142
143    // queue-based replay
144    val replay       = Flipped(Decoupled(new LsPipelineBundle))
145    val lq_rep_full  = Input(Bool())
146
147    // misc
148    val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch
149
150    // Load fast replay path
151    val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
152    val fast_rep_out = Decoupled(new LqWriteBundle)
153
154    // perf
155    val debug_ls      = Output(new DebugLsInfoBundle)
156    val lsTopdownInfo = Output(new LsTopdownInfo)
157  })
158
159  val s1_ready, s2_ready, s3_ready = WireInit(false.B)
160
161  // Pipeline
162  // --------------------------------------------------------------------------------
163  // stage 0
164  // --------------------------------------------------------------------------------
165  // generate addr, use addr to query DCache and DTLB
166  val s0_valid         = Wire(Bool())
167  val s0_kill          = Wire(Bool())
168  val s0_vaddr         = Wire(UInt(VAddrBits.W))
169  val s0_mask          = Wire(UInt(8.W))
170  val s0_uop           = Wire(new MicroOp)
171  val s0_has_rob_entry = Wire(Bool())
172  val s0_rsIdx         = Wire(UInt(log2Up(IssQueSize).W))
173  val s0_sqIdx         = Wire(new SqPtr)
174  val s0_mshrid        = Wire(UInt())
175  val s0_try_l2l       = Wire(Bool())
176  val s0_rep_carry     = Wire(new ReplayCarry)
177  val s0_isFirstIssue  = Wire(Bool())
178  val s0_fast_rep      = Wire(Bool())
179  val s0_ld_rep        = Wire(Bool())
180  val s0_l2l_fwd       = Wire(Bool())
181  val s0_sched_idx     = Wire(UInt())
182  val s0_can_go        = s1_ready
183  val s0_fire          = s0_valid && s0_can_go
184  val s0_out           = Wire(new LqWriteBundle)
185
186  // load flow select/gen, priority from high to low:
187  // src0: load fast replay (io.fast_rep_in)
188  // src1: load replayed by LSQ (io.replay)
189  // src2: hardware prefetch from prefetcher (high confidence) (io.prefetch_req)
190  // src3: int read / software prefetch first issue from RS (io.ldin)
191  // src4: vec read first issue from RS (TODO)
192  // src5: load try pointer chasing when no issued or replayed load (io.l2l_fwd_in)
193  // src6: hardware prefetch from prefetcher (low confidence) (io.prefetch_req)
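  // The per-source ready signals below realize this priority: a source is ready
  // only when every higher-priority source is invalid, so the *_select signals
  // further down form a one-hot vector. A minimal sketch of the same pattern
  // (illustrative only, names are hypothetical):
  //   val valids  = Seq(fastRepV, lsqRepV, hiPrfV, intIssV, vecIssV, l2lFwdV, loPrfV)
  //   val selects = valids.zipWithIndex.map { case (v, i) =>
  //     v && !valids.take(i).foldLeft(false.B)(_ || _)
  //   }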
194  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
195  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
196  val s0_ld_rep_valid        = io.replay.valid && !s0_rep_stall
197  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
198  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
199  val s0_vec_iss_valid       = WireInit(false.B) // TODO
200  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid
201  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
202  dontTouch(s0_ld_fast_rep_valid)
203  dontTouch(s0_ld_rep_valid)
204  dontTouch(s0_high_conf_prf_valid)
205  dontTouch(s0_int_iss_valid)
206  dontTouch(s0_vec_iss_valid)
207  dontTouch(s0_l2l_fwd_valid)
208  dontTouch(s0_low_conf_prf_valid)
209
210  // load flow source ready
211  val s0_ld_fast_rep_ready   = WireInit(true.B)
212  val s0_ld_rep_ready        = !s0_ld_fast_rep_valid
213  val s0_high_conf_prf_ready = !s0_ld_fast_rep_valid &&
214                               !s0_ld_rep_valid
215
216  val s0_int_iss_ready       = !s0_ld_fast_rep_valid &&
217                               !s0_ld_rep_valid &&
218                               !s0_high_conf_prf_valid
219
220  val s0_vec_iss_ready       = !s0_ld_fast_rep_valid &&
221                               !s0_ld_rep_valid &&
222                               !s0_high_conf_prf_valid &&
223                               !s0_int_iss_valid
224
225  val s0_l2l_fwd_ready       = !s0_ld_fast_rep_valid &&
226                               !s0_ld_rep_valid &&
227                               !s0_high_conf_prf_valid &&
228                               !s0_int_iss_valid &&
229                               !s0_vec_iss_valid
230
231  val s0_low_conf_prf_ready  = !s0_ld_fast_rep_valid &&
232                               !s0_ld_rep_valid &&
233                               !s0_high_conf_prf_valid &&
234                               !s0_int_iss_valid &&
235                               !s0_vec_iss_valid &&
236                               !s0_l2l_fwd_valid
237  dontTouch(s0_ld_fast_rep_ready)
238  dontTouch(s0_ld_rep_ready)
239  dontTouch(s0_high_conf_prf_ready)
240  dontTouch(s0_int_iss_ready)
241  dontTouch(s0_vec_iss_ready)
242  dontTouch(s0_l2l_fwd_ready)
243  dontTouch(s0_low_conf_prf_ready)
244
245  // load flow source select (OH)
246  val s0_ld_fast_rep_select = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
247  val s0_ld_rep_select      = s0_ld_rep_valid && s0_ld_rep_ready
248  val s0_hw_prf_select      = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
249                              s0_low_conf_prf_ready && s0_low_conf_prf_valid
250  val s0_int_iss_select     = s0_int_iss_ready && s0_int_iss_valid
251  val s0_vec_iss_select     = s0_vec_iss_ready && s0_vec_iss_valid
252  val s0_l2l_fwd_select     = s0_l2l_fwd_ready && s0_l2l_fwd_valid
253  assert(!s0_vec_iss_select) // to be added
254  dontTouch(s0_ld_fast_rep_select)
255  dontTouch(s0_ld_rep_select)
256  dontTouch(s0_hw_prf_select)
257  dontTouch(s0_int_iss_select)
258  dontTouch(s0_vec_iss_select)
259  dontTouch(s0_l2l_fwd_select)
260
261  s0_valid := (s0_ld_fast_rep_valid ||
262               s0_ld_rep_valid ||
263               s0_high_conf_prf_valid ||
264               s0_int_iss_valid ||
265               s0_vec_iss_valid ||
266               s0_l2l_fwd_valid ||
267               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill
268
269  // actually do pointer chasing only when S0's output can go and the dcache is ready
270  val s0_try_ptr_chasing      = s0_l2l_fwd_select
271  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
272  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
273  val s0_ptr_chasing_canceled = WireInit(false.B)
274  s0_kill := s0_ptr_chasing_canceled || (s0_out.uop.robIdx.needFlush(io.redirect) && !s0_try_ptr_chasing)
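  // Pointer chasing address trick: the low 6 bits of the forwarded load data are
  // added to the low 6 bits of the immediate with a carry-out (`+&` yields a 7-bit
  // sum). Bit 6 of that sum, or any nonzero bit in imm(11, 6), means base + offset
  // lands in a different cache set than the one accessed speculatively, and the
  // attempt is cancelled in stage 1 (see s1_addr_mismatch).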
275
276  // prefetch related ctrl signal
277  val s0_prf    = Wire(Bool())
278  val s0_prf_rd = Wire(Bool())
279  val s0_prf_wr = Wire(Bool())
280  val s0_hw_prf = s0_hw_prf_select
281
282  // query DTLB
283  io.tlb.req.valid                   := s0_valid
284  io.tlb.req.bits.cmd                := Mux(s0_prf,
285                                         Mux(s0_prf_wr, TlbCmd.write, TlbCmd.read),
286                                         TlbCmd.read
287                                       )
288  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_vaddr)
289  io.tlb.req.bits.size               := LSUOpType.size(s0_uop.ctrl.fuOpType)
290  io.tlb.req.bits.kill               := s0_kill
291  io.tlb.req.bits.memidx.is_ld       := true.B
292  io.tlb.req.bits.memidx.is_st       := false.B
293  io.tlb.req.bits.memidx.idx         := s0_uop.lqIdx.value
294  io.tlb.req.bits.debug.robIdx       := s0_uop.robIdx
295  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr is already a paddr and does not need to be translated
296  io.tlb.req.bits.debug.pc           := s0_uop.cf.pc
297  io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue
298
299  // query DCache
300  io.dcache.req.valid             := s0_valid
301  io.dcache.req.bits.cmd          := Mux(s0_prf_rd,
302                                      MemoryOpConstants.M_PFR,
303                                      Mux(s0_prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
304                                    )
305  io.dcache.req.bits.vaddr        := s0_vaddr
306  io.dcache.req.bits.mask         := s0_mask
307  io.dcache.req.bits.data         := DontCare
308  io.dcache.req.bits.isFirstIssue := s0_isFirstIssue
309  io.dcache.req.bits.instrtype    := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
310  io.dcache.req.bits.debug_robIdx := s0_uop.robIdx.value
311  io.dcache.req.bits.replayCarry  := s0_rep_carry
312  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
313
314  // load flow priority mux
315  def fromNullSource() = {
316    s0_vaddr         := 0.U
317    s0_mask          := 0.U
318    s0_uop           := 0.U.asTypeOf(new MicroOp)
319    s0_try_l2l       := false.B
320    s0_has_rob_entry := false.B
321    s0_sqIdx         := 0.U.asTypeOf(new SqPtr)
322    s0_rsIdx         := 0.U
323    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
324    s0_mshrid        := 0.U
325    s0_isFirstIssue  := false.B
326    s0_fast_rep      := false.B
327    s0_ld_rep        := false.B
328    s0_l2l_fwd       := false.B
329    s0_prf           := false.B
330    s0_prf_rd        := false.B
331    s0_prf_wr        := false.B
332    s0_sched_idx     := 0.U
333  }
334
335  def fromFastReplaySource(src: LqWriteBundle) = {
336    s0_vaddr         := src.vaddr
337    s0_mask          := src.mask
338    s0_uop           := src.uop
339    s0_try_l2l       := false.B
340    s0_has_rob_entry := src.hasROBEntry
341    s0_sqIdx         := src.uop.sqIdx
342    s0_rep_carry     := src.rep_info.rep_carry
343    s0_mshrid        := src.rep_info.mshr_id
344    s0_rsIdx         := src.rsIdx
345    s0_isFirstIssue  := false.B
346    s0_fast_rep      := true.B
347    s0_ld_rep        := src.isLoadReplay
348    s0_l2l_fwd       := false.B
349    s0_prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
350    s0_prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
351    s0_prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
352    s0_sched_idx     := src.schedIndex
353  }
354
355  def fromNormalReplaySource(src: LsPipelineBundle) = {
356    s0_vaddr         := src.vaddr
357    s0_mask          := genWmask(src.vaddr, src.uop.ctrl.fuOpType(1, 0))
358    s0_uop           := src.uop
359    s0_try_l2l       := false.B
360    s0_has_rob_entry := true.B
361    s0_sqIdx         := src.uop.sqIdx
362    s0_rsIdx         := src.rsIdx
363    s0_rep_carry     := src.replayCarry
364    s0_mshrid        := src.mshrid
365    s0_isFirstIssue  := src.isFirstIssue
366    s0_fast_rep      := false.B
367    s0_ld_rep        := true.B
368    s0_l2l_fwd       := false.B
369    s0_prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
370    s0_prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
371    s0_prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
372    s0_sched_idx     := src.schedIndex
373  }
374
375  def fromPrefetchSource(src: L1PrefetchReq) = {
376    s0_vaddr         := src.getVaddr()
377    s0_mask          := 0.U
378    s0_uop           := DontCare
379    s0_try_l2l       := false.B
380    s0_has_rob_entry := false.B
381    s0_sqIdx         := DontCare
382    s0_rsIdx         := DontCare
383    s0_rep_carry     := DontCare
384    s0_mshrid        := DontCare
385    s0_isFirstIssue  := false.B
386    s0_fast_rep      := false.B
387    s0_ld_rep        := false.B
388    s0_l2l_fwd       := false.B
389    s0_prf           := true.B
390    s0_prf_rd        := !src.is_store
391    s0_prf_wr        := src.is_store
392    s0_sched_idx     := 0.U
393  }
394
395  def fromIntIssueSource(src: ExuInput) = {
396    s0_vaddr         := src.src(0) + SignExt(src.uop.ctrl.imm(11, 0), VAddrBits)
397    s0_mask          := genWmask(s0_vaddr, src.uop.ctrl.fuOpType(1,0))
398    s0_uop           := src.uop
399    s0_try_l2l       := false.B
400    s0_has_rob_entry := true.B
401    s0_sqIdx         := src.uop.sqIdx
402    s0_rsIdx         := io.rsIdx
403    s0_rep_carry     := DontCare
404    s0_mshrid        := DontCare
405    s0_isFirstIssue  := true.B
406    s0_fast_rep      := false.B
407    s0_ld_rep        := false.B
408    s0_l2l_fwd       := false.B
409    s0_prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
410    s0_prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
411    s0_prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
412    s0_sched_idx     := 0.U
413  }
414
415  def fromVecIssueSource() = {
416    s0_vaddr         := 0.U
417    s0_mask          := 0.U
418    s0_uop           := 0.U.asTypeOf(new MicroOp)
419    s0_try_l2l       := false.B
420    s0_has_rob_entry := false.B
421    s0_sqIdx         := 0.U.asTypeOf(new SqPtr)
422    s0_rsIdx         := 0.U
423    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
424    s0_mshrid        := 0.U
425    s0_isFirstIssue  := false.B
426    s0_fast_rep      := false.B
427    s0_ld_rep        := false.B
428    s0_l2l_fwd       := false.B
429    s0_prf           := false.B
430    s0_prf_rd        := false.B
431    s0_prf_wr        := false.B
432    s0_sched_idx     := 0.U
433  }
434
435  def fromLoadToLoadSource(src: LoadToLoadIO) = {
436    s0_vaddr              := Cat(io.l2l_fwd_in.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0))
437    s0_mask               := genWmask(0.U, LSUOpType.ld)
438    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
439    // Assume the pointer chasing is always ld.
440    s0_uop.ctrl.fuOpType  := LSUOpType.ld
441    s0_try_l2l            := s0_l2l_fwd_select
442    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer chasing
443    // because these signals will be updated in S1
444    s0_has_rob_entry      := false.B
445    s0_sqIdx              := DontCare
446    s0_rsIdx              := DontCare
447    s0_mshrid             := DontCare
448    s0_rep_carry          := DontCare
449    s0_isFirstIssue       := true.B
450    s0_fast_rep           := false.B
451    s0_ld_rep             := false.B
452    s0_l2l_fwd            := true.B
453    s0_prf                := false.B
454    s0_prf_rd             := false.B
455    s0_prf_wr             := false.B
456    s0_sched_idx          := 0.U
457  }
458
459  // set default
460  s0_uop := DontCare
461  when (s0_ld_fast_rep_select)  { fromFastReplaySource(io.fast_rep_in.bits)  }
462  .elsewhen (s0_ld_rep_select)  { fromNormalReplaySource(io.replay.bits) }
463  .elsewhen (s0_hw_prf_select)  { fromPrefetchSource(io.prefetch_req.bits)   }
464  .elsewhen (s0_int_iss_select) { fromIntIssueSource(io.ldin.bits)           }
465  .elsewhen (s0_vec_iss_select) { fromVecIssueSource()                       }
466  .otherwise {
467    if (EnableLoadToLoadForward) {
468      fromLoadToLoadSource(io.l2l_fwd_in)
469    } else {
470      fromNullSource()
471    }
472  }
473
474  // address align check
475  val s0_addr_aligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
476    "b00".U   -> true.B,                   //b
477    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
478    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
479    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
480  ))
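  // Example: fuOpType(1, 0) encodes the access size (00: byte, 01: half, 10: word,
  // 11: double). A double-word load at a vaddr ending in 0x4 has vaddr(2, 0) = "b100",
  // so s0_addr_aligned is false and loadAddrMisaligned is raised below.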
481
482  // accept load flow if dcache ready (tlb is always ready)
483  // TODO: prefetch needs writeback to loadQueueFlag
484  s0_out               := DontCare
485  s0_out.rsIdx         := s0_rsIdx
486  s0_out.vaddr         := s0_vaddr
487  s0_out.mask          := s0_mask
488  s0_out.uop           := s0_uop
489  s0_out.isFirstIssue  := s0_isFirstIssue
490  s0_out.hasROBEntry   := s0_has_rob_entry
491  s0_out.isPrefetch    := s0_prf
492  s0_out.isHWPrefetch  := s0_hw_prf
493  s0_out.isFastReplay  := s0_fast_rep
494  s0_out.isLoadReplay  := s0_ld_rep
495  s0_out.isFastPath    := s0_l2l_fwd
496  s0_out.mshrid        := s0_mshrid
497  s0_out.uop.cf.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned
498  s0_out.forward_tlDchannel := io.replay.valid && io.replay.bits.forward_tlDchannel
499  when(io.tlb.req.valid && s0_isFirstIssue) {
500    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
501  }.otherwise{
502    s0_out.uop.debugInfo.tlbFirstReqTime := s0_uop.debugInfo.tlbFirstReqTime
503  }
504  s0_out.schedIndex     := s0_sched_idx
505
506  // load fast replay
507  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)
508
509  // load flow source ready
510  // accept load flow from the load replay queue when there is no fast replay
511  // and the replayed load is not stalled in favor of an older load issued from RS (s0_rep_stall)
512  io.replay.ready := (s0_can_go && io.dcache.req.ready && s0_ld_rep_ready && !s0_rep_stall)
513
514  // accept load flow from rs when:
515  // 1) there is no fast replay and no lsq-replayed load
516  // 2) there is no high confidence prefetch request
517  io.ldin.ready := (s0_can_go && io.dcache.req.ready && s0_int_iss_ready)
518
519  // for hw prefetch load flow feedback, to be added later
520  // io.prefetch_in.ready := s0_hw_prf_select
521
522  // dcache replacement extra info
523  // TODO: should prefetch load update replacement?
524  io.dcache.replacementUpdated := Mux(s0_ld_rep_select, io.replay.bits.replacementUpdated, false.B)
525
526  XSDebug(io.dcache.req.fire,
527    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
528  )
529  XSDebug(s0_valid,
530    p"S0: pc ${Hexadecimal(s0_out.uop.cf.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
531    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")
532
533  // Pipeline
534  // --------------------------------------------------------------------------------
535  // stage 1
536  // --------------------------------------------------------------------------------
537  // TLB resp (send paddr to dcache)
538  val s1_valid      = RegInit(false.B)
539  val s1_in         = Wire(new LqWriteBundle)
540  val s1_out        = Wire(new LqWriteBundle)
541  val s1_kill       = Wire(Bool())
542  val s1_can_go     = s2_ready
543  val s1_fire       = s1_valid && !s1_kill && s1_can_go
544
545  s1_ready := !s1_valid || s1_kill || s2_ready
546  when (s0_fire) { s1_valid := true.B }
547  .elsewhen (s1_fire) { s1_valid := false.B }
548  .elsewhen (s1_kill) { s1_valid := false.B }
549  s1_in   := RegEnable(s0_out, s0_fire)
550
551  val s1_fast_rep_kill = RegEnable(io.fast_rep_in.bits.delayedLoadError, s0_fire) && s1_in.isFastReplay
552  val s1_l2l_fwd_kill  = RegEnable(io.l2l_fwd_in.dly_ld_err, s0_fire) && s1_in.isFastPath
553  s1_kill := s1_l2l_fwd_kill ||
554             s1_in.uop.robIdx.needFlush(io.redirect) ||
555             RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid)
556
557  val s1_vaddr_hi         = Wire(UInt())
558  val s1_vaddr_lo         = Wire(UInt())
559  val s1_vaddr            = Wire(UInt())
560  val s1_paddr_dup_lsu    = Wire(UInt())
561  val s1_paddr_dup_dcache = Wire(UInt())
562  val s1_exception        = ExceptionNO.selectByFu(s1_out.uop.cf.exceptionVec, lduCfg).asUInt.orR   // af & pf exceptions are updated below.
563  val s1_tlb_miss         = io.tlb.resp.bits.miss
564  val s1_prf              = s1_in.isPrefetch
565  val s1_hw_prf           = s1_in.isHWPrefetch
566  val s1_sw_prf           = s1_prf && !s1_hw_prf
567  val s1_tlb_memidx       = io.tlb.resp.bits.memidx
568
569  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
570  s1_vaddr_lo         := s1_in.vaddr(5, 0)
571  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
572  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
573  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)
574
575  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
576    // printf("load idx = %d\n", s1_tlb_memidx.idx)
577    s1_out.uop.debugInfo.tlbRespTime := GTimer()
578  }
579
580  io.tlb.req_kill := s1_kill || s1_fast_rep_kill
581  io.tlb.resp.ready := true.B
582
583  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
584  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
585  io.dcache.s1_kill             := s1_kill || s1_fast_rep_kill || s1_tlb_miss || s1_exception
586
587  // store to load forwarding
588  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf)
589  io.sbuffer.vaddr := s1_vaddr
590  io.sbuffer.paddr := s1_paddr_dup_lsu
591  io.sbuffer.uop   := s1_in.uop
592  io.sbuffer.sqIdx := s1_in.uop.sqIdx
593  io.sbuffer.mask  := s1_in.mask
594  io.sbuffer.pc    := s1_in.uop.cf.pc // FIXME: remove it
595
596  io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf)
597  io.lsq.forward.vaddr     := s1_vaddr
598  io.lsq.forward.paddr     := s1_paddr_dup_lsu
599  io.lsq.forward.uop       := s1_in.uop
600  io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
601  io.lsq.forward.sqIdxMask := DontCare
602  io.lsq.forward.mask      := s1_in.mask
603  io.lsq.forward.pc        := s1_in.uop.cf.pc // FIXME: remove it
604
605  // st-ld violation query
606  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
607                       io.stld_nuke_query(w).valid && // query valid
608                       isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
609                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
610                       (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain
611                      })).asUInt.orR && !s1_tlb_miss
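  // The nuke check compares physical addresses at 8-byte granularity (bits
  // PAddrBits-1 downto 3) and requires the store mask to overlap the load mask;
  // a tlb miss suppresses it because the load paddr cannot be trusted yet.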
612  // Generate forwardMaskFast to wake up insts earlier
613  val s1_fwd_mask_fast = ((~(io.lsq.forward.forwardMaskFast.asUInt | io.sbuffer.forwardMaskFast.asUInt)).asUInt & s1_in.mask) === 0.U
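  // s1_fwd_mask_fast is true when every byte the load reads is already covered by
  // the fast forward masks from the lsq and the sbuffer, e.g. a lb with mask
  // 0b0000_0010 only needs bit 1 of the combined mask; per the comment above it is
  // meant to wake up dependent instructions earlier.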
614
615  s1_out                  := s1_in
616  s1_out.vaddr            := s1_vaddr
617  s1_out.paddr            := s1_paddr_dup_lsu
618  s1_out.tlbMiss          := s1_tlb_miss
619  s1_out.ptwBack          := io.tlb.resp.bits.ptwBack
620  s1_out.rsIdx            := s1_in.rsIdx
621  s1_out.rep_info.debug   := s1_in.uop.debugInfo
622  s1_out.rep_info.nuke    := s1_nuke && !s1_sw_prf
623  s1_out.lateKill         := s1_fast_rep_kill
624  s1_out.delayedLoadError := s1_l2l_fwd_kill || s1_fast_rep_kill
625
626  when (!s1_fast_rep_kill) {
627    // the current ori test can produce the ldest == 0 case; the code below will be modified in the future.
628    // af & pf exceptions are updated here
629    s1_out.uop.cf.exceptionVec(loadPageFault)   := io.tlb.resp.bits.excp(0).pf.ld
630    s1_out.uop.cf.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld
631  } .otherwise {
632    s1_out.uop.cf.exceptionVec(loadAddrMisaligned) := false.B
633    s1_out.uop.cf.exceptionVec(loadAccessFault)    := s1_fast_rep_kill
634  }
635
636  // pointer chasing
637  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
638  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
639  val s1_fu_op_type_not_ld     = WireInit(false.B)
640  val s1_not_fast_match        = WireInit(false.B)
641  val s1_addr_mismatch         = WireInit(false.B)
642  val s1_addr_misaligned       = WireInit(false.B)
643  val s1_ptr_chasing_canceled  = WireInit(false.B)
644  val s1_cancel_ptr_chasing    = WireInit(false.B)
645
646  if (EnableLoadToLoadForward) {
647    // Sometimes, we need to cancel the load-load forwarding.
648    // These can be put at S0 if timing is bad at S1.
649    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
650    s1_addr_mismatch      := s1_ptr_chasing_vaddr(6) || RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
651    // Case 1: the address is not 64-bit aligned or the fuOpType is not LD
652    s1_addr_misaligned    := s1_ptr_chasing_vaddr(2, 0).orR
653    s1_fu_op_type_not_ld  := io.ldin.bits.uop.ctrl.fuOpType =/= LSUOpType.ld
654    // Case 2: this is not a valid load-load pair
655    s1_not_fast_match := RegEnable(!io.ld_fast_match, s0_try_ptr_chasing)
656    // Case 3: this load-load uop is cancelled
657    s1_ptr_chasing_canceled := !io.ldin.valid
658
659    when (s1_try_ptr_chasing) {
660      s1_cancel_ptr_chasing := s1_addr_mismatch || s1_addr_misaligned || s1_fu_op_type_not_ld || s1_not_fast_match || s1_ptr_chasing_canceled
661
662      s1_in.uop           := io.ldin.bits.uop
663      s1_in.rsIdx         := io.rsIdx
664      s1_in.isFirstIssue  := io.isFirstIssue
665      s1_vaddr_lo         := Cat(s1_ptr_chasing_vaddr(5, 3), 0.U(3.W))
666      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_ptr_chasing_vaddr(5, 3), 0.U(3.W))
667      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_ptr_chasing_vaddr(5, 3), 0.U(3.W))
668
669      // record the tlb access time when the data is obtained, to keep the latency calculation correct (strictly it should not be recorded here, because this path does not use the tlb)
670      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
671      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
672    }
673    when (s1_cancel_ptr_chasing) {
674      s1_kill := true.B
675    }.otherwise {
676      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire
677      when (s1_try_ptr_chasing) {
678        io.ldin.ready := true.B
679      }
680    }
681  }
682
683  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
684  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
685  // to enable load-load, sqIdxMask must be calculated based on ldin.uop
686  // If the timing here is not OK, load-load forwarding has to be disabled.
687  // Or we calculate sqIdxMask at RS??
688  io.lsq.forward.sqIdxMask := s1_sqIdx_mask
689  if (EnableLoadToLoadForward) {
690    when (s1_try_ptr_chasing) {
691      io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize)
692    }
693  }
694
695  io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel
696  io.forward_mshr.mshrid := s1_out.mshrid
697  io.forward_mshr.paddr  := s1_out.paddr
698
699  XSDebug(s1_valid,
700    p"S1: pc ${Hexadecimal(s1_out.uop.cf.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
701    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")
702
703  // Pipeline
704  // --------------------------------------------------------------------------------
705  // stage 2
706  // --------------------------------------------------------------------------------
707  // s2: DCache resp
708  val s2_valid  = RegInit(false.B)
709  val s2_kill   = Wire(Bool())
710  val s2_can_go = s3_ready
711  val s2_fire   = s2_valid && !s2_kill && s2_can_go
712  val s2_in     = Wire(new LqWriteBundle)
713  val s2_out    = Wire(new LqWriteBundle)
714
715  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
716  s2_ready := !s2_valid || s2_kill || s3_ready
717  when (s1_fire) { s2_valid := true.B }
718  .elsewhen (s2_fire) { s2_valid := false.B }
719  .elsewhen (s2_kill) { s2_valid := false.B }
720  s2_in := RegEnable(s1_out, s1_fire)
721
722  val s2_pmp = WireInit(io.pmp)
723  val s2_static_pm = RegNext(io.tlb.resp.bits.static_pm)
724  when (s2_static_pm.valid) {
725    s2_pmp.ld    := false.B
726    s2_pmp.st    := false.B
727    s2_pmp.instr := false.B
728    s2_pmp.mmio  := s2_static_pm.bits
729  }
730  val s2_prf    = s2_in.isPrefetch
731  val s2_hw_prf = s2_in.isHWPrefetch
732
733  // exceptions that may make the load addr invalid / illegal
734  // if such an exception happens, the inst and its exception info
735  // will be force-written back to the rob
736  val s2_exception_vec = WireInit(s2_in.uop.cf.exceptionVec)
737  when (!s2_in.lateKill) {
738    s2_exception_vec(loadAccessFault) := s2_in.uop.cf.exceptionVec(loadAccessFault) || s2_pmp.ld
739    // soft prefetch will not trigger any exception (but ecc error interrupt may be triggered)
740    when (s2_prf || s2_in.tlbMiss) {
741      s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
742    }
743  }
744  val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, lduCfg).asUInt.orR
745
746  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
747  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward()
748  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)
749  val s2_cache_hit = io.dcache.s2_hit || s2_fwd_frm_d_chan_or_mshr
750
751  // writeback access fault caused by ecc error / bus error
752  // * ecc data error is slow to generate, so we will not use it until load stage 3
753  // * in load stage 3, an extra delayed-error signal (s3_dly_ld_err) is used to report it
754  val s2_actually_mmio   = s2_pmp.mmio
755  val s2_mmio            = !s2_prf && s2_actually_mmio && !s2_exception && !s2_in.tlbMiss
756  val s2_full_fwd        = Wire(Bool())
757  val s2_cache_miss      = io.dcache.resp.bits.miss && !s2_fwd_frm_d_chan_or_mshr
758  val s2_mq_nack         = io.dcache.s2_mq_nack
759  val s2_bank_conflict   = io.dcache.s2_bank_conflict && !io.dcache.resp.bits.miss && !s2_full_fwd
760  val s2_wpu_pred_fail   = io.dcache.s2_wpu_pred_fail
761  val s2_cache_rep       = s2_bank_conflict || s2_wpu_pred_fail
762  val s2_cache_handled   = io.dcache.resp.bits.handled
763  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) && io.dcache.resp.bits.tag_error
764  val s2_fwd_fail        = io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid
765  val s2_mem_amb         = s2_in.uop.cf.storeSetHit && io.lsq.forward.addrInvalid && !s2_mmio && !s2_prf
766  val s2_data_inv        = io.lsq.forward.dataInvalid && !s2_exception
767  val s2_dcache_kill     = s2_pmp.ld || s2_pmp.mmio
768  val s2_troublem        = !s2_exception && !s2_mmio && !s2_prf && !s2_in.lateKill
769
770  io.dcache.resp.ready := true.B
771  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_mmio || s2_prf)
772  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")
773
774  // st-ld violation query
775  //  fast recovery is needed when:
776  //  1. the fast recovery query request is valid,
777  //  2. the load instruction is younger than the requesting store instruction,
778  //  3. the physical addresses match (8-byte granularity), and
779  //  4. the data masks overlap.
780  val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => {
781                        io.stld_nuke_query(w).valid && // query valid
782                        isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
783                        (s2_in.paddr(PAddrBits-1,3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
784                        (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain
785                      })).asUInt.orR || s2_in.rep_info.nuke
786
787  // fast replay needed
788  val s2_fast_rep = (s2_nuke || (!s2_mem_amb && !s2_in.tlbMiss && s2_cache_rep)) && s2_troublem
789
790  // need allocate new entry
791  val s2_can_query = !s2_in.tlbMiss &&
792                     !s2_mem_amb &&
793                     !s2_fast_rep &&
794                     !s2_in.rep_info.mem_amb &&
795                     s2_troublem
796
797  val s2_data_fwded = s2_cache_miss && (s2_full_fwd || s2_cache_tag_error)
798
799  // ld-ld violation query request
800  io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
801  io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
802  io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
803  io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
804  io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd, true.B, !s2_cache_miss) && !s2_cache_rep
805
806  // st-ld violation query request
807  io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
808  io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
809  io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
810  io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
811  io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd, true.B, !s2_cache_miss) && !s2_cache_rep
812
813  val s2_rar_nack = io.lsq.ldld_nuke_query.req.valid && !io.lsq.ldld_nuke_query.req.ready
814  val s2_raw_nack = io.lsq.stld_nuke_query.req.valid && !io.lsq.stld_nuke_query.req.ready
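  // rar_nack / raw_nack: the nuke query was issued but the load RAR / store RAW
  // queue could not accept it (req.ready low, typically because the queue is full);
  // the matching replay cause is set below so the load is retried later.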
815
816  // merge forward result
817  // lsq has higher priority than sbuffer
818  val s2_fwd_mask = Wire(Vec(8, Bool()))
819  val s2_fwd_data = Wire(Vec(8, UInt(8.W)))
820  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid
821  // generate XLEN/8 Muxs
822  for (i <- 0 until XLEN / 8) {
823    s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i)
824    s2_fwd_data(i) := Mux(io.lsq.forward.forwardMask(i), io.lsq.forward.forwardData(i), io.sbuffer.forwardData(i))
825  }
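  // Byte-wise merge example: if the lsq forwards byte 0 and the sbuffer forwards
  // bytes 0 and 1, byte 0 is taken from the lsq (higher priority) and byte 1 from
  // the sbuffer. s2_full_fwd holds only when the merged mask covers every byte in
  // s2_in.mask and the forwarded data is not marked invalid.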
826
827  XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
828    s2_in.uop.cf.pc,
829    io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt,
830    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
831  )
832
833  //
834  s2_out                     := s2_in
835  s2_out.data                := 0.U // data will be generated in load s3
836  s2_out.uop.ctrl.fpWen      := s2_in.uop.ctrl.fpWen && !s2_exception
837  s2_out.mmio                := s2_mmio
838  s2_out.uop.ctrl.flushPipe  := false.B // io.fast_uop.valid && s2_mmio
839  s2_out.uop.cf.exceptionVec := s2_exception_vec
840  s2_out.forwardMask         := s2_fwd_mask
841  s2_out.forwardData         := s2_fwd_data
842  s2_out.handledByMSHR       := s2_cache_handled
843  s2_out.miss                := s2_cache_miss && !s2_full_fwd && s2_troublem
844  s2_out.feedbacked          := io.feedback_fast.valid
845
846  // Generate replay signal caused by:
847  // * st-ld violation check
848  // * tlb miss
849  // * dcache replay
850  // * forward data invalid
851  // * dcache miss
852  s2_out.rep_info.tlb_miss        := s2_in.tlbMiss
853  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
854  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
855  s2_out.rep_info.fwd_fail        := s2_data_inv && s2_troublem
856  s2_out.rep_info.dcache_rep      := s2_cache_rep && s2_troublem
857  s2_out.rep_info.dcache_miss     := s2_out.miss
858  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
859  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
860  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
861  s2_out.rep_info.full_fwd        := s2_data_fwded
862  s2_out.rep_info.data_inv_sq_idx := io.lsq.forward.dataInvalidSqIdx
863  s2_out.rep_info.addr_inv_sq_idx := io.lsq.forward.addrInvalidSqIdx
864  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
865  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
866  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
867  s2_out.rep_info.debug           := s2_in.uop.debugInfo
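  // last_beat records which half of the refill the requested data sits in (bit
  // log2Up(refillBytes) of the paddr); the replay queue presumably uses it to wake
  // the load on the matching refill beat.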
868
869  // if forward fail, replay this inst from fetch
870  val debug_fwd_fail_rep = s2_fwd_fail && !s2_mmio && !s2_prf && !s2_in.tlbMiss
871  // if an ld-ld violation is detected, replay this inst from fetch
872  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss
873  // io.out.bits.uop.ctrl.replayInst := false.B
874
875  // to be removed
876  io.feedback_fast.valid                 := s2_valid && !s2_in.isLoadReplay && !s2_exception && io.lq_rep_full && s2_out.rep_info.need_rep && !s2_out.uop.robIdx.needFlush(io.redirect)
877  io.feedback_fast.bits.hit              := false.B
878  io.feedback_fast.bits.flushState       := s2_in.ptwBack
879  io.feedback_fast.bits.rsIdx            := s2_in.rsIdx
880  io.feedback_fast.bits.sourceType       := RSFeedbackType.lrqFull
881  io.feedback_fast.bits.dataInvalidSqIdx := DontCare
882
883  // fast wakeup
884  io.fast_uop.valid := RegNext(
885    !io.dcache.s1_disable_fast_wakeup &&
886    s1_valid &&
887    !s1_kill &&
888    !s1_fast_rep_kill &&
889    !io.tlb.resp.bits.fast_miss &&
890    !io.lsq.forward.dataInvalidFast
891  ) && (s2_valid && !io.feedback_fast.valid && !s2_out.rep_info.need_rep && !s2_mmio)
892  io.fast_uop.bits := RegNext(s1_out.uop)
893
894  //
895  io.s2_ptr_chasing                    := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, s1_fire)
896  io.prefetch_train.valid              := s2_valid && !s2_in.mmio && !s2_in.tlbMiss
897  io.prefetch_train.bits.fromLsPipelineBundle(s2_in)
898  io.prefetch_train.bits.miss          := io.dcache.resp.bits.miss
899  io.prefetch_train.bits.meta_prefetch := io.dcache.resp.bits.meta_prefetch
900  io.prefetch_train.bits.meta_access   := io.dcache.resp.bits.meta_access
901  if (env.FPGAPlatform)
902    io.dcache.s2_pc := DontCare
903  else
904    io.dcache.s2_pc := s2_out.uop.cf.pc
905  io.dcache.s2_kill := s2_pmp.ld || s2_pmp.mmio || s2_kill
906
907  val s1_ld_left_fire = s1_valid && !s1_kill && !s1_fast_rep_kill && s2_ready
908  val s2_ld_valid_dup = RegInit(0.U(6.W))
909  s2_ld_valid_dup := 0x0.U(6.W)
910  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
911  when (s1_kill || s1_fast_rep_kill) { s2_ld_valid_dup := 0x0.U(6.W) }
912  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))
913
914  // Pipeline
915  // --------------------------------------------------------------------------------
916  // stage 3
917  // --------------------------------------------------------------------------------
918  // writeback and update load queue
919  val s3_valid        = RegNext(s2_valid) && !RegNext(s2_out.uop.robIdx.needFlush(io.redirect))
920  val s3_in           = RegEnable(s2_out, s2_fire)
921  val s3_out          = Wire(Valid(new ExuOutput))
922  val s3_cache_rep    = RegEnable(s2_cache_rep && s2_troublem, s2_fire)
923  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
924  val s3_fast_rep     = Wire(Bool())
925  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
926  s3_ready := !s3_valid || s3_kill || io.ldout.ready
927
928  // s3 load fast replay
929  io.fast_rep_out.valid := s3_valid && s3_fast_rep && !s3_in.uop.robIdx.needFlush(io.redirect)
930  io.fast_rep_out.bits := s3_in
931
932  io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || !io.fast_rep_out.ready) && !s3_in.feedbacked && !s3_in.lateKill
933  io.lsq.ldin.bits := s3_in
934
935  /* <------- DANGEROUS: Don't change sequence here ! -------> */
936  io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
937  io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
938
939  val s3_dly_ld_err =
940    if (EnableAccurateLoadError) {
941      (s3_in.delayedLoadError || io.dcache.resp.bits.error_delayed) && RegNext(io.csrCtrl.cache_error_enable)
942    } else {
943      WireInit(false.B)
944    }
945  io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
946  io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err
947  io.lsq.ldin.bits.dcacheRequireReplay  := s3_cache_rep
948
949  val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid)
950  val s3_ldld_rep_inst =
951      io.lsq.ldld_nuke_query.resp.valid &&
952      io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
953      RegNext(io.csrCtrl.ldld_vio_check_enable)
954
955  val s3_rep_info = s3_in.rep_info
956  val s3_rep_frm_fetch = s3_vp_match_fail || s3_ldld_rep_inst
957  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)
958  val s3_force_rep = s3_sel_rep_cause(LoadReplayCauses.C_MA) ||
959                     s3_sel_rep_cause(LoadReplayCauses.C_TM) ||
960                     s3_sel_rep_cause(LoadReplayCauses.C_NK)
961
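  // A single replay cause is selected by priority (PriorityEncoderOH over the cause
  // vector, lower index = higher priority). The causes C_MA, C_TM and C_NK force a
  // replay even when an exception or a replay-from-fetch condition is also present,
  // as handled by the when() below.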
962  val s3_exception = ExceptionNO.selectByFu(s3_in.uop.cf.exceptionVec, lduCfg).asUInt.orR
963  when ((s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) && !s3_force_rep) {
964    io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
965  } .otherwise {
966    io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
967  }
968
969  // int load writeback: write back here (s3) if the load needs no replay and is not mmio / late-killed
970  s3_out.valid                := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio && !s3_in.lateKill
971  s3_out.bits.uop             := s3_in.uop
972  s3_out.bits.uop.cf.exceptionVec(loadAccessFault) := s3_dly_ld_err  || s3_in.uop.cf.exceptionVec(loadAccessFault)
973  s3_out.bits.uop.ctrl.replayInst := s3_rep_frm_fetch
974  s3_out.bits.data            := s3_in.data
975  s3_out.bits.redirectValid   := false.B
976  s3_out.bits.redirect        := DontCare
977  s3_out.bits.debug.isMMIO    := s3_in.mmio
978  s3_out.bits.debug.isPerfCnt := false.B
979  s3_out.bits.debug.paddr     := s3_in.paddr
980  s3_out.bits.debug.vaddr     := s3_in.vaddr
981  s3_out.bits.fflags          := DontCare
982
983  when (s3_force_rep) {
984    s3_out.bits.uop.cf.exceptionVec := 0.U.asTypeOf(s3_in.uop.cf.exceptionVec.cloneType)
985  }
986
987  /* <------- DANGEROUS: Don't change sequence here ! -------> */
988
989  io.lsq.ldin.bits.uop := s3_out.bits.uop
990
991  val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep
992  io.lsq.ldld_nuke_query.revoke := s3_revoke
993  io.lsq.stld_nuke_query.revoke := s3_revoke
994
995  // feedback slow
996  s3_fast_rep := (RegNext(s2_fast_rep) ||
997                    (s3_in.rep_info.dcache_miss && io.l2_hint.valid && io.l2_hint.bits.sourceId === s3_in.rep_info.mshr_id)) &&
998                    !s3_in.feedbacked &&
999                    !s3_in.lateKill &&
1000                    !s3_rep_frm_fetch &&
1001                    !s3_exception
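  // Fast replay: a load that only hit a transient dcache problem (bank conflict /
  // wpu mispredict / st-ld nuke), or whose missed mshr just received an l2 hint,
  // is sent straight back to stage 0 via io.fast_rep_out instead of going through
  // the replay queue, provided it was not already fed back, late-killed, replayed
  // from fetch, or faulted.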
1002  val s3_fb_no_waiting = !s3_in.isLoadReplay && !(s3_fast_rep && io.fast_rep_out.ready) && !s3_in.feedbacked
1003
1004  //
1005  io.feedback_slow.valid                 := s3_valid && !s3_in.uop.robIdx.needFlush(io.redirect) && s3_fb_no_waiting
1006  io.feedback_slow.bits.hit              := !io.lsq.ldin.bits.rep_info.need_rep || io.lsq.ldin.ready
1007  io.feedback_slow.bits.flushState       := s3_in.ptwBack
1008  io.feedback_slow.bits.rsIdx            := s3_in.rsIdx
1009  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
1010  io.feedback_slow.bits.dataInvalidSqIdx := DontCare
1011
1012  val s3_ld_wb_meta = Mux(s3_out.valid, s3_out.bits, io.lsq.uncache.bits)
1013
1014  // data from load queue (uncache / mmio access)
1015  val s3_ld_raw_data_frm_uncache = io.lsq.ld_raw_data
1016  val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData()
1017  val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List(
1018    "b000".U -> s3_merged_data_frm_uncache(63,  0),
1019    "b001".U -> s3_merged_data_frm_uncache(63,  8),
1020    "b010".U -> s3_merged_data_frm_uncache(63, 16),
1021    "b011".U -> s3_merged_data_frm_uncache(63, 24),
1022    "b100".U -> s3_merged_data_frm_uncache(63, 32),
1023    "b101".U -> s3_merged_data_frm_uncache(63, 40),
1024    "b110".U -> s3_merged_data_frm_uncache(63, 48),
1025    "b111".U -> s3_merged_data_frm_uncache(63, 56)
1026  ))
1027  val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache)
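  // The merged 64-bit data is realigned by the byte offset of the access (the
  // LookupTree above selects bits 8*offset and up), then rdataHelper applies the
  // sign/zero extension implied by the uop's fuOpType. The dcache-hit path below
  // (s3_ld_data_frm_cache) follows the same pattern.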
1028
1029  // data from dcache hit
1030  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
1031  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
1032  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
1033  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
1034  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
1035  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(2, 0), s2_valid)
1036  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, s2_valid)
1037  s3_ld_raw_data_frm_cache.forwardData_D        := RegEnable(s2_fwd_data_frm_d_chan, s2_valid)
1038  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, s2_valid)
1039  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
1040  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, s2_valid)
1041
1042  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
1043  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
1044    "b000".U -> s3_merged_data_frm_cache(63,  0),
1045    "b001".U -> s3_merged_data_frm_cache(63,  8),
1046    "b010".U -> s3_merged_data_frm_cache(63, 16),
1047    "b011".U -> s3_merged_data_frm_cache(63, 24),
1048    "b100".U -> s3_merged_data_frm_cache(63, 32),
1049    "b101".U -> s3_merged_data_frm_cache(63, 40),
1050    "b110".U -> s3_merged_data_frm_cache(63, 48),
1051    "b111".U -> s3_merged_data_frm_cache(63, 56)
1052  ))
1053  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
1054
1055  // FIXME: add 1 cycle delay ?
1056  io.lsq.uncache.ready := !s3_out.valid
1057  io.ldout.bits        := s3_ld_wb_meta
1058  io.ldout.bits.data   := Mux(s3_out.valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache)
1059  io.ldout.valid       := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) ||
1060                         io.lsq.uncache.valid && !io.lsq.uncache.bits.uop.robIdx.needFlush(io.redirect) && !s3_out.valid
1061
1062
1063  // fast load to load forward
1064  io.l2l_fwd_out.valid      := s3_out.valid && !s3_in.lateKill // for debug only
1065  io.l2l_fwd_out.data       := s3_merged_data_frm_cache // load to load is for ld only
1066  io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err // ecc delayed error
1067
1068   // trigger
1069  val last_valid_data = RegNext(RegEnable(io.ldout.bits.data, io.ldout.fire))
1070  val hit_ld_addr_trig_hit_vec = Wire(Vec(3, Bool()))
1071  val lq_ld_addr_trig_hit_vec = io.lsq.trigger.lqLoadAddrTriggerHitVec
1072  (0 until 3).map{i => {
1073    val tdata2    = RegNext(io.trigger(i).tdata2)
1074    val matchType = RegNext(io.trigger(i).matchType)
1075    val tEnable   = RegNext(io.trigger(i).tEnable)
1076
1077    hit_ld_addr_trig_hit_vec(i) := TriggerCmp(RegNext(s2_out.vaddr), tdata2, matchType, tEnable)
1078    io.trigger(i).addrHit       := Mux(s3_out.valid, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i))
1079    io.trigger(i).lastDataHit   := TriggerCmp(last_valid_data, tdata2, matchType, tEnable)
1080  }}
1081  io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec
1082
1083  // FIXME: please move this part to LoadQueueReplay
1084  io.debug_ls := DontCare
1085
1086  // Topdown
1087  io.lsTopdownInfo.s1.robIdx      := s1_in.uop.robIdx.value
1088  io.lsTopdownInfo.s1.vaddr_valid := s1_valid && s1_in.hasROBEntry
1089  io.lsTopdownInfo.s1.vaddr_bits  := s1_vaddr
1090  io.lsTopdownInfo.s2.robIdx      := s2_in.uop.robIdx.value
1091  io.lsTopdownInfo.s2.paddr_valid := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1092  io.lsTopdownInfo.s2.paddr_bits  := s2_in.paddr
1093
1094  // perf cnt
1095  XSPerfAccumulate("s0_in_valid",                  io.ldin.valid)
1096  XSPerfAccumulate("s0_in_block",                  io.ldin.valid && !io.ldin.fire)
1097  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_isFirstIssue)
1098  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.replay.fire)
1099  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.ldin.fire && s0_isFirstIssue)
1100  XSPerfAccumulate("s0_fast_replay_issue",         io.fast_rep_in.fire)
1101  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1102  XSPerfAccumulate("s0_stall_dcache",              s0_valid && !io.dcache.req.ready)
1103  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12))
1104  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12))
1105  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1106  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1107  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1108  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1109  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_prf && s0_int_iss_select)
1110  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
1111  XSPerfAccumulate("s0_hardware_prefetch_total",   io.prefetch_req.valid)
1112
1113  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1114  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1115  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1116  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1117  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1118  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1119
1120  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1121  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1122  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1123  XSPerfAccumulate("s2_dcache_miss",               s2_fire && s2_cache_miss)
1124  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && s2_cache_miss && s2_in.isFirstIssue)
1125  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1126  XSPerfAccumulate("s2_dcache_miss_full_forward",  s2_fire && s2_cache_miss && s2_full_fwd)
1127  XSPerfAccumulate("s2_stall_out",                 s2_fire && !s2_can_go)
1128  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1129  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && s2_cache_rep) // ignore prefetch for mshr full / miss req port conflict
1130  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && s2_cache_miss) // prefetch req miss in l1
1131  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !s2_cache_miss) // prefetch req hit in l1
1132  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && s2_cache_miss && !s2_cache_rep) // prefetch a missed line in l1, and l1 accepted it
1133  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fwd_frm_d_chan && s2_fwd_data_valid)
1134  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fwd_frm_mshr && s2_fwd_data_valid)
1135
1136  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1137  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1138  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1139  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1140  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1141  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1142  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1143  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1144
1145  // bug lyq: some signals in perfEvents are no longer suitable for the current MemBlock design
1146  // hardware performance counter
1147  val perfEvents = Seq(
1148    ("load_s0_in_fire         ", s0_fire                                                        ),
1149    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1150    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready                  ),
1151    ("load_s1_in_fire         ", s0_fire                                                        ),
1152    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1153    ("load_s2_in_fire         ", s1_fire                                                        ),
1154    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                            ),
1155  )
1156  generatePerfEvent()
1157
1158  when(io.ldout.fire){
1159    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
1160  }
1161  // end
1162}