xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/HybridUnit.scala (revision 6810d1e8e7c0789e9f50ee6bdd52010b8ce506ef)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput, MemExuOutput}
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.ctrlblock.{DebugLsInfoBundle, LsTopdownInfo}
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.fu._
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.cache.mmu.{TlbCmd, TlbReq, TlbRequestIO, TlbResp}
import xiangshan.mem.mdp._

class HybridUnit(implicit p: Parameters) extends XSModule
  with HasLoadHelper
  with HasPerfEvents
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
{
  val io = IO(new Bundle() {
    // control
    val redirect      = Flipped(ValidIO(new Redirect))
    val csrCtrl       = Flipped(new CustomCSRCtrlIO)

    // flow in
    val lsin          = Flipped(Decoupled(new MemExuInput))

    // flow out
    val ldout = DecoupledIO(new MemExuOutput)
    val stout = DecoupledIO(new MemExuOutput)

    val ldu_io = new Bundle() {
      // data path
      val sbuffer       = new LoadForwardQueryIO
      val lsq           = new LoadToLsqIO
      val tl_d_channel  = Input(new DcacheToLduForwardIO)
      val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
      val refill        = Flipped(ValidIO(new Refill))
      val l2_hint       = Input(Valid(new L2ToL1Hint))

      // fast wakeup
      val fast_uop = ValidIO(new DynInst) // early wakeup signal generated in load_s1, sent to RS in load_s2

      // trigger
      val trigger = Vec(3, new LoadUnitTriggerIO)

      // load to load fast path
      val l2l_fwd_in    = Input(new LoadToLoadIO)
      val l2l_fwd_out   = Output(new LoadToLoadIO)

      val ld_fast_match    = Input(Bool())
      val ld_fast_fuOpType = Input(UInt())
      val ld_fast_imm      = Input(UInt(12.W))

      // iq cancel
      val ldCancel = Output(new LoadCancelIO()) // used to cancel the uops woken up by this load, and to cancel the load itself

      // load ecc error
      val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different

      // schedule error query
      val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))

      // queue-based replay
      val replay       = Flipped(Decoupled(new LsPipelineBundle))
      val lq_rep_full  = Input(Bool())

      // misc
      val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch

      // Load fast replay path
      val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
      val fast_rep_out = Decoupled(new LqWriteBundle)

      // perf
      val debug_ls         = Output(new DebugLsInfoBundle)
      val lsTopdownInfo    = Output(new LsTopdownInfo)
    }

    val stu_io = new Bundle() {
      val issue           = Valid(new MemExuInput)
      val lsq             = ValidIO(new LsPipelineBundle)
      val lsq_replenish   = Output(new LsPipelineBundle())
      val stld_nuke_query = Valid(new StoreNukeQueryIO)
      val st_mask_out     = Valid(new StoreMaskBundle)
      val debug_ls        = Output(new DebugLsInfoBundle)
    }

    // prefetch
    val prefetch_train            = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
    val prefetch_train_l1         = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
    val prefetch_req              = Flipped(ValidIO(new L1PrefetchReq)) // hardware prefetch to l1 cache req
    val canAcceptLowConfPrefetch  = Output(Bool())
    val canAcceptHighConfPrefetch = Output(Bool())
    val correctMissTrain          = Input(Bool())

    // data path
    val tlb           = new TlbRequestIO(2)
    val pmp           = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb response
    val dcache        = new DCacheLoadIO

    // rs feedback
    val feedback_fast = ValidIO(new RSFeedback) // stage 2
    val feedback_slow = ValidIO(new RSFeedback) // stage 3
  })

  val s1_ready, s2_ready, s3_ready, sx_can_go = WireInit(false.B)

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_valid         = Wire(Bool())
  val s0_kill          = Wire(Bool())
  val s0_vaddr         = Wire(UInt(VAddrBits.W))
  val s0_mask          = Wire(UInt((VLEN/8).W))
  val s0_uop           = Wire(new DynInst)
  val s0_has_rob_entry = Wire(Bool())
  val s0_rsIdx         = Wire(UInt(log2Up(MemIQSizeMax).W))
  val s0_mshrid        = Wire(UInt())
  val s0_try_l2l       = Wire(Bool())
  val s0_rep_carry     = Wire(new ReplayCarry(nWays))
  val s0_isFirstIssue  = Wire(Bool())
  val s0_fast_rep      = Wire(Bool())
  val s0_ld_rep        = Wire(Bool())
  val s0_l2l_fwd       = Wire(Bool())
  val s0_sched_idx     = Wire(UInt())
  val s0_can_go        = s1_ready
  val s0_fire          = s0_valid && s0_can_go
  val s0_out           = Wire(new LqWriteBundle)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.ldu_io.replay)
  // src1: fast load replay (io.ldu_io.fast_rep_in)
  // src2: load replayed by LSQ (io.ldu_io.replay)
  // src3: hardware prefetch from prefetcher (high confidence) (io.prefetch)
  // src4: int read / software prefetch first issue from RS (io.in)
  // src5: vec read first issue from RS (TODO)
  // src6: load tries pointer chasing when there is no issued or replayed load (io.fastpath)
  // src7: hardware prefetch from prefetcher (low confidence) (io.prefetch)
  // priority: high to low
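  // The priority above is enforced by the daisy-chained ready signals below: each
  // source is ready only when every higher-priority source is invalid, so at most
  // one (valid && ready) pair - and hence one select signal - fires per cycle.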
  val s0_ld_flow             = FuType.isLoad(s0_uop.fuType)
  val s0_rep_stall           = io.lsin.valid && isAfter(io.ldu_io.replay.bits.uop.robIdx, io.lsin.bits.uop.robIdx)
  val s0_super_ld_rep_valid  = io.ldu_io.replay.valid && io.ldu_io.replay.bits.forward_tlDchannel
  val s0_ld_fast_rep_valid   = io.ldu_io.fast_rep_in.valid
  val s0_ld_rep_valid        = io.ldu_io.replay.valid && !io.ldu_io.replay.bits.forward_tlDchannel && !s0_rep_stall
  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
  val s0_int_iss_valid       = io.lsin.valid // int flow first issue or software prefetch
  val s0_vec_iss_valid       = WireInit(false.B) // TODO
  val s0_l2l_fwd_valid       = io.ldu_io.l2l_fwd_in.valid && io.ldu_io.ld_fast_match
  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
  dontTouch(s0_super_ld_rep_valid)
  dontTouch(s0_ld_fast_rep_valid)
  dontTouch(s0_ld_rep_valid)
  dontTouch(s0_high_conf_prf_valid)
  dontTouch(s0_int_iss_valid)
  dontTouch(s0_vec_iss_valid)
  dontTouch(s0_l2l_fwd_valid)
  dontTouch(s0_low_conf_prf_valid)

  // load flow source ready
  val s0_super_ld_rep_ready  = WireInit(true.B)
  val s0_ld_fast_rep_ready   = !s0_super_ld_rep_valid
  val s0_ld_rep_ready        = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid
  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid

  val s0_int_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid

  val s0_vec_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid

  val s0_l2l_fwd_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid

  val s0_low_conf_prf_ready  = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid &&
                               !s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_ready)
  dontTouch(s0_ld_fast_rep_ready)
  dontTouch(s0_ld_rep_ready)
  dontTouch(s0_high_conf_prf_ready)
  dontTouch(s0_int_iss_ready)
  dontTouch(s0_vec_iss_ready)
  dontTouch(s0_l2l_fwd_ready)
  dontTouch(s0_low_conf_prf_ready)

  // load flow source select (OH)
  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
  assert(!s0_vec_iss_select) // to be added
  dontTouch(s0_super_ld_rep_select)
  dontTouch(s0_ld_fast_rep_select)
  dontTouch(s0_ld_rep_select)
  dontTouch(s0_hw_prf_select)
  dontTouch(s0_int_iss_select)
  dontTouch(s0_vec_iss_select)
  dontTouch(s0_l2l_fwd_select)

  s0_valid := (s0_super_ld_rep_valid ||
               s0_ld_fast_rep_valid ||
               s0_ld_rep_valid ||
               s0_high_conf_prf_valid ||
               s0_int_iss_valid ||
               s0_vec_iss_valid ||
               s0_l2l_fwd_valid ||
               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill

  // pointer chasing is only attempted when S0 can go (S1 is ready) and the dcache is ready
  val s0_try_ptr_chasing      = s0_l2l_fwd_select
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.ldu_io.l2l_fwd_in.data(5, 0) +& io.ldu_io.ld_fast_imm(5, 0)
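  // note: `+&` is a width-expanding add, so bit 6 of s0_ptr_chasing_vaddr holds the
  // carry out of the 6-bit sum; s1 checks that bit to detect that base + offset
  // crossed the cache line (set index changed) and cancels the pointer chasing attempt.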
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled || (s0_out.uop.robIdx.needFlush(io.redirect) && !s0_try_ptr_chasing)

  // prefetch related ctrl signal
  val s0_prf    = Wire(Bool())
  val s0_prf_rd = Wire(Bool())
  val s0_prf_wr = Wire(Bool())
  val s0_hw_prf = s0_hw_prf_select

  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready

  // query DTLB
  io.tlb.req.valid                   := s0_valid
  io.tlb.req.bits.cmd                := Mux(s0_prf,
                                         Mux(s0_prf_wr, TlbCmd.write, TlbCmd.read),
                                         Mux(s0_ld_flow, TlbCmd.read, TlbCmd.write)
                                       )
  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_vaddr)
  io.tlb.req.bits.size               := LSUOpType.size(s0_uop.fuOpType)
  io.tlb.req.bits.kill               := s0_kill
  io.tlb.req.bits.memidx.is_ld       := s0_ld_flow
  io.tlb.req.bits.memidx.is_st       := !s0_ld_flow
  io.tlb.req.bits.memidx.idx         := s0_uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx       := s0_uop.robIdx
  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc           := s0_uop.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue

  // query DCache
  io.dcache.req.valid             := s0_valid
  io.dcache.req.bits.cmd          := Mux(s0_prf_rd,
                                      MemoryOpConstants.M_PFR,
                                      Mux(s0_prf_wr, MemoryOpConstants.M_PFW,
                                        Mux(s0_ld_flow, MemoryOpConstants.M_XRD, MemoryOpConstants.M_XWR))
                                    )
  io.dcache.req.bits.vaddr        := s0_vaddr
  io.dcache.req.bits.mask         := s0_mask
  io.dcache.req.bits.data         := DontCare
  io.dcache.req.bits.isFirstIssue := s0_isFirstIssue
  io.dcache.req.bits.instrtype    := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U,
                                     Mux(s0_ld_flow, LOAD_SOURCE.U, STORE_SOURCE.U))
  io.dcache.req.bits.debug_robIdx := s0_uop.robIdx.value
  io.dcache.req.bits.replayCarry  := s0_rep_carry
  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
  io.dcache.pf_source             := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)

  // load flow priority mux
  def fromNullSource() = {
    s0_vaddr         := 0.U
    s0_mask          := 0.U
    s0_uop           := 0.U.asTypeOf(new DynInst)
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U
  }

  def fromFastReplaySource(src: LqWriteBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := src.mask
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := src.hasROBEntry
    s0_rep_carry     := src.rep_info.rep_carry
    s0_mshrid        := src.rep_info.mshr_id
    s0_rsIdx         := src.rsIdx
    s0_isFirstIssue  := false.B
    s0_fast_rep      := true.B
    s0_ld_rep        := src.isLoadReplay
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
  }

  def fromNormalReplaySource(src: LsPipelineBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := genVWmask(src.vaddr, src.uop.fuOpType(1, 0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rsIdx         := src.rsIdx
    s0_rep_carry     := src.replayCarry
    s0_mshrid        := src.mshrid
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := true.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
  }

  def fromPrefetchSource(src: L1PrefetchReq) = {
    s0_vaddr         := src.getVaddr()
    s0_mask          := 0.U
    s0_uop           := DontCare
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := true.B
    s0_prf_rd        := !src.is_store
    s0_prf_wr        := src.is_store
    s0_sched_idx     := 0.U
  }

  def fromIntIssueSource(src: MemExuInput) = {
    s0_vaddr         := src.src(0) + SignExt(src.uop.imm(11, 0), VAddrBits)
    s0_mask          := genVWmask(s0_vaddr, src.uop.fuOpType(1,0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rsIdx         := src.iqIdx
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := true.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := 0.U
  }

  def fromVecIssueSource() = {
    s0_vaddr         := 0.U
    s0_mask          := 0.U
    s0_uop           := 0.U.asTypeOf(new DynInst)
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U
  }

  def fromLoadToLoadSource(src: LoadToLoadIO) = {
    s0_vaddr              := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0))
    s0_mask               := genVWmask(s0_vaddr, io.ldu_io.ld_fast_fuOpType(1, 0))
    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
    // Assume the pointer chasing is always ld.
    s0_uop.fuOpType       := io.ldu_io.ld_fast_fuOpType
    s0_try_l2l            := true.B
    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer
    // chasing, because these signals will be updated in S1
    s0_has_rob_entry      := false.B
    s0_rsIdx              := 0.U
    s0_mshrid             := 0.U
    s0_rep_carry          := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_isFirstIssue       := true.B
    s0_fast_rep           := false.B
    s0_ld_rep             := false.B
    s0_l2l_fwd            := true.B
    s0_prf                := false.B
    s0_prf_rd             := false.B
    s0_prf_wr             := false.B
    s0_sched_idx          := 0.U
  }

  // set default
  s0_uop := DontCare
  when (s0_super_ld_rep_select)      { fromNormalReplaySource(io.ldu_io.replay.bits)     }
  .elsewhen (s0_ld_fast_rep_select)  { fromFastReplaySource(io.ldu_io.fast_rep_in.bits)  }
  .elsewhen (s0_ld_rep_select)       { fromNormalReplaySource(io.ldu_io.replay.bits)     }
  .elsewhen (s0_hw_prf_select)       { fromPrefetchSource(io.prefetch_req.bits)   }
  .elsewhen (s0_int_iss_select)      { fromIntIssueSource(io.lsin.bits)           }
  .elsewhen (s0_vec_iss_select)      { fromVecIssueSource()                       }
  .otherwise {
    if (EnableLoadToLoadForward) {
      fromLoadToLoadSource(io.ldu_io.l2l_fwd_in)
    } else {
      fromNullSource()
    }
  }

  // address align check
  val s0_addr_aligned = LookupTree(s0_uop.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))
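  // e.g. a word access (fuOpType(1, 0) === "b10") requires the two low vaddr bits to
  // be zero; a misaligned flow raises loadAddrMisaligned / storeAddrMisaligned below.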

  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetch need writeback to loadQueueFlag
  s0_out               := DontCare
  s0_out.rsIdx         := s0_rsIdx
  s0_out.vaddr         := s0_vaddr
  s0_out.mask          := s0_mask
  s0_out.uop           := s0_uop
  s0_out.isFirstIssue  := s0_isFirstIssue
  s0_out.hasROBEntry   := s0_has_rob_entry
  s0_out.isPrefetch    := s0_prf
  s0_out.isHWPrefetch  := s0_hw_prf
  s0_out.isFastReplay  := s0_fast_rep
  s0_out.isLoadReplay  := s0_ld_rep
  s0_out.isFastPath    := s0_l2l_fwd
  s0_out.mshrid        := s0_mshrid
  s0_out.uop.exceptionVec(loadAddrMisaligned)  := !s0_addr_aligned && s0_ld_flow
  s0_out.uop.exceptionVec(storeAddrMisaligned) := !s0_addr_aligned && !s0_ld_flow
  s0_out.forward_tlDchannel := s0_super_ld_rep_select
  when(io.tlb.req.valid && s0_isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }.otherwise{
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex     := s0_sched_idx

  // load fast replay
  io.ldu_io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.ldu_io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.lsin.ready := (s0_can_go && (io.dcache.req.ready || !s0_ld_flow) && s0_int_iss_ready)

  // for hw prefetch load flow feedback, to be added later
  // io.prefetch_in.ready := s0_hw_prf_select

  // dcache replacement extra info
  // TODO: should prefetch load update replacement?
  io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.ldu_io.replay.bits.replacementUpdated, false.B)

  io.stu_io.st_mask_out.valid       := s0_valid && !s0_ld_flow
  io.stu_io.st_mask_out.bits.mask   := s0_out.mask
  io.stu_io.st_mask_out.bits.sqIdx  := s0_out.uop.sqIdx

  // load debug
  XSDebug(io.dcache.req.fire && s0_ld_flow,
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSDebug(s0_valid && s0_ld_flow,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, lqIdx ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // store debug
  XSDebug(io.dcache.req.fire && !s0_ld_flow,
    p"[DCACHE STORE REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSDebug(s0_valid && !s0_ld_flow,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, sqIdx ${Hexadecimal(s0_out.uop.sqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid      = RegInit(false.B)
  val s1_in         = Wire(new LqWriteBundle)
  val s1_out        = Wire(new LqWriteBundle)
  val s1_kill       = Wire(Bool())
  val s1_can_go     = s2_ready
  val s1_fire       = s1_valid && !s1_kill && s1_can_go
  val s1_ld_flow    = RegNext(s0_ld_flow)

  s1_ready := !s1_valid || s1_kill || s2_ready
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
  s1_in   := RegEnable(s0_out, s0_fire)

  val s1_fast_rep_dly_err = RegNext(io.ldu_io.fast_rep_in.bits.delayedLoadError)
  val s1_fast_rep_kill    = s1_fast_rep_dly_err && s1_in.isFastReplay
  val s1_l2l_fwd_dly_err  = RegNext(io.ldu_io.l2l_fwd_in.dly_ld_err)
  val s1_l2l_fwd_kill     = s1_l2l_fwd_dly_err && s1_in.isFastPath
  val s1_late_kill        = s1_fast_rep_kill || s1_l2l_fwd_kill
  val s1_vaddr_hi         = Wire(UInt())
  val s1_vaddr_lo         = Wire(UInt())
  val s1_vaddr            = Wire(UInt())
  val s1_paddr_dup_lsu    = Wire(UInt())
  val s1_paddr_dup_dcache = Wire(UInt())
  val s1_ld_exception     = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, LduCfg).asUInt.orR   // af & pf exceptions are modified below
  val s1_st_exception     = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, StaCfg).asUInt.orR   // af & pf exceptions are modified below
  val s1_exception        = (s1_ld_flow && s1_ld_exception) || (!s1_ld_flow && s1_st_exception)
  val s1_tlb_miss         = io.tlb.resp.bits.miss
  val s1_prf              = s1_in.isPrefetch
  val s1_hw_prf           = s1_in.isHWPrefetch
  val s1_sw_prf           = s1_prf && !s1_hw_prf
  val s1_tlb_memidx       = io.tlb.resp.bits.memidx

  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo         := s1_in.vaddr(5, 0)
  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss &&
        s1_tlb_memidx.idx === s1_in.uop.lqIdx.value && s1_ld_flow) {
    // printf("Load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  } .elsewhen(s1_tlb_memidx.is_st && io.tlb.resp.valid && !s1_tlb_miss &&
              s1_tlb_memidx.idx === s1_out.uop.sqIdx.value && !s1_ld_flow) {
    // printf("Store idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill   := s1_kill
  io.tlb.resp.ready := true.B

  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.dcache.s1_kill             := s1_kill || s1_tlb_miss || s1_exception || !s1_ld_flow

  // store to load forwarding
  io.ldu_io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow)
  io.ldu_io.sbuffer.vaddr := s1_vaddr
  io.ldu_io.sbuffer.paddr := s1_paddr_dup_lsu
  io.ldu_io.sbuffer.uop   := s1_in.uop
  io.ldu_io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.ldu_io.sbuffer.mask  := s1_in.mask
  io.ldu_io.sbuffer.pc    := s1_in.uop.pc // FIXME: remove it

  io.ldu_io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow)
  io.ldu_io.lsq.forward.vaddr     := s1_vaddr
  io.ldu_io.lsq.forward.paddr     := s1_paddr_dup_lsu
  io.ldu_io.lsq.forward.uop       := s1_in.uop
  io.ldu_io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
  io.ldu_io.lsq.forward.sqIdxMask := 0.U
  io.ldu_io.lsq.forward.mask      := s1_in.mask
  io.ldu_io.lsq.forward.pc        := s1_in.uop.pc // FIXME: remove it

  // st-ld violation query
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                       io.ldu_io.stld_nuke_query(w).valid && // query valid
                       isAfter(s1_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store
                       // TODO: Fix me when vector instruction
                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                       (s1_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data mask contain
                      })).asUInt.orR && !s1_tlb_miss && s1_ld_flow
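  // s1_nuke: this load is younger than an in-flight store to the same 8-byte region
  // with overlapping bytes, so it may be about to read stale data; the flow is
  // marked for replay via rep_info.nuke below.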

  s1_out                   := s1_in
  s1_out.vaddr             := s1_vaddr
  s1_out.paddr             := s1_paddr_dup_lsu
  s1_out.tlbMiss           := s1_tlb_miss
  s1_out.ptwBack           := io.tlb.resp.bits.ptwBack
  s1_out.rsIdx             := s1_in.rsIdx
  s1_out.rep_info.debug    := s1_in.uop.debugInfo
  s1_out.rep_info.nuke     := s1_nuke && !s1_sw_prf
  s1_out.lateKill          := s1_late_kill

  when (s1_ld_flow) {
    when (!s1_late_kill) {
      // current ori test will cause the case of ldest == 0, below will be modified in the future.
      // af & pf exceptions were modified
      s1_out.uop.exceptionVec(loadPageFault)   := io.tlb.resp.bits.excp(0).pf.ld
      s1_out.uop.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld
    } .otherwise {
      s1_out.uop.exceptionVec(loadAddrMisaligned) := false.B
      s1_out.uop.exceptionVec(loadAccessFault)    := s1_late_kill
    }
  } .otherwise {
    s1_out.uop.exceptionVec(storePageFault)   := io.tlb.resp.bits.excp(0).pf.st
    s1_out.uop.exceptionVec(storeAccessFault) := io.tlb.resp.bits.excp(0).af.st
  }

  // pointer chasing
  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
  val s1_fu_op_type_not_ld     = WireInit(false.B)
  val s1_not_fast_match        = WireInit(false.B)
  val s1_addr_mismatch         = WireInit(false.B)
  val s1_addr_misaligned       = WireInit(false.B)
  val s1_ptr_chasing_canceled  = WireInit(false.B)
  val s1_cancel_ptr_chasing    = WireInit(false.B)

  s1_kill := s1_late_kill ||
             s1_cancel_ptr_chasing ||
             s1_in.uop.robIdx.needFlush(io.redirect) ||
             RegEnable(s0_kill, false.B, io.lsin.valid || io.ldu_io.replay.valid || io.ldu_io.l2l_fwd_in.valid || io.ldu_io.fast_rep_in.valid)

  if (EnableLoadToLoadForward) {
    // Sometimes, we need to cancel the load-load forwarding.
    // These can be put at S0 if timing is bad at S1.
    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
    s1_addr_mismatch      := s1_ptr_chasing_vaddr(6) || RegEnable(io.ldu_io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
    // Case 1: the address is misaligned, kill s1
    s1_addr_misaligned    := LookupTree(s1_in.uop.fuOpType(1, 0), List(
                             "b00".U   -> false.B,                  //b
                             "b01".U   -> (s1_vaddr(0)    =/= 0.U), //h
                             "b10".U   -> (s1_vaddr(1, 0) =/= 0.U), //w
                             "b11".U   -> (s1_vaddr(2, 0) =/= 0.U)  //d
                          ))
    // Case 2: this load-load uop is cancelled
    s1_ptr_chasing_canceled := !io.lsin.valid

    when (s1_try_ptr_chasing) {
      s1_cancel_ptr_chasing := s1_addr_mismatch || s1_addr_misaligned || s1_ptr_chasing_canceled

      s1_in.uop           := io.lsin.bits.uop
      s1_in.rsIdx         := io.lsin.bits.iqIdx
      s1_in.isFirstIssue  := io.lsin.bits.isFirstIssue
      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)

      // record tlb time when we get the data, to ensure the correctness of the latency
      // calculation (although it should not be recorded here, because this flow does not use the tlb)
      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
    }
    when (!s1_cancel_ptr_chasing) {
      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.ldu_io.replay.fire && !io.ldu_io.fast_rep_in.fire
      when (s1_try_ptr_chasing) {
        io.lsin.ready := true.B
      }
    }
  }

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
  // to enable load-load, sqIdxMask must be calculated based on lsin.uop
  // If the timing here is not OK, load-load forwarding has to be disabled.
  // Or we calculate sqIdxMask at RS??
  io.ldu_io.lsq.forward.sqIdxMask := s1_sqIdx_mask
  if (EnableLoadToLoadForward) {
    when (s1_try_ptr_chasing) {
      io.ldu_io.lsq.forward.sqIdxMask := UIntToMask(io.lsin.bits.uop.sqIdx.value, StoreQueueSize)
    }
  }

  io.ldu_io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel && s1_ld_flow
  io.ldu_io.forward_mshr.mshrid := s1_out.mshrid
  io.ldu_io.forward_mshr.paddr  := s1_out.paddr

  // load debug
  XSDebug(s1_valid && s1_ld_flow,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lqIdx ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // store debug
  XSDebug(s1_valid && !s1_ld_flow,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, sqIdx ${Hexadecimal(s1_out.uop.sqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // store out
  io.stu_io.lsq.valid         := s1_valid && !s1_ld_flow
  io.stu_io.lsq.bits          := s1_out
  io.stu_io.lsq.bits.miss     := s1_tlb_miss

  // st-ld violation detect request
  io.stu_io.stld_nuke_query.valid       := s1_valid && !s1_tlb_miss && !s1_ld_flow
  io.stu_io.stld_nuke_query.bits.robIdx := s1_in.uop.robIdx
  io.stu_io.stld_nuke_query.bits.paddr  := s1_paddr_dup_lsu
  io.stu_io.stld_nuke_query.bits.mask   := s1_in.mask

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // s2: DCache resp
  val s2_valid  = RegInit(false.B)
  val s2_in     = Wire(new LqWriteBundle)
  val s2_out    = Wire(new LqWriteBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go

  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }
  s2_in := RegEnable(s1_out, s1_fire)

  val s2_pmp = WireInit(io.pmp)

  val s2_prf     = s2_in.isPrefetch
  val s2_hw_prf  = s2_in.isHWPrefetch
  val s2_ld_flow = RegEnable(s1_ld_flow, s1_fire)

  // exceptions that may cause the load addr to be invalid / illegal
  // if such an exception happens, the inst and its exception info
  // will be force-written back to the rob
  val s2_exception_vec = WireInit(s2_in.uop.exceptionVec)
  when (s2_ld_flow) {
    when (!s2_in.lateKill) {
      s2_exception_vec(loadAccessFault) := s2_in.uop.exceptionVec(loadAccessFault) || s2_pmp.ld
      // soft prefetch will not trigger any exception (but ecc error interrupt may be triggered)
      when (s2_prf || s2_in.tlbMiss) {
        s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
      }
    }
  } .otherwise {
    s2_exception_vec(storeAccessFault) := s2_in.uop.exceptionVec(storeAccessFault) || s2_pmp.st
    when (s2_prf || s2_in.tlbMiss) {
      s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
    }
  }
  val s2_ld_exception = ExceptionNO.selectByFu(s2_exception_vec, LduCfg).asUInt.orR && s2_ld_flow
  val s2_st_exception = ExceptionNO.selectByFu(s2_exception_vec, StaCfg).asUInt.orR && !s2_ld_flow
  val s2_exception    = s2_ld_exception || s2_st_exception

  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.ldu_io.forward_mshr.forward()
  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)
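  // A missed load may still get its data without a full replay: the refill can be
  // caught in flight, either on the TileLink D channel (tl_d_channel) or from the
  // MSHR that is handling the miss (forward_mshr).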

  // writeback access fault caused by ecc error / bus error
  // * ecc data error is slow to generate, so we will not use it until load stage 3
  // * in load stage 3, an extra signal io.load_error will be used to report it
  val s2_actually_mmio = s2_pmp.mmio
  val s2_mmio          = !s2_prf &&
                          s2_actually_mmio &&
                         !s2_exception &&
                         !s2_in.tlbMiss
  val s2_full_fwd      = Wire(Bool())
  val s2_mem_amb       = s2_in.uop.storeSetHit &&
                         io.ldu_io.lsq.forward.addrInvalid

  val s2_tlb_miss      = s2_in.tlbMiss
  val s2_fwd_fail      = io.ldu_io.lsq.forward.dataInvalid
  val s2_dcache_miss   = io.dcache.resp.bits.miss &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_mq_nack       = io.dcache.s2_mq_nack &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_bank_conflict = io.dcache.s2_bank_conflict &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_rar_nack      = io.ldu_io.lsq.ldld_nuke_query.req.valid &&
                         !io.ldu_io.lsq.ldld_nuke_query.req.ready

  val s2_raw_nack      = io.ldu_io.lsq.stld_nuke_query.req.valid &&
                         !io.ldu_io.lsq.stld_nuke_query.req.ready

  // st-ld violation query
  //  NeedFastRecovery Valid when
  //  1. Fast recovery query request Valid.
  //  2. Load instruction is younger than requestors (store instructions).
  //  3. Physical addresses match.
  //  4. Data masks overlap.
  val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                        io.ldu_io.stld_nuke_query(w).valid && // query valid
                        isAfter(s2_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store
                        // TODO: Fix me when vector instruction
                        (s2_in.paddr(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                        (s2_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data mask contain
                      })).asUInt.orR && s2_ld_flow || s2_in.rep_info.nuke

  val s2_cache_handled   = io.dcache.resp.bits.handled
  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
                           io.dcache.resp.bits.tag_error

  val s2_troublem        = !s2_exception &&
                           !s2_mmio &&
                           !s2_prf &&
                           !s2_in.lateKill

  io.dcache.resp.ready  := true.B
  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_mmio || s2_prf || s2_in.lateKill) && s2_ld_flow
  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")

  // fast replay require
  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
  val s2_nuke_fast_rep   = !s2_mq_nack &&
                           !s2_dcache_miss &&
                           !s2_bank_conflict &&
                           !s2_wpu_pred_fail &&
                           !s2_rar_nack &&
                           !s2_raw_nack &&
                           s2_nuke

  val s2_fast_rep = !s2_mem_amb &&
                    !s2_tlb_miss &&
                    !s2_fwd_fail &&
                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
                    s2_troublem

  // need allocate new entry
  val s2_can_query = !s2_mem_amb &&
                     !s2_tlb_miss &&
                     !s2_fwd_fail &&
                     !s2_dcache_fast_rep &&
                     s2_troublem

  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)

  // ld-ld violation require
  io.ldu_io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.ldu_io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
  io.ldu_io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
  io.ldu_io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.ldu_io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // st-ld violation require
  io.ldu_io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.ldu_io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
  io.ldu_io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
  io.ldu_io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.ldu_io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // merge forward result
  // lsq has higher priority than sbuffer
  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.ldu_io.lsq.forward.dataInvalid
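  // s2_full_fwd: every byte requested by s2_in.mask is covered by forwarded bytes
  // (lsq and/or sbuffer), so a dcache miss, if any, no longer matters for this load.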
  // generate VLEN/8 Muxes
  for (i <- 0 until VLEN / 8) {
    s2_fwd_mask(i) := io.ldu_io.lsq.forward.forwardMask(i) || io.ldu_io.sbuffer.forwardMask(i)
    s2_fwd_data(i) := Mux(io.ldu_io.lsq.forward.forwardMask(i), io.ldu_io.lsq.forward.forwardData(i), io.ldu_io.sbuffer.forwardData(i))
  }

  XSDebug(s2_fire && s2_ld_flow, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_in.uop.pc,
    io.ldu_io.lsq.forward.forwardData.asUInt, io.ldu_io.lsq.forward.forwardMask.asUInt,
    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
  )

  //
  s2_out                  := s2_in
  s2_out.data             := 0.U // data will be generated in load s3
  s2_out.uop.fpWen        := s2_in.uop.fpWen && !s2_exception && s2_ld_flow
  s2_out.mmio             := s2_mmio
  s2_out.atomic           := s2_pmp.atomic && !s2_ld_flow
  s2_out.uop.flushPipe    := false.B
  s2_out.uop.exceptionVec := s2_exception_vec
  s2_out.forwardMask      := s2_fwd_mask
  s2_out.forwardData      := s2_fwd_data
  s2_out.handledByMSHR    := s2_cache_handled
  s2_out.miss             := s2_dcache_miss && s2_troublem

  // Generate replay signal caused by:
  // * st-ld violation check
  // * tlb miss
  // * dcache replay
  // * forward data invalid
  // * dcache miss
  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
  s2_out.rep_info.full_fwd        := s2_data_fwded
  s2_out.rep_info.data_inv_sq_idx := io.ldu_io.lsq.forward.dataInvalidSqIdx
  s2_out.rep_info.addr_inv_sq_idx := io.ldu_io.lsq.forward.addrInvalidSqIdx
  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
  s2_out.rep_info.debug           := s2_in.uop.debugInfo
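  // last_beat: paddr bit log2Up(refillBytes) records in which refill beat of the
  // cache line this load's data arrives, so a dcache-miss replay can wake up on the
  // matching beat.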

  // if forward fails, replay this inst from fetch
  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
  // if ld-ld violation is detected, replay this inst from fetch
  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss
  // io.out.bits.uop.replayInst := false.B

  // to be removed
  val s2_ld_need_fb = !s2_in.isLoadReplay &&      // already feedbacked
                      io.ldu_io.lq_rep_full &&    // LoadQueueReplay is full
                      s2_out.rep_info.need_rep && // need replay
                      !s2_exception &&            // no exception is triggered
                      !s2_hw_prf                  // not hardware prefetch
  val s2_st_need_fb = !s2_ld_flow
  io.feedback_fast.valid                 := s2_valid && (s2_ld_need_fb || s2_st_need_fb)
  io.feedback_fast.bits.hit              := false.B
  io.feedback_fast.bits.flushState       := s2_in.ptwBack
  io.feedback_fast.bits.robIdx           := s2_in.uop.robIdx
  io.feedback_fast.bits.sourceType       := Mux(s2_ld_flow, RSFeedbackType.lrqFull, RSFeedbackType.tlbMiss)
  io.feedback_fast.bits.dataInvalidSqIdx := DontCare

  io.ldu_io.ldCancel.ld1Cancel.valid := s2_valid && (
    (s2_out.rep_info.need_rep && s2_out.isFirstIssue) ||                // exe fail and issued from IQ
    s2_mmio                                                             // is mmio
  )
  io.ldu_io.ldCancel.ld1Cancel.bits := s2_out.deqPortIdx

  // fast wakeup
  io.ldu_io.fast_uop.valid := RegNext(
    !io.dcache.s1_disable_fast_wakeup &&
    s1_valid &&
    !s1_kill &&
    !io.tlb.resp.bits.miss &&
    !io.ldu_io.lsq.forward.dataInvalidFast
  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio && s2_ld_flow)
  io.ldu_io.fast_uop.bits := RegNext(s1_out.uop)

  //
  io.ldu_io.s2_ptr_chasing := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)

  // prefetch train
  io.prefetch_train.valid              := s2_valid && !s2_actually_mmio && !s2_in.tlbMiss
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train.bits.miss          := io.dcache.resp.bits.miss // TODO: use trace with bank conflict?
  io.prefetch_train.bits.meta_prefetch := io.dcache.resp.bits.meta_prefetch
  io.prefetch_train.bits.meta_access   := io.dcache.resp.bits.meta_access

  io.prefetch_train_l1.valid              := s2_valid && !s2_actually_mmio
  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train_l1.bits.miss          := io.dcache.resp.bits.miss
  io.prefetch_train_l1.bits.meta_prefetch := io.dcache.resp.bits.meta_prefetch
  io.prefetch_train_l1.bits.meta_access   := io.dcache.resp.bits.meta_access
  if (env.FPGAPlatform) {
    io.dcache.s0_pc := DontCare
    io.dcache.s1_pc := DontCare
    io.dcache.s2_pc := DontCare
  } else {
    io.dcache.s0_pc := s0_out.uop.pc
    io.dcache.s1_pc := s1_out.uop.pc
    io.dcache.s2_pc := s2_out.uop.pc
  }
  io.dcache.s2_kill := s2_pmp.ld || s2_pmp.st || s2_actually_mmio || s2_kill || !s2_ld_flow

  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready && s1_ld_flow
  val s2_ld_valid_dup = RegInit(0.U(6.W))
  s2_ld_valid_dup := 0x0.U(6.W)
  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
  when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) }
  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))
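  // s2_ld_valid_dup carries six copies of the load-valid bit purely to reduce fan-out;
  // each copy drives a separate data write enable in the load queue (see
  // ldin.bits.data_wen_dup in stage 3).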

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 3
  // --------------------------------------------------------------------------------
  // writeback and update load queue
  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
  val s3_in           = RegEnable(s2_out, s2_fire)
  val s3_out          = Wire(Valid(new MemExuOutput))
  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
  val s3_fast_rep     = Wire(Bool())
  val s3_ld_flow      = RegNext(s2_ld_flow)
  val s3_troublem     = RegNext(s2_troublem)
  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
  s3_ready := !s3_valid || s3_kill || sx_can_go

  // forward last beat
  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid) && s3_ld_flow

  // s3 load fast replay
  io.ldu_io.fast_rep_out.valid := s3_valid &&
                                  s3_fast_rep &&
                                  !s3_in.uop.robIdx.needFlush(io.redirect) &&
                                  s3_ld_flow
  io.ldu_io.fast_rep_out.bits := s3_in

  io.ldu_io.lsq.ldin.valid := s3_valid &&
                              (!s3_fast_rep || !io.ldu_io.fast_rep_out.ready) &&
                              !s3_in.feedbacked &&
                              !s3_in.lateKill &&
                              s3_ld_flow
  io.ldu_io.lsq.ldin.bits := s3_in
  io.ldu_io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid

  /* <------- DANGEROUS: Don't change sequence here ! -------> */
  io.ldu_io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
  io.ldu_io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
  io.ldu_io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)

  val s3_dly_ld_err =
    if (EnableAccurateLoadError) {
      (s3_in.lateKill || io.dcache.resp.bits.error_delayed) && RegNext(io.csrCtrl.cache_error_enable)
    } else {
      WireInit(false.B)
    }
  io.ldu_io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
  io.ldu_io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err
  io.ldu_io.lsq.ldin.bits.dcacheRequireReplay  := s3_dcache_rep

  val s3_vp_match_fail = RegNext(io.ldu_io.lsq.forward.matchInvalid || io.ldu_io.sbuffer.matchInvalid) && s3_troublem
  val s3_ldld_rep_inst =
      io.ldu_io.lsq.ldld_nuke_query.resp.valid &&
      io.ldu_io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
      RegNext(io.csrCtrl.ldld_vio_check_enable)

  val s3_rep_info = WireInit(s3_in.rep_info)
  s3_rep_info.dcache_miss   := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid && s3_troublem
  val s3_rep_frm_fetch = s3_vp_match_fail || s3_ldld_rep_inst
  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)
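  // PriorityEncoderOH keeps only the highest-priority pending cause (the lowest index
  // in LoadReplayCauses), so a flow is replayed for one reason at a time.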
  val s3_force_rep     = s3_sel_rep_cause(LoadReplayCauses.C_TM) &&
                         !s3_in.uop.exceptionVec(loadAddrMisaligned) &&
                         s3_troublem

  val s3_ld_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, LduCfg).asUInt.orR && s3_ld_flow
  val s3_st_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, StaCfg).asUInt.orR && !s3_ld_flow
  val s3_exception    = s3_ld_exception || s3_st_exception
  when ((s3_ld_exception || s3_dly_ld_err || s3_rep_frm_fetch) && !s3_force_rep) {
    io.ldu_io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
  } .otherwise {
    io.ldu_io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
  }

  // Int flow, if hit, will be written back at s3
  s3_out.valid                := s3_valid &&
                                (!s3_ld_flow || !io.ldu_io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio)
  s3_out.bits.uop             := s3_in.uop
  s3_out.bits.uop.exceptionVec(loadAccessFault) := (s3_dly_ld_err || s3_in.uop.exceptionVec(loadAccessFault)) && s3_ld_flow
  s3_out.bits.uop.replayInst  := s3_rep_frm_fetch
  s3_out.bits.data            := s3_in.data
  s3_out.bits.debug.isMMIO    := s3_in.mmio
  s3_out.bits.debug.isPerfCnt := false.B
  s3_out.bits.debug.paddr     := s3_in.paddr
  s3_out.bits.debug.vaddr     := s3_in.vaddr

  when (s3_force_rep) {
    s3_out.bits.uop.exceptionVec := 0.U.asTypeOf(s3_in.uop.exceptionVec.cloneType)
  }

  /* <------- DANGEROUS: Don't change sequence here ! -------> */
  io.ldu_io.lsq.ldin.bits.uop := s3_out.bits.uop

  val s3_revoke = s3_exception || io.ldu_io.lsq.ldin.bits.rep_info.need_rep
  io.ldu_io.lsq.ldld_nuke_query.revoke := s3_revoke
  io.ldu_io.lsq.stld_nuke_query.revoke := s3_revoke

  // feedback slow
  s3_fast_rep := RegNext(s2_fast_rep) &&
                 !s3_in.feedbacked &&
                 !s3_in.lateKill &&
                 !s3_rep_frm_fetch &&
                 !s3_exception

  val s3_fb_no_waiting = !s3_in.isLoadReplay && !(s3_fast_rep && io.ldu_io.fast_rep_out.ready) && !s3_in.feedbacked

  //
  io.feedback_slow.valid                 := s3_valid && !s3_in.uop.robIdx.needFlush(io.redirect) && s3_fb_no_waiting && s3_ld_flow
  io.feedback_slow.bits.hit              := !io.ldu_io.lsq.ldin.bits.rep_info.need_rep || io.ldu_io.lsq.ldin.ready
  io.feedback_slow.bits.flushState       := s3_in.ptwBack
  io.feedback_slow.bits.robIdx           := s3_in.uop.robIdx
  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_slow.bits.dataInvalidSqIdx := DontCare

  io.ldu_io.ldCancel.ld2Cancel.valid := s3_valid && (
    (io.ldu_io.lsq.ldin.bits.rep_info.need_rep && s3_in.isFirstIssue) ||
    s3_in.mmio
  )
  io.ldu_io.ldCancel.ld2Cancel.bits := s3_in.deqPortIdx
1136
1137  // data from dcache hit
1138  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
1139  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
1140  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
1141  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
1142  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
1143  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
1144  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
1145  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
1146  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
1147  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
1148  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1149
1150  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
1151  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
1152    "b0000".U -> s3_merged_data_frm_cache(63,    0),
1153    "b0001".U -> s3_merged_data_frm_cache(63,    8),
1154    "b0010".U -> s3_merged_data_frm_cache(63,   16),
1155    "b0011".U -> s3_merged_data_frm_cache(63,   24),
1156    "b0100".U -> s3_merged_data_frm_cache(63,   32),
1157    "b0101".U -> s3_merged_data_frm_cache(63,   40),
1158    "b0110".U -> s3_merged_data_frm_cache(63,   48),
1159    "b0111".U -> s3_merged_data_frm_cache(63,   56),
1160    "b1000".U -> s3_merged_data_frm_cache(127,  64),
1161    "b1001".U -> s3_merged_data_frm_cache(127,  72),
1162    "b1010".U -> s3_merged_data_frm_cache(127,  80),
1163    "b1011".U -> s3_merged_data_frm_cache(127,  88),
1164    "b1100".U -> s3_merged_data_frm_cache(127,  96),
1165    "b1101".U -> s3_merged_data_frm_cache(127, 104),
1166    "b1110".U -> s3_merged_data_frm_cache(127, 112),
1167    "b1111".U -> s3_merged_data_frm_cache(127, 120)
1168  ))
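  // rdataHelper then truncates and sign-/zero-extends the picked data
  // according to the load's fuOpType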
1169  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
1170
  // FIXME: add 1 cycle delay?
1172  io.ldout.bits      := s3_out.bits
1173  io.ldout.bits.data := s3_ld_data_frm_cache
1174  io.ldout.valid     := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) && s3_ld_flow
1175
1176  // fast load to load forward
1177  io.ldu_io.l2l_fwd_out.valid      := s3_out.valid && !s3_in.lateKill && s3_ld_flow
1178  io.ldu_io.l2l_fwd_out.data       := s3_ld_data_frm_cache
1179  io.ldu_io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err // ecc delayed error
1180
1181  // hybrid unit writeback to rob
1182  // delay params
1183  val SelectGroupSize   = RollbackGroupSize
1184  val lgSelectGroupSize = log2Ceil(SelectGroupSize)
1185  val TotalSelectCycles = scala.math.ceil(log2Ceil(LoadQueueRAWSize).toFloat / lgSelectGroupSize).toInt + 1
1186  val TotalDelayCycles  = TotalSelectCycles - 2
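  // worked example (illustrative parameter values; the real ones come from the
  // configuration): with RollbackGroupSize = 8 and LoadQueueRAWSize = 32,
  // lgSelectGroupSize = 3 and TotalSelectCycles = ceil(5.0 / 3) + 1 = 3,
  // so TotalDelayCycles = 1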
1187
1188  // writeback
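  // sx_* form a valid/ready delay chain: a store result is held here for
  // TotalDelayCycles so that stout cannot write back before the RAW queue's
  // rollback selection (whose latency is computed above) has completed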
1189  val sx_valid = Wire(Vec(TotalDelayCycles + 1, Bool()))
1190  val sx_ready = Wire(Vec(TotalDelayCycles + 1, Bool()))
1191  val sx_in    = Wire(Vec(TotalDelayCycles + 1, new MemExuOutput))
1192
1193  sx_can_go := sx_ready.head
1194  for (i <- 0 until TotalDelayCycles + 1) {
1195    if (i == 0) {
1196      sx_valid(i) := s3_valid && !s3_ld_flow
1197      sx_in(i)    := s3_out.bits
      sx_ready(i) := !s3_valid || sx_in(i).uop.robIdx.needFlush(io.redirect) || (if (TotalDelayCycles == 0) io.stout.ready else sx_ready(i+1)) // s3_valid is a Bool, not a Vec
1199    } else {
1200      val cur_kill   = sx_in(i).uop.robIdx.needFlush(io.redirect)
1201      val cur_can_go = (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
1202      val cur_fire   = sx_valid(i) && !cur_kill && cur_can_go
1203      val prev_fire  = sx_valid(i-1) && !sx_in(i-1).uop.robIdx.needFlush(io.redirect) && sx_ready(i)
1204
1205      sx_ready(i) := !sx_valid(i) || cur_kill || (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
1206      val sx_valid_can_go = prev_fire || cur_fire || cur_kill
      sx_valid(i) := RegEnable(prev_fire, sx_valid_can_go)
1208      sx_in(i) := RegEnable(sx_in(i-1), prev_fire)
1209    }
1210  }
1211
  val sx_last_valid = sx_valid.last
  val sx_last_ready = sx_ready.last
  val sx_last_in    = sx_in.last
1215
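  // the tail of the delay chain drives the store writeback port; uops flushed
  // by a redirect are dropped here instead of being written back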
1216  sx_last_ready  := !sx_last_valid || sx_last_in.uop.robIdx.needFlush(io.redirect) || io.stout.ready
1217  io.stout.valid := sx_last_valid && !sx_last_in.uop.robIdx.needFlush(io.redirect)
1218  io.stout.bits  := sx_last_in
1219
  // trigger
1221  val ld_trigger = FuType.isLoad(io.stout.bits.uop.fuType)
1222  val last_valid_data = RegEnable(io.stout.bits.data, io.stout.fire)
1223  val hit_ld_addr_trig_hit_vec = Wire(Vec(3, Bool()))
1224  val lq_ld_addr_trig_hit_vec = RegNext(io.ldu_io.lsq.trigger.lqLoadAddrTriggerHitVec)
  (0 until 3).foreach { i =>
1226    val tdata2    = RegNext(RegNext(io.ldu_io.trigger(i).tdata2))
1227    val matchType = RegNext(RegNext(io.ldu_io.trigger(i).matchType))
1228    val tEnable   = RegNext(RegNext(io.ldu_io.trigger(i).tEnable))
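    // tdata2/matchType/tEnable are registered twice, presumably so the CSR
    // trigger configuration lines up with the writeback-stage compares below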
1229
1230    hit_ld_addr_trig_hit_vec(i)        := TriggerCmp(RegNext(s3_in.vaddr), tdata2, matchType, tEnable)
1231    io.ldu_io.trigger(i).addrHit       := Mux(io.stout.valid && ld_trigger, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i))
1232    io.ldu_io.trigger(i).lastDataHit   := TriggerCmp(last_valid_data, tdata2, matchType, tEnable)
  }
1234  io.ldu_io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec
1235
1236  // FIXME: please move this part to LoadQueueReplay
1237  io.ldu_io.debug_ls := DontCare
1238
  // Topdown
1240  io.ldu_io.lsTopdownInfo.s1.robIdx          := s1_in.uop.robIdx.value
1241  io.ldu_io.lsTopdownInfo.s1.vaddr_valid     := s1_valid && s1_in.hasROBEntry
1242  io.ldu_io.lsTopdownInfo.s1.vaddr_bits      := s1_vaddr
1243  io.ldu_io.lsTopdownInfo.s2.robIdx          := s2_in.uop.robIdx.value
1244  io.ldu_io.lsTopdownInfo.s2.paddr_valid     := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1245  io.ldu_io.lsTopdownInfo.s2.paddr_bits      := s2_in.paddr
1246  io.ldu_io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss
1247  io.ldu_io.lsTopdownInfo.s2.cache_miss_en   := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated
1248
1249  // perf cnt
1250  XSPerfAccumulate("s0_in_valid",                  io.lsin.valid)
1251  XSPerfAccumulate("s0_in_block",                  io.lsin.valid && !io.lsin.fire)
1252  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_isFirstIssue)
1253  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.ldu_io.replay.fire)
1254  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.lsin.fire && s0_isFirstIssue)
1255  XSPerfAccumulate("s0_fast_replay_issue",         io.ldu_io.fast_rep_in.fire)
1256  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1257  XSPerfAccumulate("s0_stall_dcache",              s0_valid && !io.dcache.req.ready)
1258  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12))
1259  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12))
1260  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1261  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1262  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1263  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1264  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_prf && s0_int_iss_select)
1265  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
1266  XSPerfAccumulate("s0_hardware_prefetch_total",   io.prefetch_req.valid)
1267
1268  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1269  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1270  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1271  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1272  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1273  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1274  XSPerfAccumulate("s1_late_kill",                 s1_valid && s1_fast_rep_kill)
1275
1276  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1277  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1278  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1279  XSPerfAccumulate("s2_dcache_miss",               s2_fire && io.dcache.resp.bits.miss)
1280  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
  XSPerfAccumulate("s2_dcache_real_miss_first_issue",   s2_fire && io.dcache.resp.bits.real_miss && s2_in.isFirstIssue)
1282  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1283  XSPerfAccumulate("s2_dcache_miss_full_forward",  s2_fire && s2_dcache_miss)
  XSPerfAccumulate("s2_fwd_frm_d_chan",            s2_valid && s2_fwd_frm_d_chan)
1285  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr",    s2_valid && s2_fwd_frm_d_chan_or_mshr)
  XSPerfAccumulate("s2_stall_out",                 s2_valid && !s2_can_go)
1287  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1288  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
1289  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req miss in l1
1290  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hit in l1
1291  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
1292  XSPerfAccumulate("s2_forward_req",               s2_fire && s2_in.forward_tlDchannel)
1293  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
1294  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)
1295
1296  XSPerfAccumulate("s3_fwd_frm_d_chan",            s3_valid && s3_fwd_frm_d_chan_valid)
1297
1298  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1299  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1300  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1301  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1302  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1303  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1304  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1305  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1306
  // BUG (lyq): some of the signals in perfEvents below no longer match the current MemBlock design
1308  // hardware performance counter
1309  val perfEvents = Seq(
1310    ("load_s0_in_fire         ", s0_fire                                                        ),
1311    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1312    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready                  ),
1313    ("load_s1_in_fire         ", s0_fire                                                        ),
1314    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1315    ("load_s2_in_fire         ", s1_fire                                                        ),
1316    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                            ),
1317  )
1318  generatePerfEvent()
1319}