/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput, MemExuOutput}
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.ctrlblock.{DebugLsInfoBundle, LsTopdownInfo}
import xiangshan.backend.fu.NewCSR._
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.fu._
import xiangshan.backend.fu.util.SdtrigExt
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.cache.mmu.{TlbCmd, TlbHintReq, TlbReq, TlbRequestIO, TlbResp}
import xiangshan.mem.mdp._

class HybridUnit(implicit p: Parameters) extends XSModule
  with HasLoadHelper
  with HasPerfEvents
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
  with HasVLSUParameters
  with SdtrigExt
{
  val io = IO(new Bundle() {
    // control
    val redirect      = Flipped(ValidIO(new Redirect))
    val csrCtrl       = Flipped(new CustomCSRCtrlIO)

    // flow in
    val lsin          = Flipped(Decoupled(new MemExuInput))

    // flow out
    val ldout = DecoupledIO(new MemExuOutput)
    val stout = DecoupledIO(new MemExuOutput)

    val ldu_io = new Bundle() {
      // dcache
      val dcache        = new DCacheLoadIO

      // data path
      val sbuffer       = new LoadForwardQueryIO
      val vec_forward   = new LoadForwardQueryIO
      val lsq           = new LoadToLsqIO
      val tl_d_channel  = Input(new DcacheToLduForwardIO)
      val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
      val tlb_hint      = Flipped(new TlbHintReq)
      val l2_hint       = Input(Valid(new L2ToL1Hint))

      // fast wakeup
      val fast_uop = ValidIO(new DynInst) // early wakeup signal generated in load_s1, sent to RS in load_s2

      // trigger
      val trigger = Vec(TriggerNum, new LoadUnitTriggerIO)

      // load to load fast path
      val l2l_fwd_in    = Input(new LoadToLoadIO)
      val l2l_fwd_out   = Output(new LoadToLoadIO)

      val ld_fast_match    = Input(Bool())
      val ld_fast_fuOpType = Input(UInt())
      val ld_fast_imm      = Input(UInt(12.W))

      // hardware prefetch to l1 cache req
      val prefetch_req    = Flipped(ValidIO(new L1PrefetchReq))

      // iq cancel
      val ldCancel = Output(new LoadCancelIO()) // used to cancel the uops woken by this load, and to cancel the load itself

      // iq wakeup, used to wake up the consumer uop at load s2
      val wakeup = ValidIO(new DynInst)

      // load ecc error
      val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different

      // schedule error query
      val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))

      // queue-based replay
      val replay       = Flipped(Decoupled(new LsPipelineBundle))
      val lq_rep_full  = Input(Bool())

      // misc
      val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch

      // Load fast replay path
      val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
      val fast_rep_out = Decoupled(new LqWriteBundle)

      // Load RAR rollback
      val rollback = Valid(new Redirect)

      // perf
      val debug_ls         = Output(new DebugLsInfoBundle)
      val lsTopdownInfo    = Output(new LsTopdownInfo)
    }

    val stu_io = new Bundle() {
      val dcache          = new DCacheStoreIO
      val prefetch_req    = Flipped(DecoupledIO(new StorePrefetchReq))
      val issue           = Valid(new MemExuInput)
      val lsq             = ValidIO(new LsPipelineBundle)
      val lsq_replenish   = Output(new LsPipelineBundle())
      val stld_nuke_query = Valid(new StoreNukeQueryIO)
      val st_mask_out     = Valid(new StoreMaskBundle)
      val debug_ls        = Output(new DebugLsInfoBundle)
    }

    val vec_stu_io = new Bundle() {
      val in = Flipped(DecoupledIO(new VecPipeBundle()))
      val isFirstIssue = Input(Bool())
      val lsq = ValidIO(new LsPipelineBundle())
      val feedbackSlow = ValidIO(new VSFQFeedback)
    }

    // speculative for gated control
    val s0_prefetch_spec = Output(Bool())
    val s1_prefetch_spec = Output(Bool())
    // prefetch
    val prefetch_train            = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
    val prefetch_train_l1         = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
    val canAcceptLowConfPrefetch  = Output(Bool())
    val canAcceptHighConfPrefetch = Output(Bool())
    val correctMissTrain          = Input(Bool())

    // data path
    val tlb           = new TlbRequestIO(2)
    val pmp           = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb resp for now

    // rs feedback
    val feedback_fast = ValidIO(new RSFeedback) // stage 2
    val feedback_slow = ValidIO(new RSFeedback) // stage 3

    // for store trigger
    val fromCsrTrigger = Input(new CsrTriggerBundle)
  })

  val StorePrefetchL1Enabled = EnableStorePrefetchAtCommit || EnableStorePrefetchAtIssue || EnableStorePrefetchSPB
  val s1_ready, s2_ready, s3_ready, sx_can_go = WireInit(false.B)

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_valid         = Wire(Bool())
  val s0_dcache_ready  = Wire(Bool())
  val s0_kill          = Wire(Bool())
  val s0_vaddr         = Wire(UInt(VAddrBits.W))
  val s0_mask          = Wire(UInt((VLEN/8).W))
  val s0_uop           = Wire(new DynInst)
  val s0_has_rob_entry = Wire(Bool())
  val s0_mshrid        = Wire(UInt())
  val s0_try_l2l       = Wire(Bool())
  val s0_rep_carry     = Wire(new ReplayCarry(nWays))
  val s0_isFirstIssue  = Wire(Bool())
  val s0_fast_rep      = Wire(Bool())
  val s0_ld_rep        = Wire(Bool())
  val s0_l2l_fwd       = Wire(Bool())
  val s0_sched_idx     = Wire(UInt())
  val s0_can_go        = s1_ready
  val s0_fire          = s0_valid && s0_dcache_ready && s0_can_go
  val s0_out           = Wire(new LqWriteBundle)
  // vector
  val s0_isvec = WireInit(false.B)
  val s0_vecActive = WireInit(true.B)
  // val s0_flowPtr = WireInit(0.U.asTypeOf(new VsFlowPtr))
  val s0_isLastElem = WireInit(false.B)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.ldu_io.replay)
  // src1: fast load replay (io.ldu_io.fast_rep_in)
  // src2: load replayed by LSQ (io.ldu_io.replay)
  // src3: hardware prefetch from prefetchor (high confidence) (io.prefetch)
  // src4: int read / software prefetch first issue from RS (io.in)
  // src5: vec read first issue from RS (TODO)
  // src6: load tries pointer chasing when there is no issued or replayed load (io.fastpath)
  // src7: hardware prefetch from prefetchor (low confidence) (io.prefetch)
  // priority: high to low
  val s0_ld_flow             = FuType.isLoad(s0_uop.fuType) || FuType.isVLoad(s0_uop.fuType)
  val s0_rep_stall           = io.lsin.valid && isAfter(io.ldu_io.replay.bits.uop.robIdx, io.lsin.bits.uop.robIdx)
  private val SRC_NUM = 8
  private val Seq(
    super_rep_idx, fast_rep_idx, lsq_rep_idx, high_pf_idx,
    int_iss_idx, vec_iss_idx, l2l_fwd_idx, low_pf_idx
  ) = (0 until SRC_NUM).toSeq
  // load flow source valid
  val s0_src_valid_vec = WireInit(VecInit(Seq(
    io.ldu_io.replay.valid && io.ldu_io.replay.bits.forward_tlDchannel,
    io.ldu_io.fast_rep_in.valid,
    io.ldu_io.replay.valid && !io.ldu_io.replay.bits.forward_tlDchannel && !s0_rep_stall,
    io.ldu_io.prefetch_req.valid && io.ldu_io.prefetch_req.bits.confidence > 0.U,
    io.lsin.valid, // int flow first issue or software prefetch
    io.vec_stu_io.in.valid,
    io.ldu_io.l2l_fwd_in.valid && io.ldu_io.ld_fast_match,
    io.ldu_io.prefetch_req.valid && io.ldu_io.prefetch_req.bits.confidence === 0.U,
  )))
  // load flow source ready
  val s0_src_ready_vec = Wire(Vec(SRC_NUM, Bool()))
  s0_src_ready_vec(0) := true.B
  for (i <- 1 until SRC_NUM) {
    s0_src_ready_vec(i) := !s0_src_valid_vec.take(i).reduce(_ || _)
  }
  // load flow source select (OH)
  val s0_src_select_vec = WireInit(VecInit((0 until SRC_NUM).map{i => s0_src_valid_vec(i) && s0_src_ready_vec(i)}))
  val s0_hw_prf_select = s0_src_select_vec(high_pf_idx) || s0_src_select_vec(low_pf_idx)
  dontTouch(s0_src_valid_vec)
  dontTouch(s0_src_ready_vec)
  dontTouch(s0_src_select_vec)
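
  // Illustration only (pure Scala helper, never invoked; generates no hardware):
  // the valid/ready/select logic above is a fixed-priority arbiter. A source is
  // ready only when no higher-priority (lower-index) source is valid, so
  // s0_src_select_vec is one-hot. A minimal software sketch of the same computation:
  def debugPrioritySelect(valids: Seq[Boolean]): Seq[Boolean] = {
    // readys(i) is true iff no source with a higher priority (lower index) is valid
    val readys = valids.scanLeft(true)((noneAbove, v) => noneAbove && !v).init
    valids.zip(readys).map { case (v, r) => v && r }
  }
  // e.g. debugPrioritySelect(Seq(false, true, true)) == Seq(false, true, false)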

  s0_valid := s0_src_valid_vec.reduce(_ || _) && !s0_kill

  // try pointer chasing when S0's out is ready and dcache is ready
  val s0_try_ptr_chasing      = s0_src_select_vec(l2l_fwd_idx)
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.ldu_io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.ldu_io.l2l_fwd_in.data(5, 0) +& io.ldu_io.ld_fast_imm(5, 0)
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled || (s0_out.uop.robIdx.needFlush(io.redirect) && !s0_try_ptr_chasing)

  // prefetch related ctrl signal
  val s0_prf    = Wire(Bool())
  val s0_prf_rd = Wire(Bool())
  val s0_prf_wr = Wire(Bool())
  val s0_hw_prf = s0_hw_prf_select

  io.canAcceptLowConfPrefetch  := s0_src_ready_vec(low_pf_idx) && io.ldu_io.dcache.req.ready
  io.canAcceptHighConfPrefetch := s0_src_ready_vec(high_pf_idx) && io.ldu_io.dcache.req.ready

  if (StorePrefetchL1Enabled) {
    s0_dcache_ready := Mux(s0_ld_flow, io.ldu_io.dcache.req.ready, io.stu_io.dcache.req.ready)
  } else {
    s0_dcache_ready := Mux(s0_ld_flow, io.ldu_io.dcache.req.ready, true.B)
  }

  // query DTLB
  io.tlb.req.valid                   := s0_valid && s0_dcache_ready
  io.tlb.req.bits.cmd                := Mux(s0_prf,
                                         Mux(s0_prf_wr, TlbCmd.write, TlbCmd.read),
                                         Mux(s0_ld_flow, TlbCmd.read, TlbCmd.write)
                                       )
  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.ldu_io.prefetch_req.bits.paddr, s0_vaddr)
  io.tlb.req.bits.size               := Mux(s0_isvec, io.vec_stu_io.in.bits.alignedType(1, 0), LSUOpType.size(s0_uop.fuOpType)) // may break if used in the future
  io.tlb.req.bits.kill               := s0_kill
  io.tlb.req.bits.memidx.is_ld       := s0_ld_flow
  io.tlb.req.bits.memidx.is_st       := !s0_ld_flow
  io.tlb.req.bits.memidx.idx         := s0_uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx       := s0_uop.robIdx
  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc           := s0_uop.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue

  // query DCache
  // for load
  io.ldu_io.dcache.req.valid             := s0_valid && s0_dcache_ready && s0_ld_flow
  io.ldu_io.dcache.req.bits.cmd          := Mux(s0_prf_rd, MemoryOpConstants.M_PFR,
                                              Mux(s0_prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD))
  io.ldu_io.dcache.req.bits.vaddr        := s0_vaddr
  io.ldu_io.dcache.req.bits.mask         := s0_mask
  io.ldu_io.dcache.req.bits.data         := DontCare
  io.ldu_io.dcache.req.bits.isFirstIssue := s0_isFirstIssue
  io.ldu_io.dcache.req.bits.instrtype    := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
  io.ldu_io.dcache.req.bits.debug_robIdx := s0_uop.robIdx.value
  io.ldu_io.dcache.req.bits.replayCarry  := s0_rep_carry
  io.ldu_io.dcache.req.bits.id           := DontCare // TODO: update cache meta
  io.ldu_io.dcache.pf_source             := Mux(s0_hw_prf_select, io.ldu_io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)
  io.ldu_io.dcache.is128Req              := is128Bit(io.vec_stu_io.in.bits.alignedType) && io.vec_stu_io.in.valid && s0_src_select_vec(vec_iss_idx)

  // for store
  io.stu_io.dcache.req.valid             := s0_valid && s0_dcache_ready && !s0_ld_flow && !s0_prf
  io.stu_io.dcache.req.bits.cmd          := MemoryOpConstants.M_PFW
  io.stu_io.dcache.req.bits.vaddr        := s0_vaddr
  io.stu_io.dcache.req.bits.instrtype    := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, STORE_SOURCE.U)
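
  // Note (assumed behavior): this store-side dcache port only issues M_PFW
  // (prefetch-for-write), i.e. it warms the target line ahead of time; the
  // store data itself is written into the dcache at commit time through the
  // sbuffer.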

  // load flow priority mux
  def fromNullSource() = {
    s0_vaddr         := 0.U
    s0_mask          := 0.U
    s0_uop           := 0.U.asTypeOf(new DynInst)
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U
  }

  def fromFastReplaySource(src: LqWriteBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := src.mask
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := src.hasROBEntry
    s0_rep_carry     := src.rep_info.rep_carry
    s0_mshrid        := src.rep_info.mshr_id
    s0_isFirstIssue  := false.B
    s0_fast_rep      := true.B
    s0_ld_rep        := src.isLoadReplay
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
  }

  def fromNormalReplaySource(src: LsPipelineBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := genVWmask(src.vaddr, src.uop.fuOpType(1, 0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rep_carry     := src.replayCarry
    s0_mshrid        := src.mshrid
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := true.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
  }

  def fromPrefetchSource(src: L1PrefetchReq) = {
    s0_vaddr         := src.getVaddr()
    s0_mask          := 0.U
    s0_uop           := DontCare
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := true.B
    s0_prf_rd        := !src.is_store
    s0_prf_wr        := src.is_store
    s0_sched_idx     := 0.U
  }

  def fromIntIssueSource(src: MemExuInput) = {
    s0_vaddr         := src.src(0) + SignExt(src.uop.imm(11, 0), VAddrBits)
    s0_mask          := genVWmask(s0_vaddr, src.uop.fuOpType(1,0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := true.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := 0.U
  }

  def fromVecIssueSource(src: VecPipeBundle) = {
    // For now, the vector port handles only vector store flows
    s0_vaddr         := src.vaddr
    s0_mask          := src.mask
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    // s0_isFirstIssue  := src.isFirstIssue
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U

    s0_isvec         := true.B
    s0_vecActive     := io.vec_stu_io.in.bits.vecActive
    // s0_flowPtr       := io.vec_stu_io.in.bits.flowPtr
    // s0_isLastElem    := io.vec_stu_io.in.bits.isLastElem
  }

  def fromLoadToLoadSource(src: LoadToLoadIO) = {
    s0_vaddr              := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0))
    s0_mask               := genVWmask(s0_vaddr, io.ldu_io.ld_fast_fuOpType(1, 0))
    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
    // Assume the pointer chasing is always ld.
    s0_uop.fuOpType       := io.ldu_io.ld_fast_fuOpType
    s0_try_l2l            := true.B
    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer chasing,
    // because these signals will be updated in S1
    s0_has_rob_entry      := false.B
    s0_mshrid             := 0.U
    s0_rep_carry          := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_isFirstIssue       := true.B
    s0_fast_rep           := false.B
    s0_ld_rep             := false.B
    s0_l2l_fwd            := true.B
    s0_prf                := false.B
    s0_prf_rd             := false.B
    s0_prf_wr             := false.B
    s0_sched_idx          := 0.U
  }

  // set default
  s0_uop := DontCare
  when (s0_src_select_vec(super_rep_idx))     { fromNormalReplaySource(io.ldu_io.replay.bits)     }
  .elsewhen (s0_src_select_vec(fast_rep_idx)) { fromFastReplaySource(io.ldu_io.fast_rep_in.bits)  }
  .elsewhen (s0_src_select_vec(lsq_rep_idx))  { fromNormalReplaySource(io.ldu_io.replay.bits)     }
  .elsewhen (s0_hw_prf_select)                { fromPrefetchSource(io.ldu_io.prefetch_req.bits)   }
  .elsewhen (s0_src_select_vec(int_iss_idx))  { fromIntIssueSource(io.lsin.bits)                  }
  .elsewhen (s0_src_select_vec(vec_iss_idx))  { fromVecIssueSource(io.vec_stu_io.in.bits)         }
  .otherwise {
    if (EnableLoadToLoadForward) {
      fromLoadToLoadSource(io.ldu_io.l2l_fwd_in)
    } else {
      fromNullSource()
    }
  }
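
  // The default-then-override pattern above relies on Chisel last-connect
  // semantics: s0_uop (and the other s0_* wires) first get a default, then
  // exactly one when/elsewhen branch, listed from highest to lowest priority,
  // overrides it. A minimal sketch of the idiom (illustration only,
  // hypothetical names):
  //
  //   sig := DontCare                    // default
  //   when (selHigh)     { sig := srcA } // highest priority source
  //   .elsewhen (selLow) { sig := srcB } // lower priority source
  //   .otherwise         { sig := srcC } // fallback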

  // address align check
  val s0_addr_aligned = LookupTree(Mux(s0_isvec, io.vec_stu_io.in.bits.alignedType(1,0), s0_uop.fuOpType(1, 0)), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  )) // may break if used in the future
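
  // Illustration only (pure Scala helper, never invoked): the LookupTree above
  // checks natural alignment for the access size encoded in fuOpType(1, 0) /
  // alignedType(1, 0). Equivalently, a 2^sizeLog2-byte access at vaddr is
  // aligned iff the low sizeLog2 bits of vaddr are zero:
  def debugIsAligned(vaddr: BigInt, sizeLog2: Int): Boolean =
    (vaddr & ((BigInt(1) << sizeLog2) - 1)) == 0
  // e.g. debugIsAligned(0x8004, 2) == true  (word aligned)
  //      debugIsAligned(0x8006, 2) == false (word access, vaddr(1, 0) = 2)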

  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetches need to write back to loadQueueFlag
  s0_out               := DontCare
  s0_out.vaddr         := s0_vaddr
  s0_out.mask          := s0_mask
  s0_out.uop           := s0_uop
  s0_out.isFirstIssue  := s0_isFirstIssue
  s0_out.hasROBEntry   := s0_has_rob_entry
  s0_out.isPrefetch    := s0_prf
  s0_out.isHWPrefetch  := s0_hw_prf
  s0_out.isFastReplay  := s0_fast_rep
  s0_out.isLoadReplay  := s0_ld_rep
  s0_out.isFastPath    := s0_l2l_fwd
  s0_out.mshrid        := s0_mshrid
  s0_out.isvec         := s0_isvec
  s0_out.isLastElem    := s0_isLastElem
  s0_out.vecActive     := s0_vecActive
  // s0_out.sflowPtr      := s0_flowPtr
  s0_out.uop.exceptionVec(loadAddrMisaligned)  := !s0_addr_aligned && s0_ld_flow
  s0_out.uop.exceptionVec(storeAddrMisaligned) := !s0_addr_aligned && !s0_ld_flow
  s0_out.forward_tlDchannel := s0_src_select_vec(super_rep_idx)
  when(io.tlb.req.valid && s0_isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }.otherwise {
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex     := s0_sched_idx

  // load fast replay
  io.ldu_io.fast_rep_in.ready := (s0_can_go && io.ldu_io.dcache.req.ready && s0_src_ready_vec(fast_rep_idx))

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.ldu_io.replay.ready := (s0_can_go && io.ldu_io.dcache.req.ready && (s0_src_ready_vec(lsq_rep_idx) && !s0_rep_stall || s0_src_select_vec(super_rep_idx)))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.lsin.ready := (s0_can_go &&
                    Mux(FuType.isLoad(io.lsin.bits.uop.fuType), io.ldu_io.dcache.req.ready,
                    (if (StorePrefetchL1Enabled) io.stu_io.dcache.req.ready else true.B)) && s0_src_ready_vec(int_iss_idx))
  io.vec_stu_io.in.ready := s0_can_go && io.ldu_io.dcache.req.ready && s0_src_ready_vec(vec_iss_idx)


  // for hw prefetch load flow feedback, to be added later
  // io.prefetch_in.ready := s0_hw_prf_select

  // dcache replacement extra info
  // TODO: should prefetch load update replacement?
  io.ldu_io.dcache.replacementUpdated := Mux(s0_src_select_vec(lsq_rep_idx) || s0_src_select_vec(super_rep_idx), io.ldu_io.replay.bits.replacementUpdated, false.B)

  io.stu_io.prefetch_req.ready := s1_ready && io.stu_io.dcache.req.ready && !io.lsin.valid

  // load debug
  XSDebug(io.ldu_io.dcache.req.fire && s0_ld_flow,
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSDebug(s0_valid && s0_ld_flow,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, lqIdx ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // store debug
  XSDebug(io.stu_io.dcache.req.fire && !s0_ld_flow,
    p"[DCACHE STORE REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSDebug(s0_valid && !s0_ld_flow,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, sqIdx ${Hexadecimal(s0_out.uop.sqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")


  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid      = RegInit(false.B)
  val s1_in         = Wire(new LqWriteBundle)
  val s1_out        = Wire(new LqWriteBundle)
  val s1_kill       = Wire(Bool())
  val s1_can_go     = s2_ready
  val s1_fire       = s1_valid && !s1_kill && s1_can_go
  val s1_ld_flow    = RegNext(s0_ld_flow)
  val s1_isvec      = RegEnable(s0_out.isvec, false.B, s0_fire)
  val s1_isLastElem = RegEnable(s0_out.isLastElem, false.B, s0_fire)

  s1_ready := !s1_valid || s1_kill || s2_ready
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
  s1_in   := RegEnable(s0_out, s0_fire)

  val s1_fast_rep_dly_err = RegNext(io.ldu_io.fast_rep_in.bits.delayedLoadError)
  val s1_fast_rep_kill    = s1_fast_rep_dly_err && s1_in.isFastReplay
  val s1_l2l_fwd_dly_err  = RegNext(io.ldu_io.l2l_fwd_in.dly_ld_err)
  val s1_l2l_fwd_kill     = s1_l2l_fwd_dly_err && s1_in.isFastPath
  val s1_late_kill        = s1_fast_rep_kill || s1_l2l_fwd_kill
  val s1_vaddr_hi         = Wire(UInt())
  val s1_vaddr_lo         = Wire(UInt())
  val s1_vaddr            = Wire(UInt())
  val s1_paddr_dup_lsu    = Wire(UInt())
  val s1_paddr_dup_dcache = Wire(UInt())
  val s1_ld_exception     = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, LduCfg).asUInt.orR   // af & pf exceptions are modified below
  val s1_st_exception     = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, StaCfg).asUInt.orR   // af & pf exceptions are modified below
  val s1_exception        = (s1_ld_flow && s1_ld_exception) || (!s1_ld_flow && s1_st_exception)
  val s1_tlb_miss         = io.tlb.resp.bits.miss
  val s1_prf              = s1_in.isPrefetch
  val s1_hw_prf           = s1_in.isHWPrefetch
  val s1_sw_prf           = s1_prf && !s1_hw_prf
  val s1_tlb_memidx       = io.tlb.resp.bits.memidx

  // mmio cbo decoder
  val s1_mmio_cbo  = (s1_in.uop.fuOpType === LSUOpType.cbo_clean ||
                      s1_in.uop.fuOpType === LSUOpType.cbo_flush ||
                      s1_in.uop.fuOpType === LSUOpType.cbo_inval) && !s1_ld_flow && !s1_prf
  val s1_mmio = s1_mmio_cbo

  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo         := s1_in.vaddr(5, 0)
  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss &&
        s1_tlb_memidx.idx === s1_in.uop.lqIdx.value && s1_ld_flow) {
    // printf("Load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  } .elsewhen(s1_tlb_memidx.is_st && io.tlb.resp.valid && !s1_tlb_miss &&
              s1_tlb_memidx.idx === s1_out.uop.sqIdx.value && !s1_ld_flow) {
    // printf("Store idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill   := s1_kill
  io.tlb.resp.ready := true.B

  io.ldu_io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
  io.ldu_io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.ldu_io.dcache.s1_kill             := s1_kill || s1_tlb_miss || s1_exception

  // store to load forwarding
  io.ldu_io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow)
  io.ldu_io.sbuffer.vaddr := s1_vaddr
  io.ldu_io.sbuffer.paddr := s1_paddr_dup_lsu
  io.ldu_io.sbuffer.uop   := s1_in.uop
  io.ldu_io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.ldu_io.sbuffer.mask  := s1_in.mask
  io.ldu_io.sbuffer.pc    := s1_in.uop.pc // FIXME: remove it

  io.ldu_io.vec_forward.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow)
  io.ldu_io.vec_forward.vaddr := s1_vaddr
  io.ldu_io.vec_forward.paddr := s1_paddr_dup_lsu
  io.ldu_io.vec_forward.uop   := s1_in.uop
  io.ldu_io.vec_forward.sqIdx := s1_in.uop.sqIdx
  io.ldu_io.vec_forward.mask  := s1_in.mask
  io.ldu_io.vec_forward.pc    := s1_in.uop.pc // FIXME: remove it

  io.ldu_io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_fast_rep_kill || s1_prf || !s1_ld_flow)
  io.ldu_io.lsq.forward.vaddr     := s1_vaddr
  io.ldu_io.lsq.forward.paddr     := s1_paddr_dup_lsu
  io.ldu_io.lsq.forward.uop       := s1_in.uop
  io.ldu_io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
  io.ldu_io.lsq.forward.sqIdxMask := 0.U
  io.ldu_io.lsq.forward.mask      := s1_in.mask
  io.ldu_io.lsq.forward.pc        := s1_in.uop.pc // FIXME: remove it

  // st-ld violation query
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                       io.ldu_io.stld_nuke_query(w).valid && // query valid
                       isAfter(s1_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store
                       // TODO: Fix me when vector instruction
                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                       (s1_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data masks overlap
                      })).asUInt.orR && !s1_tlb_miss && s1_ld_flow

  s1_out                   := s1_in
  s1_out.vaddr             := s1_vaddr
  s1_out.paddr             := s1_paddr_dup_lsu
  s1_out.tlbMiss           := s1_tlb_miss
  s1_out.ptwBack           := io.tlb.resp.bits.ptwBack
  s1_out.rep_info.debug    := s1_in.uop.debugInfo
  s1_out.rep_info.nuke     := s1_nuke && !s1_sw_prf
  s1_out.lateKill          := s1_late_kill

  // trigger
  val storeTrigger = Module(new StoreTrigger)
  storeTrigger.io.fromCsrTrigger.tdataVec             := io.fromCsrTrigger.tdataVec
  storeTrigger.io.fromCsrTrigger.tEnableVec           := io.fromCsrTrigger.tEnableVec
  storeTrigger.io.fromCsrTrigger.triggerCanRaiseBpExp := io.fromCsrTrigger.triggerCanRaiseBpExp
  storeTrigger.io.fromCsrTrigger.debugMode            := io.fromCsrTrigger.debugMode
  storeTrigger.io.fromStore.vaddr                     := s1_in.vaddr

  when (s1_ld_flow) {
    when (!s1_late_kill) {
      // the current ori test will cause the case of ldest == 0; this will be modified in the future
      // af & pf exceptions are modified here
      s1_out.uop.exceptionVec(loadPageFault)       := io.tlb.resp.bits.excp(0).pf.ld
      s1_out.uop.exceptionVec(loadGuestPageFault)  := io.tlb.resp.bits.excp(0).gpf.ld
      s1_out.uop.exceptionVec(loadAccessFault)     := io.tlb.resp.bits.excp(0).af.ld
    } .otherwise {
      s1_out.uop.exceptionVec(loadAddrMisaligned)  := false.B
      s1_out.uop.exceptionVec(loadAccessFault)     := s1_late_kill
    }
  } .otherwise {
    s1_out.uop.exceptionVec(storePageFault)        := io.tlb.resp.bits.excp(0).pf.st
    s1_out.uop.exceptionVec(storeGuestPageFault)   := io.tlb.resp.bits.excp(0).gpf.st
    s1_out.uop.exceptionVec(storeAccessFault)      := io.tlb.resp.bits.excp(0).af.st
    s1_out.uop.trigger                             := storeTrigger.io.toStore.triggerAction
    s1_out.uop.exceptionVec(breakPoint)            := TriggerAction.isExp(storeTrigger.io.toStore.triggerAction)
  }

  // pointer chasing
  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
  val s1_fu_op_type_not_ld     = WireInit(false.B)
  val s1_not_fast_match        = WireInit(false.B)
  val s1_addr_mismatch         = WireInit(false.B)
  val s1_addr_misaligned       = WireInit(false.B)
  val s1_ptr_chasing_canceled  = WireInit(false.B)
  val s1_cancel_ptr_chasing    = WireInit(false.B)

  s1_kill := s1_late_kill ||
             s1_cancel_ptr_chasing ||
             s1_in.uop.robIdx.needFlush(io.redirect) ||
             RegEnable(s0_kill, false.B, io.lsin.valid || io.ldu_io.replay.valid || io.ldu_io.l2l_fwd_in.valid || io.ldu_io.fast_rep_in.valid || io.vec_stu_io.in.valid)

  if (EnableLoadToLoadForward) {
    // Sometimes, we need to cancel the load-load forwarding.
    // These can be put at S0 if timing is bad at S1.
    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
    s1_addr_mismatch      := s1_ptr_chasing_vaddr(6) || RegEnable(io.ldu_io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
    // Case 1: the address is misaligned, kill s1
    s1_addr_misaligned    := LookupTree(s1_in.uop.fuOpType(1, 0), List(
                             "b00".U   -> false.B,                  //b
                             "b01".U   -> (s1_vaddr(0)    =/= 0.U), //h
                             "b10".U   -> (s1_vaddr(1, 0) =/= 0.U), //w
                             "b11".U   -> (s1_vaddr(2, 0) =/= 0.U)  //d
                          ))
    // Case 2: this load-load uop is cancelled
    s1_ptr_chasing_canceled := !io.lsin.valid || FuType.isStore(io.lsin.bits.uop.fuType)

    when (s1_try_ptr_chasing) {
      s1_cancel_ptr_chasing := s1_addr_mismatch || s1_addr_misaligned || s1_ptr_chasing_canceled

      s1_in.uop           := io.lsin.bits.uop
      s1_in.isFirstIssue  := io.lsin.bits.isFirstIssue
      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)

      // record tlb time when getting the data, to ensure the correctness of the latency
      // calculation (it should not really be recorded here, because this path does not use the tlb)
      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
    }
    when (!s1_cancel_ptr_chasing) {
      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.ldu_io.replay.fire && !io.ldu_io.fast_rep_in.fire && !(s0_src_valid_vec(high_pf_idx) && io.canAcceptHighConfPrefetch)
      when (s1_try_ptr_chasing) {
        io.lsin.ready := true.B
      }
    }
  }

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
  // to enable load-load forwarding, sqIdxMask must be calculated based on lsin.uop
  // If the timing here is not OK, load-load forwarding has to be disabled.
  // Or we calculate sqIdxMask at RS??
  io.ldu_io.lsq.forward.sqIdxMask := s1_sqIdx_mask
  if (EnableLoadToLoadForward) {
    when (s1_try_ptr_chasing) {
      io.ldu_io.lsq.forward.sqIdxMask := UIntToMask(io.lsin.bits.uop.sqIdx.value, StoreQueueSize)
    }
  }
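
  // Illustration only (pure Scala helper, never invoked): assuming
  // utility.UIntToMask(ptr, n) builds the mask of entries strictly below ptr
  // (bit i set iff i < ptr), sqIdxMask marks the store queue slots holding
  // stores older than this load, i.e. the only slots allowed to forward to it:
  def debugUIntToMask(ptr: Int): BigInt = (BigInt(1) << ptr) - 1
  // e.g. debugUIntToMask(3) == 0x7 (entries 0, 1 and 2 are forwarding candidates)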

  io.ldu_io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel && s1_ld_flow
  io.ldu_io.forward_mshr.mshrid := s1_out.mshrid
  io.ldu_io.forward_mshr.paddr  := s1_out.paddr

  io.ldu_io.wakeup.valid := s0_fire && s0_ld_flow && (s0_src_select_vec(super_rep_idx) || s0_src_select_vec(fast_rep_idx) || s0_src_select_vec(lsq_rep_idx) || s0_src_select_vec(int_iss_idx))
  io.ldu_io.wakeup.bits := s0_uop

  io.stu_io.dcache.s1_kill := s1_tlb_miss || s1_exception || s1_mmio || s1_in.uop.robIdx.needFlush(io.redirect)
  io.stu_io.dcache.s1_paddr := s1_paddr_dup_dcache


  // load debug
  XSDebug(s1_valid && s1_ld_flow,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lqIdx ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // store debug
  XSDebug(s1_valid && !s1_ld_flow,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, sqIdx ${Hexadecimal(s1_out.uop.sqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // store out
  io.stu_io.lsq.valid         := s1_valid && !s1_ld_flow && !s1_prf && !s1_isvec
  io.stu_io.lsq.bits          := s1_out
  io.stu_io.lsq.bits.miss     := s1_tlb_miss

  io.vec_stu_io.lsq.valid           := s1_valid && !s1_ld_flow && !s1_prf && s1_isvec
  io.vec_stu_io.lsq.bits            := s1_out
  io.vec_stu_io.lsq.bits.miss       := s1_tlb_miss
  io.vec_stu_io.lsq.bits.isLastElem := s1_isLastElem

  io.stu_io.st_mask_out.valid       := s1_valid && !s1_ld_flow && !s1_prf
  io.stu_io.st_mask_out.bits.mask   := s1_out.mask
  io.stu_io.st_mask_out.bits.sqIdx  := s1_out.uop.sqIdx

  io.stu_io.issue.valid       := s1_valid && !s1_tlb_miss && !s1_ld_flow && !s1_prf && !s1_isvec
  io.stu_io.issue.bits        := RegEnable(io.lsin.bits, io.lsin.fire)

  // st-ld violation detect request
  io.stu_io.stld_nuke_query.valid       := s1_valid && !s1_tlb_miss && !s1_ld_flow && !s1_prf
  io.stu_io.stld_nuke_query.bits.robIdx := s1_in.uop.robIdx
  io.stu_io.stld_nuke_query.bits.paddr  := s1_paddr_dup_lsu
  io.stu_io.stld_nuke_query.bits.mask   := s1_in.mask

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // s2: DCache resp
  val s2_valid  = RegInit(false.B)
  val s2_in     = Wire(new LqWriteBundle)
  val s2_out    = Wire(new LqWriteBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go
  val s2_isvec  = RegEnable(s1_isvec, false.B, s1_fire)
  val s2_vecActive = RegEnable(s1_out.vecActive, true.B, s1_fire)
  val s2_paddr  = RegEnable(s1_paddr_dup_lsu, s1_fire)

  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }
  s2_in := RegEnable(s1_out, s1_fire)

  val s2_pmp = WireInit(io.pmp)

  val s2_prf    = s2_in.isPrefetch
  val s2_hw_prf = s2_in.isHWPrefetch
  val s2_ld_flow  = RegEnable(s1_ld_flow, s1_fire)

  // exceptions that may cause the load addr to be invalid / illegal
  // if such an exception happens, the inst and its exception info
  // will be force-written back to the rob
  val s2_exception_vec = WireInit(s2_in.uop.exceptionVec)
  when (s2_ld_flow) {
    when (!s2_in.lateKill) {
      s2_exception_vec(loadAccessFault) := (s2_in.uop.exceptionVec(loadAccessFault) || s2_pmp.ld) && s2_vecActive
      // soft prefetch will not trigger any exception (but ecc error interrupt may be triggered)
      when (s2_prf || s2_in.tlbMiss) {
        s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
      }
    }
  } .otherwise {
    s2_exception_vec(storeAccessFault) := s2_in.uop.exceptionVec(storeAccessFault) || s2_pmp.st
    when (s2_prf || s2_in.tlbMiss) {
      s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
    }
  }
  val s2_ld_exception = ExceptionNO.selectByFu(s2_exception_vec, LduCfg).asUInt.orR && s2_ld_flow
  val s2_st_exception = ExceptionNO.selectByFu(s2_exception_vec, StaCfg).asUInt.orR && !s2_ld_flow
  val s2_exception    = s2_ld_exception || s2_st_exception

  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.ldu_io.forward_mshr.forward()
  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)

  // writeback access fault caused by ecc error / bus error
  // * ecc data error is slow to generate, so we will not use it until load stage 3
  // * in load stage 3, an extra signal io.load_error will be used to report it
  val s2_actually_mmio = s2_pmp.mmio
  val s2_ld_mmio       = !s2_prf &&
                          s2_actually_mmio &&
                         !s2_exception &&
                         !s2_in.tlbMiss &&
                         s2_ld_flow
  val s2_st_mmio       = !s2_prf &&
                          (RegNext(s1_mmio) || s2_pmp.mmio) &&
                         !s2_exception &&
                         !s2_in.tlbMiss &&
                         !s2_ld_flow
  val s2_st_atomic     = !s2_prf &&
                          (RegNext(s1_mmio) || s2_pmp.atomic) &&
                         !s2_exception &&
                         !s2_in.tlbMiss &&
                         !s2_ld_flow
  val s2_full_fwd      = Wire(Bool())
  val s2_mem_amb       = s2_in.uop.storeSetHit &&
                         io.ldu_io.lsq.forward.addrInvalid

  val s2_tlb_miss      = s2_in.tlbMiss
  val s2_fwd_fail      = io.ldu_io.lsq.forward.dataInvalid || io.ldu_io.vec_forward.dataInvalid
  val s2_dcache_miss   = io.ldu_io.dcache.resp.bits.miss &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_mq_nack       = io.ldu_io.dcache.s2_mq_nack &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_bank_conflict = io.ldu_io.dcache.s2_bank_conflict &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_wpu_pred_fail = io.ldu_io.dcache.s2_wpu_pred_fail &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_rar_nack      = io.ldu_io.lsq.ldld_nuke_query.req.valid &&
                         !io.ldu_io.lsq.ldld_nuke_query.req.ready

  val s2_raw_nack      = io.ldu_io.lsq.stld_nuke_query.req.valid &&
                         !io.ldu_io.lsq.stld_nuke_query.req.ready

  // st-ld violation query
  //  NeedFastRecovery is valid when:
  //  1. The fast recovery query request is valid.
  //  2. The load instruction is younger than the requestors (store instructions).
  //  3. The physical addresses match.
  //  4. The data masks overlap.
  val s2_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                        io.ldu_io.stld_nuke_query(w).valid && // query valid
                        isAfter(s2_in.uop.robIdx, io.ldu_io.stld_nuke_query(w).bits.robIdx) && // older store
                        // TODO: Fix me when vector instruction
                        (s2_in.paddr(PAddrBits-1, 3) === io.ldu_io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                        (s2_in.mask & io.ldu_io.stld_nuke_query(w).bits.mask).orR // data masks overlap
                      })).asUInt.orR && s2_ld_flow || s2_in.rep_info.nuke
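
  // Illustration only (pure Scala helper, never invoked): per store pipeline,
  // the nuke check above compares addresses at double-word (8-byte)
  // granularity (paddr bits above bit 3) and then checks byte-mask overlap:
  def debugNukeMatch(ldPaddr: BigInt, stPaddr: BigInt,
                     ldMask: BigInt, stMask: BigInt): Boolean =
    ((ldPaddr >> 3) == (stPaddr >> 3)) && ((ldMask & stMask) != 0)
  // e.g. same double word but disjoint byte masks => no nuke:
  //      debugNukeMatch(0x80000004L, 0x80000000L, 0xf0, 0x0f) == false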

  val s2_cache_handled   = io.ldu_io.dcache.resp.bits.handled
  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
                           io.ldu_io.dcache.resp.bits.tag_error

  val s2_troublem        = !s2_exception &&
                           !s2_ld_mmio &&
                           !s2_prf &&
                           !s2_in.lateKill &&
                           s2_ld_flow

  io.ldu_io.dcache.resp.ready := true.B
  io.stu_io.dcache.resp.ready := true.B
  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_ld_mmio || s2_prf || s2_in.lateKill) && s2_ld_flow
  assert(!(s2_valid && (s2_dcache_should_resp && !io.ldu_io.dcache.resp.valid)), "DCache response got lost")

  // fast replay request
  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
  val s2_nuke_fast_rep   = !s2_mq_nack &&
                           !s2_dcache_miss &&
                           !s2_bank_conflict &&
                           !s2_wpu_pred_fail &&
                           !s2_rar_nack &&
                           !s2_raw_nack &&
                           s2_nuke

  val s2_fast_rep = !s2_mem_amb &&
                    !s2_tlb_miss &&
                    !s2_fwd_fail &&
                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
                    s2_troublem

  // need to allocate a new replay queue entry
  val s2_can_query = !s2_mem_amb &&
                     !s2_tlb_miss  &&
                     !s2_fwd_fail &&
                     !s2_dcache_fast_rep &&
                     s2_troublem

  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)

  // ld-ld violation query request
  io.ldu_io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.ldu_io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
  io.ldu_io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
  io.ldu_io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.ldu_io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // st-ld violation query request
  io.ldu_io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.ldu_io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
  io.ldu_io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
  io.ldu_io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.ldu_io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // merge forward result
  // lsq has higher priority than vec forward, and vec forward has higher priority than sbuffer
  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.ldu_io.lsq.forward.dataInvalid && !io.ldu_io.vec_forward.dataInvalid
  // generate VLEN/8 Muxs
  for (i <- 0 until VLEN / 8) {
    s2_fwd_mask(i) := io.ldu_io.lsq.forward.forwardMask(i) || io.ldu_io.sbuffer.forwardMask(i) || io.ldu_io.vec_forward.forwardMask(i)
    s2_fwd_data(i) := Mux(
      io.ldu_io.lsq.forward.forwardMask(i),
      io.ldu_io.lsq.forward.forwardData(i),
      Mux(
        io.ldu_io.vec_forward.forwardMask(i),
        io.ldu_io.vec_forward.forwardData(i),
        io.ldu_io.sbuffer.forwardData(i)
      )
    )
  }
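
  // Illustration only (pure Scala helper, never invoked): each forwarded byte
  // is selected with priority LSQ > vector forward > sbuffer, mirroring the
  // Mux tree in the loop above:
  def debugMergeByte(lsqHit: Boolean, vecHit: Boolean,
                     lsqByte: Int, vecByte: Int, sbufferByte: Int): Int =
    if (lsqHit) lsqByte else if (vecHit) vecByte else sbufferByte
  // e.g. debugMergeByte(lsqHit = false, vecHit = true, 0xAA, 0xBB, 0xCC) == 0xBB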

  XSDebug(s2_fire && s2_ld_flow, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_in.uop.pc,
    io.ldu_io.lsq.forward.forwardData.asUInt, io.ldu_io.lsq.forward.forwardMask.asUInt,
    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
  )

  s2_out                  := s2_in
  s2_out.data             := 0.U // data will be generated in load s3
  s2_out.uop.fpWen        := s2_in.uop.fpWen && !s2_exception && s2_ld_flow
  s2_out.mmio             := s2_ld_mmio || s2_st_mmio
  s2_out.atomic           := s2_st_atomic
  s2_out.uop.flushPipe    := false.B
  s2_out.uop.exceptionVec := s2_exception_vec
  s2_out.forwardMask      := s2_fwd_mask
  s2_out.forwardData      := s2_fwd_data
  s2_out.handledByMSHR    := s2_cache_handled
  s2_out.miss             := s2_dcache_miss && s2_troublem
  s2_out.feedbacked       := io.feedback_fast.valid && !io.feedback_fast.bits.hit

  // Generate replay signal caused by:
  // * st-ld violation check
  // * tlb miss
  // * dcache replay
  // * forward data invalid
  // * dcache miss
  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
  s2_out.rep_info.full_fwd        := s2_data_fwded
  s2_out.rep_info.data_inv_sq_idx := Mux(io.ldu_io.vec_forward.dataInvalid, s2_out.uop.sqIdx, io.ldu_io.lsq.forward.dataInvalidSqIdx)
  s2_out.rep_info.addr_inv_sq_idx := Mux(io.ldu_io.vec_forward.addrInvalid, s2_out.uop.sqIdx, io.ldu_io.lsq.forward.addrInvalidSqIdx)
  s2_out.rep_info.rep_carry       := io.ldu_io.dcache.resp.bits.replayCarry
  s2_out.rep_info.mshr_id         := io.ldu_io.dcache.resp.bits.mshr_id
  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
  s2_out.rep_info.debug           := s2_in.uop.debugInfo
  s2_out.rep_info.tlb_id          := io.ldu_io.tlb_hint.id
  s2_out.rep_info.tlb_full        := io.ldu_io.tlb_hint.full
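
  // Note: rep_info collects every potential replay cause for this flow;
  // last_beat (bit log2Up(refillBytes) of the paddr) is assumed to record
  // which half of the refill block the access falls in, so a replayed load
  // can wake up on the matching refill beat. Only one cause is finally kept;
  // see the PriorityEncoderOH selection in stage 3 below.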

  // if forward fails, replay this inst from fetch
  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
  // if ld-ld violation is detected, replay this inst from fetch
  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_ld_mmio && !s2_is_prefetch && !s2_in.tlbMiss
  // io.out.bits.uop.replayInst := false.B

  // to be removed
  val s2_ld_need_fb = !s2_in.isLoadReplay &&      // already feedbacked
                      io.ldu_io.lq_rep_full &&    // LoadQueueReplay is full
                      s2_out.rep_info.need_rep && // need replay
                      !s2_exception &&            // no exception is triggered
                      !s2_hw_prf &&               // not hardware prefetch
                      !s2_isvec
  val s2_st_need_fb = !s2_ld_flow && !s2_hw_prf && !s2_isvec
  io.feedback_fast.valid                 := s2_valid && (s2_ld_need_fb || s2_st_need_fb)
  io.feedback_fast.bits.hit              := Mux(s2_ld_flow, false.B, !s2_tlb_miss)
  io.feedback_fast.bits.flushState       := s2_in.ptwBack
  io.feedback_fast.bits.robIdx           := s2_in.uop.robIdx
  io.feedback_fast.bits.sourceType       := Mux(s2_ld_flow, RSFeedbackType.lrqFull, RSFeedbackType.tlbMiss)
  io.feedback_fast.bits.dataInvalidSqIdx := DontCare

  val s2_vec_feedback = Wire(Valid(new VSFQFeedback))
  s2_vec_feedback.valid := s2_valid && !s2_ld_flow && !s2_hw_prf && s2_isvec
  // s2_vec_feedback.bits.flowPtr := s2_out.sflowPtr
  s2_vec_feedback.bits.hit := !s2_tlb_miss
  s2_vec_feedback.bits.sourceType := RSFeedbackType.tlbMiss
  s2_vec_feedback.bits.paddr := s2_paddr
  s2_vec_feedback.bits.mmio := s2_st_mmio
  s2_vec_feedback.bits.atomic := s2_st_mmio
  s2_vec_feedback.bits.exceptionVec := s2_exception_vec

  io.stu_io.lsq_replenish := s2_out
  io.stu_io.lsq_replenish.miss := io.ldu_io.dcache.resp.fire && io.ldu_io.dcache.resp.bits.miss

  io.ldu_io.ldCancel.ld1Cancel := false.B

  // fast wakeup
  io.ldu_io.fast_uop.valid := RegNext(
    !io.ldu_io.dcache.s1_disable_fast_wakeup &&
    s1_valid &&
    !s1_kill &&
    !io.tlb.resp.bits.miss &&
    !io.ldu_io.lsq.forward.dataInvalidFast
  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_ld_mmio && s2_ld_flow) && !s2_isvec
  io.ldu_io.fast_uop.bits := RegNext(s1_out.uop)

  io.ldu_io.s2_ptr_chasing := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)

  // prefetch train
  io.s0_prefetch_spec := s0_fire
  io.s1_prefetch_spec := s1_fire
  io.prefetch_train.valid              := s2_valid && !s2_actually_mmio && !s2_in.tlbMiss
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train.bits.miss          := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.miss, io.stu_io.dcache.resp.bits.miss) // TODO: use trace with bank conflict?
  io.prefetch_train.bits.meta_prefetch := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.meta_prefetch, false.B)
  io.prefetch_train.bits.meta_access   := Mux(s2_ld_flow, io.ldu_io.dcache.resp.bits.meta_access, false.B)

  io.prefetch_train_l1.valid              := s2_valid && !s2_actually_mmio && s2_ld_flow
  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train_l1.bits.miss          := io.ldu_io.dcache.resp.bits.miss
  io.prefetch_train_l1.bits.meta_prefetch := io.ldu_io.dcache.resp.bits.meta_prefetch
  io.prefetch_train_l1.bits.meta_access   := io.ldu_io.dcache.resp.bits.meta_access
  if (env.FPGAPlatform) {
    io.ldu_io.dcache.s0_pc := DontCare
    io.ldu_io.dcache.s1_pc := DontCare
    io.ldu_io.dcache.s2_pc := DontCare
  } else {
    io.ldu_io.dcache.s0_pc := s0_out.uop.pc
    io.ldu_io.dcache.s1_pc := s1_out.uop.pc
    io.ldu_io.dcache.s2_pc := s2_out.uop.pc
  }
  io.ldu_io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill
  io.stu_io.dcache.s2_kill := s2_pmp.st || s2_actually_mmio || s2_kill
  io.stu_io.dcache.s2_pc := s2_out.uop.pc

  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready && s1_ld_flow
  val s2_ld_valid_dup = RegInit(0.U(6.W))
  s2_ld_valid_dup := 0x0.U(6.W)
  when (s1_ld_left_fire && !s1_out.isHWPrefetch && s1_ld_flow) { s2_ld_valid_dup := 0x3f.U(6.W) }
  when (s1_kill || s1_out.isHWPrefetch || !s1_ld_flow) { s2_ld_valid_dup := 0x0.U(6.W) }
  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch) || RegNext(!s1_ld_flow)))
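
  // s2_ld_valid_dup keeps six duplicated copies of the s2 load-valid bit so
  // that high fan-out consumers (see data_wen_dup in stage 3) each get their
  // own copy; the assert above checks that every copy tracks s2_valid.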
1092
1093  // Pipeline
1094  // --------------------------------------------------------------------------------
1095  // stage 3
1096  // --------------------------------------------------------------------------------
1097  // writeback and update load queue
1098  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
1099  val s3_in           = RegEnable(s2_out, s2_fire)
1100  val s3_out          = Wire(Valid(new MemExuOutput))
1101  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
1102  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
1103  val s3_fast_rep     = Wire(Bool())
1104  val s3_ld_flow      = RegNext(s2_ld_flow)
1105  val s3_troublem     = RegNext(s2_troublem)
1106  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
1107  val s3_isvec        = RegNext(s2_isvec)
1108  s3_ready := !s3_valid || s3_kill || sx_can_go
1109
1110  // forwrad last beat
1111  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.ldu_io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
1112  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1113  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid) && s3_ld_flow
1114
1115
1116  // s3 load fast replay
1117  io.ldu_io.fast_rep_out.valid := s3_valid &&
1118                                  s3_fast_rep &&
1119                                  !s3_in.uop.robIdx.needFlush(io.redirect) &&
1120                                  s3_ld_flow &&
1121                                  !s3_isvec
1122  io.ldu_io.fast_rep_out.bits := s3_in
1123
1124  io.ldu_io.lsq.ldin.valid := s3_valid &&
1125                              (!s3_fast_rep || !io.ldu_io.fast_rep_out.ready) &&
1126                              !s3_in.feedbacked &&
1127                              !s3_in.lateKill &&
1128                              s3_ld_flow
1129  io.ldu_io.lsq.ldin.bits := s3_in
1130  io.ldu_io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid
1131
1132  /* <------- DANGEROUS: Don't change sequence here ! -------> */
1133  io.ldu_io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
1134  io.ldu_io.lsq.ldin.bits.replacementUpdated := io.ldu_io.dcache.resp.bits.replacementUpdated
1135  io.ldu_io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)
1136
1137  val s3_dly_ld_err =
1138    if (EnableAccurateLoadError) {
1139      (s3_in.lateKill || io.ldu_io.dcache.resp.bits.error_delayed) && RegNext(io.csrCtrl.cache_error_enable)
1140    } else {
1141      WireInit(false.B)
1142    }
1143  io.ldu_io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
1144  io.ldu_io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err
1145  io.ldu_io.lsq.ldin.bits.dcacheRequireReplay  := s3_dcache_rep
1146
1147  val s3_vp_match_fail = RegNext(io.ldu_io.lsq.forward.matchInvalid || io.ldu_io.sbuffer.matchInvalid) && s3_troublem
1148  val s3_ldld_rep_inst =
1149      io.ldu_io.lsq.ldld_nuke_query.resp.valid &&
1150      io.ldu_io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
1151      RegNext(io.csrCtrl.ldld_vio_check_enable)
1152
1153  val s3_rep_info = WireInit(s3_in.rep_info)
1154  s3_rep_info.dcache_miss   := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid && s3_troublem
1155  val s3_rep_frm_fetch = s3_vp_match_fail
1156  val s3_flushPipe = s3_ldld_rep_inst
1157  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)
1158  val s3_force_rep     = s3_sel_rep_cause(LoadReplayCauses.C_TM) &&
1159                         !s3_in.uop.exceptionVec(loadAddrMisaligned) &&
1160                         s3_troublem
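  // When the highest-priority replay cause is a TLB miss (C_TM) the replay
  // wins over any recorded exception: without a translated address the
  // exception may be spurious, so the exception vector is cleared further
  // down and the flow is replayed from the load queue instead.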
1161
1162  val s3_ld_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, LduCfg).asUInt.orR && s3_ld_flow
1163  val s3_st_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, StaCfg).asUInt.orR && !s3_ld_flow
1164  val s3_exception    = s3_ld_exception || s3_st_exception
1165  when ((s3_ld_exception || s3_dly_ld_err || s3_rep_frm_fetch) && !s3_force_rep) {
1166    io.ldu_io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
1167  } .otherwise {
1168    io.ldu_io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
1169  }
1170
1171  // Int flow, if hit, will be writebacked at s3
1172  s3_out.valid                := s3_valid &&
1173                                (!s3_ld_flow && !s3_in.feedbacked || !io.ldu_io.lsq.ldin.bits.rep_info.need_rep) && !s3_in.mmio
1174  s3_out.bits.uop             := s3_in.uop
1175  s3_out.bits.uop.exceptionVec(loadAccessFault) := (s3_dly_ld_err  || s3_in.uop.exceptionVec(loadAccessFault)) && s3_ld_flow
1176  s3_out.bits.uop.replayInst := s3_rep_frm_fetch
1177  s3_out.bits.data            := s3_in.data
1178  s3_out.bits.debug.isMMIO    := s3_in.mmio
1179  s3_out.bits.debug.isPerfCnt := false.B
1180  s3_out.bits.debug.paddr     := s3_in.paddr
1181  s3_out.bits.debug.vaddr     := s3_in.vaddr
1182
1183  when (s3_force_rep) {
1184    s3_out.bits.uop.exceptionVec := 0.U.asTypeOf(s3_in.uop.exceptionVec.cloneType)
1185  }
1186
1187  io.ldu_io.rollback.valid := s3_valid && (s3_rep_frm_fetch || s3_flushPipe) && !s3_exception && s3_ld_flow
1188  io.ldu_io.rollback.bits             := DontCare
1189  io.ldu_io.rollback.bits.isRVC       := s3_out.bits.uop.preDecodeInfo.isRVC
1190  io.ldu_io.rollback.bits.robIdx      := s3_out.bits.uop.robIdx
1191  io.ldu_io.rollback.bits.ftqIdx      := s3_out.bits.uop.ftqPtr
1192  io.ldu_io.rollback.bits.ftqOffset   := s3_out.bits.uop.ftqOffset
1193  io.ldu_io.rollback.bits.level       := Mux(s3_rep_frm_fetch, RedirectLevel.flush, RedirectLevel.flushAfter)
1194  io.ldu_io.rollback.bits.cfiUpdate.target := s3_out.bits.uop.pc
1195  io.ldu_io.rollback.bits.debug_runahead_checkpoint_id := s3_out.bits.uop.debugInfo.runahead_checkpoint_id
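  // Two rollback reasons share this port: a forward match that turned out to
  // be for the wrong physical address (s3_rep_frm_fetch) re-fetches the load
  // itself (RedirectLevel.flush), while a load-load ordering violation
  // (s3_flushPipe) only flushes the instructions after it (flushAfter).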
1196  /* <------- DANGEROUS: Don't change sequence here ! -------> */
1197  io.ldu_io.lsq.ldin.bits.uop := s3_out.bits.uop
1198
1199  val s3_revoke = s3_exception || io.ldu_io.lsq.ldin.bits.rep_info.need_rep
1200  io.ldu_io.lsq.ldld_nuke_query.revoke := s3_revoke
1201  io.ldu_io.lsq.stld_nuke_query.revoke := s3_revoke
1202
1203  // feedback slow
1204  s3_fast_rep := RegNext(s2_fast_rep) &&
1205                 !s3_in.feedbacked &&
1206                 !s3_in.lateKill &&
1207                 !s3_rep_frm_fetch &&
1208                 !s3_exception
1209
1210  val s3_fb_no_waiting = !s3_in.isLoadReplay && !(s3_fast_rep && io.ldu_io.fast_rep_out.ready) && !s3_in.feedbacked
1211
1212  // feedback to the reservation station (slow path)
1213  io.feedback_slow.valid                 := s3_valid && !s3_in.uop.robIdx.needFlush(io.redirect) && s3_fb_no_waiting && s3_ld_flow
1214  io.feedback_slow.bits.hit              := !io.ldu_io.lsq.ldin.bits.rep_info.need_rep || io.ldu_io.lsq.ldin.ready
1215  io.feedback_slow.bits.flushState       := s3_in.ptwBack
1216  io.feedback_slow.bits.robIdx           := s3_in.uop.robIdx
1217  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
1218  io.feedback_slow.bits.dataInvalidSqIdx := DontCare
1219
1220  io.vec_stu_io.feedbackSlow.valid := RegNext(s2_vec_feedback.valid && !s2_out.uop.robIdx.needFlush(io.redirect))
1221  io.vec_stu_io.feedbackSlow.bits := RegNext(s2_vec_feedback.bits)
1222
1223  io.ldu_io.ldCancel.ld2Cancel := s3_valid && s3_ld_flow && (                          // is load
1224    io.ldu_io.lsq.ldin.bits.rep_info.need_rep || s3_in.mmio                            // exe fail or is mmio
1225  )
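  // ld2Cancel retracts the speculative wakeup broadcast for this load: if the
  // flow must replay (need_rep) or turned out to be MMIO, the issue queues
  // cancel the consumer uops scheduled on its result before they sample a
  // stale bypass value.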
1226
1227  // data from dcache hit
1228  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
1229  s3_ld_raw_data_frm_cache.respDcacheData       := io.ldu_io.dcache.resp.bits.data_delayed
1230  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
1231  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
1232  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
1233  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
1234  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
1235  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
1236  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
1237  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
1238  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1239
1240  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
1241  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
1242    "b0000".U -> s3_merged_data_frm_cache(63,    0),
1243    "b0001".U -> s3_merged_data_frm_cache(63,    8),
1244    "b0010".U -> s3_merged_data_frm_cache(63,   16),
1245    "b0011".U -> s3_merged_data_frm_cache(63,   24),
1246    "b0100".U -> s3_merged_data_frm_cache(63,   32),
1247    "b0101".U -> s3_merged_data_frm_cache(63,   40),
1248    "b0110".U -> s3_merged_data_frm_cache(63,   48),
1249    "b0111".U -> s3_merged_data_frm_cache(63,   56),
1250    "b1000".U -> s3_merged_data_frm_cache(127,  64),
1251    "b1001".U -> s3_merged_data_frm_cache(127,  72),
1252    "b1010".U -> s3_merged_data_frm_cache(127,  80),
1253    "b1011".U -> s3_merged_data_frm_cache(127,  88),
1254    "b1100".U -> s3_merged_data_frm_cache(127,  96),
1255    "b1101".U -> s3_merged_data_frm_cache(127, 104),
1256    "b1110".U -> s3_merged_data_frm_cache(127, 112),
1257    "b1111".U -> s3_merged_data_frm_cache(127, 120)
1258  ))
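  // addrOffset = paddr(3, 0) picks the starting byte out of the 128-bit
  // merged line: each entry right-shifts by offset * 8 within the matching
  // 64-bit half, e.g. "b0011" yields bits (63, 24) so byte 3 lands at bit 0
  // before rdataHelper sign/zero-extends for the access size.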
1259  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
1260
1261  // FIXME: add 1 cycle delay ?
1262  io.ldout.bits      := s3_out.bits
1263  io.ldout.bits.data := s3_ld_data_frm_cache
1264  io.ldout.valid     := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) && s3_ld_flow && !s3_isvec
1265
1266  // for uncache
1267  io.ldu_io.lsq.uncache.ready := true.B
1268
1269  // fast load to load forward
1270  if (EnableLoadToLoadForward) {
1271    io.ldu_io.l2l_fwd_out.valid      := s3_out.valid && !s3_in.lateKill && s3_ld_flow
1272    io.ldu_io.l2l_fwd_out.data       := s3_ld_data_frm_cache
1273    io.ldu_io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err // ecc delayed error
1274  } else {
1275    io.ldu_io.l2l_fwd_out.valid      := false.B
1276    io.ldu_io.l2l_fwd_out.data       := DontCare
1277    io.ldu_io.l2l_fwd_out.dly_ld_err := DontCare
1278  }
1279
1280  // hybrid unit writeback to rob
1281  // delay params
1282  val SelectGroupSize   = RollbackGroupSize
1283  val lgSelectGroupSize = log2Ceil(SelectGroupSize)
1284  val TotalSelectCycles = scala.math.ceil(log2Ceil(LoadQueueRAWSize).toFloat / lgSelectGroupSize).toInt + 1
1285  val TotalDelayCycles  = TotalSelectCycles - 2
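  // Worked example (parameter values are configuration-dependent): with
  // LoadQueueRAWSize = 32 and RollbackGroupSize = 8, lgSelectGroupSize = 3,
  // TotalSelectCycles = ceil(5 / 3) + 1 = 3 and TotalDelayCycles = 1, i.e.
  // store writeback is held one extra stage to line up with the RAW queue's
  // rollback selection latency.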
1286
1287  // writeback
1288  val sx_valid = Wire(Vec(TotalDelayCycles + 1, Bool()))
1289  val sx_ready = Wire(Vec(TotalDelayCycles + 1, Bool()))
1290  val sx_in    = Wire(Vec(TotalDelayCycles + 1, new MemExuOutput))
1291
1292  sx_can_go := sx_ready.head
1293  for (i <- 0 until TotalDelayCycles + 1) {
1294    if (i == 0) {
1295      sx_valid(i) := s3_valid &&
1296                    !s3_ld_flow &&
1297                    !s3_in.feedbacked &&
1298                    !s3_in.mmio
1299      sx_in(i)    := s3_out.bits
1300      sx_ready(i) := !s3_valid || sx_in(i).uop.robIdx.needFlush(io.redirect) || (if (TotalDelayCycles == 0) io.stout.ready else sx_ready(i+1))
1301    } else {
1302      val cur_kill   = sx_in(i).uop.robIdx.needFlush(io.redirect)
1303      val cur_can_go = (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
1304      val cur_fire   = sx_valid(i) && !cur_kill && cur_can_go
1305      val prev_fire  = sx_valid(i-1) && !sx_in(i-1).uop.robIdx.needFlush(io.redirect) && sx_ready(i)
1306
1307      sx_ready(i) := !sx_valid(i) || cur_kill || (if (i == TotalDelayCycles) io.stout.ready else sx_ready(i+1))
1308      val sx_valid_can_go = prev_fire || cur_fire || cur_kill
1309      sx_valid(i) := RegEnable(prev_fire, sx_valid_can_go)
1310      sx_in(i) := RegEnable(sx_in(i-1), prev_fire)
1311    }
1312  }
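  // The sx_* stages behave like a ready/valid shift register: a stage holds
  // its MemExuOutput until the next stage (or io.stout at the tail) accepts
  // it, and a redirect drains the stage by forcing its ready high so younger
  // flows are not blocked behind a killed store.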
1313
1314  val sx_last_valid = sx_valid.takeRight(1).head
1315  val sx_last_ready = sx_ready.takeRight(1).head
1316  val sx_last_in    = sx_in.takeRight(1).head
1317
1318  sx_last_ready  := !sx_last_valid || sx_last_in.uop.robIdx.needFlush(io.redirect) || io.stout.ready
1319  io.stout.valid := sx_last_valid && !sx_last_in.uop.robIdx.needFlush(io.redirect) && FuType.isStore(sx_last_in.uop.fuType)
1320  io.stout.bits  := sx_last_in
1321
1322  // trigger
1323  val ld_trigger = FuType.isLoad(io.ldout.bits.uop.fuType)
1324  val last_valid_data = RegEnable(io.ldout.bits.data, io.ldout.fire)
1325  val hit_ld_addr_trig_hit_vec = Wire(Vec(TriggerNum, Bool()))
1326  val lq_ld_addr_trig_hit_vec = RegNext(io.ldu_io.lsq.trigger.lqLoadAddrTriggerHitVec)
1327  (0 until TriggerNum).foreach { i =>
1328    val tdata2    = RegNext(RegNext(io.ldu_io.trigger(i).tdata2))
1329    val matchType = RegNext(RegNext(io.ldu_io.trigger(i).matchType))
1330    val tEnable   = RegNext(RegNext(io.ldu_io.trigger(i).tEnable))
1331
1332    hit_ld_addr_trig_hit_vec(i)        := TriggerCmp(RegNext(s3_in.vaddr), tdata2, matchType, tEnable)
1333    io.ldu_io.trigger(i).addrHit       := Mux(io.ldout.valid && ld_trigger, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i))
1334  }
1335  io.ldu_io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec
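  // Trigger CSR fields (tdata2/matchType/tEnable) are taken through two
  // register stages so they line up with RegNext(s3_in.vaddr) at writeback
  // time; loads that write back via the load queue instead report the
  // queue's own hit vector (lq_ld_addr_trig_hit_vec).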
1336
1337  // FIXME: please move this part to LoadQueueReplay
1338  io.ldu_io.debug_ls := DontCare
1339  io.stu_io.debug_ls := DontCare
1340  io.stu_io.debug_ls.s1_isTlbFirstMiss := io.tlb.resp.valid && io.tlb.resp.bits.miss && io.tlb.resp.bits.debug.isFirstIssue && !s1_in.isHWPrefetch && !s1_ld_flow
1341  io.stu_io.debug_ls.s1_robIdx := s1_in.uop.robIdx.value
1342
1343  // Topdown
1344  io.ldu_io.lsTopdownInfo.s1.robIdx          := s1_in.uop.robIdx.value
1345  io.ldu_io.lsTopdownInfo.s1.vaddr_valid     := s1_valid && s1_in.hasROBEntry
1346  io.ldu_io.lsTopdownInfo.s1.vaddr_bits      := s1_vaddr
1347  io.ldu_io.lsTopdownInfo.s2.robIdx          := s2_in.uop.robIdx.value
1348  io.ldu_io.lsTopdownInfo.s2.paddr_valid     := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1349  io.ldu_io.lsTopdownInfo.s2.paddr_bits      := s2_in.paddr
1350  io.ldu_io.lsTopdownInfo.s2.first_real_miss := io.ldu_io.dcache.resp.bits.real_miss
1351  io.ldu_io.lsTopdownInfo.s2.cache_miss_en   := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated
1352
1353  // perf cnt
1354  XSPerfAccumulate("s0_in_valid",                  io.lsin.valid)
1355  XSPerfAccumulate("s0_in_block",                  io.lsin.valid && !io.lsin.fire)
1356  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_isFirstIssue)
1357  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.ldu_io.replay.fire)
1358  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.lsin.fire && s0_isFirstIssue)
1359  XSPerfAccumulate("s0_fast_replay_issue",         io.ldu_io.fast_rep_in.fire)
1360  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1361  XSPerfAccumulate("s0_stall_ld_dcache",           s0_valid && !io.ldu_io.dcache.req.ready)
1362  XSPerfAccumulate("s0_stall_st_dcache",           s0_valid && !io.stu_io.dcache.req.ready)
1363  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12))
1364  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12))
1365  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_vaddr(VAddrBits-1, 12) === io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1366  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.lsin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1367  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1368  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1369  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_prf && s0_src_select_vec(int_iss_idx))
1370  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.ldu_io.prefetch_req.valid && !s0_hw_prf_select)
1371  XSPerfAccumulate("s0_hardware_prefetch_total",   io.ldu_io.prefetch_req.valid)
1372
1373  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1374  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1375  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1376  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1377  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1378  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1379  XSPerfAccumulate("s1_late_kill",                 s1_valid && s1_fast_rep_kill)
1380
1381  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1382  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1383  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1384  XSPerfAccumulate("s2_dcache_miss",               s2_fire && io.ldu_io.dcache.resp.bits.miss)
1385  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && io.ldu_io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1386  XSPerfAccumulate("s2_dcache_real_miss_first_issue",   s2_fire && io.ldu_io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1387  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1388  XSPerfAccumulate("s2_dcache_miss_full_forward",  s2_fire && s2_dcache_miss)
1389  XSPerfAccumulate("s2_fwd_frm_d_can",             s2_valid && s2_fwd_frm_d_chan)
1390  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr",    s2_valid && s2_fwd_frm_d_chan_or_mshr)
1391  XSPerfAccumulate("s2_stall_out",                 s2_fire && !s2_can_go)
1392  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1393  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && io.ldu_io.dcache.s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
1394  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && io.ldu_io.dcache.resp.bits.miss) // prefetch req miss in l1
1395  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !io.ldu_io.dcache.resp.bits.miss) // prefetch req hit in l1
1396  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && io.ldu_io.dcache.resp.bits.miss && !io.ldu_io.dcache.s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
1397  XSPerfAccumulate("s2_forward_req",               s2_fire && s2_in.forward_tlDchannel)
1398  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
1399  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)
1400
1401  XSPerfAccumulate("s3_fwd_frm_d_chan",            s3_valid && s3_fwd_frm_d_chan_valid)
1402
1403  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1404  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1405  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1406  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1407  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1408  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1409  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1410  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1411
1412  // bug lyq: some signals in perfEvents are no longer suitable for the current MemBlock design
1413  // hardware performance counter
1414  val perfEvents = Seq(
1415    ("load_s0_in_fire         ", s0_fire                                                        ),
1416    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1417    ("stall_dcache            ", s0_valid && s0_can_go && !io.ldu_io.dcache.req.ready           ),
1418    ("load_s1_in_fire         ", s0_fire                                                        ),
1419    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1420    ("load_s2_in_fire         ", s1_fire                                                        ),
1421    ("load_s2_dcache_miss     ", s2_fire && io.ldu_io.dcache.resp.bits.miss                     ),
1422  )
1423  generatePerfEvent()
1424}