xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision 83ba63b34cf09b33c0a9e1b3203138e51af4491b)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput, MemExuOutput}
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.ctrlblock.{DebugLsInfoBundle, LsTopdownInfo}
import xiangshan.backend.rob.RobPtr
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.cache.mmu.{TlbCmd, TlbReq, TlbRequestIO, TlbResp}
import xiangshan.mem.mdp._
class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle with HasDCacheParameters {
  // mshr refill index
  val mshr_id         = UInt(log2Up(cfg.nMissEntries).W)
  // get full data from store queue and sbuffer
  val full_fwd        = Bool()
  // wait for data from store inst's store queue index
  val data_inv_sq_idx = new SqPtr
  // wait for address from store queue index
  val addr_inv_sq_idx = new SqPtr
  // replay carry
  val rep_carry       = new ReplayCarry(nWays)
  // data in last beat
  val last_beat       = Bool()
  // replay cause
  val cause           = Vec(LoadReplayCauses.allCauses, Bool())
  // performance debug information
  val debug           = new PerfDebugInfo

  // alias
  def mem_amb       = cause(LoadReplayCauses.C_MA)
  def tlb_miss      = cause(LoadReplayCauses.C_TM)
  def fwd_fail      = cause(LoadReplayCauses.C_FF)
  def dcache_rep    = cause(LoadReplayCauses.C_DR)
  def dcache_miss   = cause(LoadReplayCauses.C_DM)
  def wpu_fail      = cause(LoadReplayCauses.C_WF)
  def bank_conflict = cause(LoadReplayCauses.C_BC)
  def rar_nack      = cause(LoadReplayCauses.C_RAR)
  def raw_nack      = cause(LoadReplayCauses.C_RAW)
  def nuke          = cause(LoadReplayCauses.C_NK)
  def need_rep      = cause.asUInt.orR
}
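
// NOTE (illustrative sketch, not part of the design): `cause` is indexed by
// LoadReplayCauses, and the alias order above suggests lower indices carry
// higher replay priority. A consumer can OR-reduce the vector or keep only the
// top-priority cause, e.g.:
//
//   val rep = Wire(new LoadToLsqReplayIO)
//   val mustReplay   = rep.need_rep                        // any cause set
//   val primaryCause = PriorityEncoderOH(rep.cause.asUInt) // lowest index wins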


class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val ldin            = DecoupledIO(new LqWriteBundle)
  val uncache         = Flipped(DecoupledIO(new MemExuOutput))
  val ld_raw_data     = Input(new LoadDataFromLQBundle)
  val forward         = new PipeLoadForwardQueryIO
  val stld_nuke_query = new LoadNukeQueryIO
  val ldld_nuke_query = new LoadNukeQueryIO
  val trigger         = Flipped(new LqTriggerIO)
}

class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
  val valid      = Bool()
  val data       = UInt(XLEN.W) // load to load fast path is limited to ld (64 bit) used as vaddr src1 only
  val dly_ld_err = Bool()
}

class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle {
  val tdata2      = Input(UInt(64.W))
  val matchType   = Input(UInt(2.W))
  val tEnable     = Input(Bool()) // timing is calculated before this
  val addrHit     = Output(Bool())
  val lastDataHit = Output(Bool())
}

class LoadUnit(implicit p: Parameters) extends XSModule
  with HasLoadHelper
  with HasPerfEvents
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
{
  val io = IO(new Bundle() {
    // control
    val redirect      = Flipped(ValidIO(new Redirect))
    val csrCtrl       = Flipped(new CustomCSRCtrlIO)

    // int issue path
    val ldin          = Flipped(Decoupled(new MemExuInput))
    val ldout         = Decoupled(new MemExuOutput)

    // data path
    val tlb           = new TlbRequestIO(2)
    val pmp           = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb resp now
    val dcache        = new DCacheLoadIO
    val sbuffer       = new LoadForwardQueryIO
    val lsq           = new LoadToLsqIO
    val tl_d_channel  = Input(new DcacheToLduForwardIO)
    val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
    val refill        = Flipped(ValidIO(new Refill))
    val l2_hint       = Input(Valid(new L2ToL1Hint))

    // fast wakeup
    val fast_uop = ValidIO(new DynInst) // early wakeup signal generated in load_s1, send to RS in load_s2

    // trigger
    val trigger = Vec(3, new LoadUnitTriggerIO)

    // prefetch
    val prefetch_train            = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
    val prefetch_train_l1         = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
    val prefetch_req              = Flipped(ValidIO(new L1PrefetchReq)) // hardware prefetch to l1 cache req
    val canAcceptLowConfPrefetch  = Output(Bool())
    val canAcceptHighConfPrefetch = Output(Bool())

    // load to load fast path
    val l2l_fwd_in    = Input(new LoadToLoadIO)
    val l2l_fwd_out   = Output(new LoadToLoadIO)

    val ld_fast_match    = Input(Bool())
    val ld_fast_fuOpType = Input(UInt())
    val ld_fast_imm      = Input(UInt(12.W))

    // rs feedback
    val feedback_fast = ValidIO(new RSFeedback) // stage 2
    val feedback_slow = ValidIO(new RSFeedback) // stage 3
    val ldCancel = Output(new LoadCancelIO()) // used to cancel the uops woken by this load, and to cancel the load itself

    // load ecc error
    val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different

    // schedule error query
    val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))

    // queue-based replay
    val replay       = Flipped(Decoupled(new LsPipelineBundle))
    val lq_rep_full  = Input(Bool())

    // misc
    val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch

    // Load fast replay path
    val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
    val fast_rep_out = Decoupled(new LqWriteBundle)

    // perf
    val debug_ls         = Output(new DebugLsInfoBundle)
    val lsTopdownInfo    = Output(new LsTopdownInfo)
    val correctMissTrain = Input(Bool())
  })

  val s1_ready, s2_ready, s3_ready = WireInit(false.B)

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_valid         = Wire(Bool())
  val s0_kill          = Wire(Bool())
  val s0_vaddr         = Wire(UInt(VAddrBits.W))
  val s0_mask          = Wire(UInt((VLEN/8).W))
  val s0_uop           = Wire(new DynInst)
  val s0_has_rob_entry = Wire(Bool())
  val s0_rsIdx         = Wire(UInt(log2Up(MemIQSizeMax).W))
  val s0_mshrid        = Wire(UInt())
  val s0_try_l2l       = Wire(Bool())
  val s0_rep_carry     = Wire(new ReplayCarry(nWays))
  val s0_isFirstIssue  = Wire(Bool())
  val s0_fast_rep      = Wire(Bool())
  val s0_ld_rep        = Wire(Bool())
  val s0_l2l_fwd       = Wire(Bool())
  val s0_sched_idx     = Wire(UInt())
  // Record the issue port idx of load issue queue. This signal is used by load cancel.
  val s0_deqPortIdx    = Wire(UInt(log2Ceil(LoadPipelineWidth).W))
  val s0_can_go        = s1_ready
  val s0_fire          = s0_valid && s0_can_go
  val s0_out           = Wire(new LqWriteBundle)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.replay)
  // src1: fast load replay (io.fast_rep_in)
  // src2: load replayed by LSQ (io.replay)
  // src3: hardware prefetch from prefetcher (high confidence) (io.prefetch_req)
  // src4: int read / software prefetch first issue from RS (io.ldin)
  // src5: vec read first issue from RS (TODO)
  // src6: load tries pointer chasing when there is no issued or replayed load (io.l2l_fwd_in)
  // src7: hardware prefetch from prefetcher (low confidence) (io.prefetch_req)
  // priority: high to low
  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
  val s0_super_ld_rep_valid  = io.replay.valid && io.replay.bits.forward_tlDchannel
  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
  val s0_ld_rep_valid        = io.replay.valid && !io.replay.bits.forward_tlDchannel && !s0_rep_stall
  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
  val s0_vec_iss_valid       = WireInit(false.B) // TODO
  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid && io.ld_fast_match
  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
  dontTouch(s0_super_ld_rep_valid)
  dontTouch(s0_ld_fast_rep_valid)
  dontTouch(s0_ld_rep_valid)
  dontTouch(s0_high_conf_prf_valid)
  dontTouch(s0_int_iss_valid)
  dontTouch(s0_vec_iss_valid)
  dontTouch(s0_l2l_fwd_valid)
  dontTouch(s0_low_conf_prf_valid)

  // load flow source ready
  val s0_super_ld_rep_ready  = WireInit(true.B)
  val s0_ld_fast_rep_ready   = !s0_super_ld_rep_valid
  val s0_ld_rep_ready        = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid
  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid

  val s0_int_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid

  val s0_vec_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid

  val s0_l2l_fwd_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid

  val s0_low_conf_prf_ready  = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid &&
                               !s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_ready)
  dontTouch(s0_ld_fast_rep_ready)
  dontTouch(s0_ld_rep_ready)
  dontTouch(s0_high_conf_prf_ready)
  dontTouch(s0_int_iss_ready)
  dontTouch(s0_vec_iss_ready)
  dontTouch(s0_l2l_fwd_ready)
  dontTouch(s0_low_conf_prf_ready)
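
  // NOTE (illustrative sketch, not part of the design): the ready signals above
  // unroll a fixed-priority chain: source i is ready iff no strictly
  // higher-priority source is valid. Under that assumption, the same chain
  // could be generated from an ordered Seq:
  //
  //   val srcValid = Seq(s0_super_ld_rep_valid, s0_ld_fast_rep_valid, s0_ld_rep_valid,
  //                      s0_high_conf_prf_valid, s0_int_iss_valid, s0_vec_iss_valid,
  //                      s0_l2l_fwd_valid, s0_low_conf_prf_valid)
  //   val srcReady = srcValid.scanLeft(true.B)((noHigher, v) => noHigher && !v)
  //   // srcReady(i) matches the hand-written s0_*_ready signals above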

  // load flow source select (OH)
  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
  assert(!s0_vec_iss_select) // to be added
  dontTouch(s0_super_ld_rep_select)
  dontTouch(s0_ld_fast_rep_select)
  dontTouch(s0_ld_rep_select)
  dontTouch(s0_hw_prf_select)
  dontTouch(s0_int_iss_select)
  dontTouch(s0_vec_iss_select)
  dontTouch(s0_l2l_fwd_select)

  s0_valid := (s0_super_ld_rep_valid ||
               s0_ld_fast_rep_valid ||
               s0_ld_rep_valid ||
               s0_high_conf_prf_valid ||
               s0_int_iss_valid ||
               s0_vec_iss_valid ||
               s0_l2l_fwd_valid ||
               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill

  // pointer chasing is only attempted when s0's output is ready and the dcache can accept the request
  val s0_try_ptr_chasing      = s0_l2l_fwd_select
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled || (s0_out.uop.robIdx.needFlush(io.redirect) && !s0_try_ptr_chasing)
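
  // NOTE (illustrative sketch, not part of the design): `+&` is Chisel's
  // width-expanding add, so the 6-bit + 6-bit sum above is 7 bits wide and
  // bit 6 is the carry out of the low 6 bits. s1 uses that bit to detect that
  // the pointer-chasing guess left the original 64-byte cache line, e.g.:
  //
  //   0x38.U(6.W) +& 0x10.U(6.W)  // = 0x48.U(7.W), bit 6 set: line crossed
  //   0x08.U(6.W) +& 0x10.U(6.W)  // = 0x18.U(7.W), bit 6 clear: same line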

  // prefetch related ctrl signal
  val s0_prf    = Wire(Bool())
  val s0_prf_rd = Wire(Bool())
  val s0_prf_wr = Wire(Bool())
  val s0_hw_prf = s0_hw_prf_select

  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready

  // query DTLB
  io.tlb.req.valid                   := s0_valid
  io.tlb.req.bits.cmd                := Mux(s0_prf,
                                         Mux(s0_prf_wr, TlbCmd.write, TlbCmd.read),
                                         TlbCmd.read
                                       )
  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_vaddr)
  io.tlb.req.bits.size               := LSUOpType.size(s0_uop.fuOpType)
  io.tlb.req.bits.kill               := s0_kill
  io.tlb.req.bits.memidx.is_ld       := true.B
  io.tlb.req.bits.memidx.is_st       := false.B
  io.tlb.req.bits.memidx.idx         := s0_uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx       := s0_uop.robIdx
  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc           := s0_uop.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_isFirstIssue

  // query DCache
  io.dcache.req.valid             := s0_valid
  io.dcache.req.bits.cmd          := Mux(s0_prf_rd,
                                      MemoryOpConstants.M_PFR,
                                      Mux(s0_prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
                                    )
  io.dcache.req.bits.vaddr        := s0_vaddr
  io.dcache.req.bits.mask         := s0_mask
  io.dcache.req.bits.data         := DontCare
  io.dcache.req.bits.isFirstIssue := s0_isFirstIssue
  io.dcache.req.bits.instrtype    := Mux(s0_prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
  io.dcache.req.bits.debug_robIdx := s0_uop.robIdx.value
  io.dcache.req.bits.replayCarry  := s0_rep_carry
  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
  io.dcache.pf_source             := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)

  // load flow priority mux
  def fromNullSource() = {
    s0_vaddr         := 0.U
    s0_mask          := 0.U
    s0_uop           := 0.U.asTypeOf(new DynInst)
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U
    s0_deqPortIdx    := 0.U
  }

  def fromFastReplaySource(src: LqWriteBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := src.mask
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := src.hasROBEntry
    s0_rep_carry     := src.rep_info.rep_carry
    s0_mshrid        := src.rep_info.mshr_id
    s0_rsIdx         := src.rsIdx
    s0_isFirstIssue  := false.B
    s0_fast_rep      := true.B
    s0_ld_rep        := src.isLoadReplay
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
    s0_deqPortIdx    := src.deqPortIdx
  }

  def fromNormalReplaySource(src: LsPipelineBundle) = {
    s0_vaddr         := src.vaddr
    s0_mask          := genVWmask(src.vaddr, src.uop.fuOpType(1, 0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rsIdx         := src.rsIdx
    s0_rep_carry     := src.replayCarry
    s0_mshrid        := src.mshrid
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := true.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := src.schedIndex
    s0_deqPortIdx    := src.deqPortIdx
  }

  def fromPrefetchSource(src: L1PrefetchReq) = {
    s0_vaddr         := src.getVaddr()
    s0_mask          := 0.U
    s0_uop           := DontCare
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := true.B
    s0_prf_rd        := !src.is_store
    s0_prf_wr        := src.is_store
    s0_sched_idx     := 0.U
    s0_deqPortIdx    := 0.U
  }

  def fromIntIssueSource(src: MemExuInput) = {
    s0_vaddr         := src.src(0) + SignExt(src.uop.imm(11, 0), VAddrBits)
    s0_mask          := genVWmask(s0_vaddr, src.uop.fuOpType(1, 0))
    s0_uop           := src.uop
    s0_try_l2l       := false.B
    s0_has_rob_entry := true.B
    s0_rsIdx         := src.iqIdx
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := true.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := LSUOpType.isPrefetch(src.uop.fuOpType)
    s0_prf_rd        := src.uop.fuOpType === LSUOpType.prefetch_r
    s0_prf_wr        := src.uop.fuOpType === LSUOpType.prefetch_w
    s0_sched_idx     := 0.U
    s0_deqPortIdx    := src.deqPortIdx
  }

  def fromVecIssueSource() = {
    s0_vaddr         := 0.U
    s0_mask          := 0.U
    s0_uop           := 0.U.asTypeOf(new DynInst)
    s0_try_l2l       := false.B
    s0_has_rob_entry := false.B
    s0_rsIdx         := 0.U
    s0_rep_carry     := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_mshrid        := 0.U
    s0_isFirstIssue  := false.B
    s0_fast_rep      := false.B
    s0_ld_rep        := false.B
    s0_l2l_fwd       := false.B
    s0_prf           := false.B
    s0_prf_rd        := false.B
    s0_prf_wr        := false.B
    s0_sched_idx     := 0.U
    s0_deqPortIdx    := 0.U
  }

  def fromLoadToLoadSource(src: LoadToLoadIO) = {
    s0_vaddr              := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5, 0))
    s0_mask               := genVWmask(s0_vaddr, io.ld_fast_fuOpType(1, 0))
    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
    // Assume the pointer chasing is always ld.
    s0_uop.fuOpType       := io.ld_fast_fuOpType
    s0_try_l2l            := true.B
    // we don't care about s0_isFirstIssue, s0_rsIdx, s0_sqIdx or s0_deqPortIdx
    // in s0 when trying pointer chasing, because these signals will be updated in s1
    s0_has_rob_entry      := false.B
    s0_rsIdx              := 0.U
    s0_mshrid             := 0.U
    s0_rep_carry          := 0.U.asTypeOf(s0_rep_carry.cloneType)
    s0_isFirstIssue       := true.B
    s0_fast_rep           := false.B
    s0_ld_rep             := false.B
    s0_l2l_fwd            := true.B
    s0_prf                := false.B
    s0_prf_rd             := false.B
    s0_prf_wr             := false.B
    s0_sched_idx          := 0.U
    s0_deqPortIdx         := 0.U
  }

  // set default
  s0_uop := DontCare
  when (s0_super_ld_rep_select)      { fromNormalReplaySource(io.replay.bits)     }
  .elsewhen (s0_ld_fast_rep_select)  { fromFastReplaySource(io.fast_rep_in.bits)  }
  .elsewhen (s0_ld_rep_select)       { fromNormalReplaySource(io.replay.bits)     }
  .elsewhen (s0_hw_prf_select)       { fromPrefetchSource(io.prefetch_req.bits)   }
  .elsewhen (s0_int_iss_select)      { fromIntIssueSource(io.ldin.bits)           }
  .elsewhen (s0_vec_iss_select)      { fromVecIssueSource()                       }
  .otherwise {
    if (EnableLoadToLoadForward) {
      fromLoadToLoadSource(io.l2l_fwd_in)
    } else {
      fromNullSource()
    }
  }

  // address align check
  val s0_addr_aligned = LookupTree(s0_uop.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))
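
  // NOTE (illustrative sketch, not part of the design): the LookupTree above is
  // the unrolled form of a generic power-of-two alignment check. An equivalent
  // helper (hypothetical name, not defined in this file) might look like:
  //
  //   def isAligned(vaddr: UInt, sizeLog2: UInt): Bool =
  //     (vaddr & ((1.U << sizeLog2).asUInt - 1.U)) === 0.U
  //   // e.g. fuOpType(1, 0) = "b11" (d): the low 3 vaddr bits must be zero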

  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetch need writeback to loadQueueFlag
  s0_out               := DontCare
  s0_out.rsIdx         := s0_rsIdx
  s0_out.vaddr         := s0_vaddr
  s0_out.mask          := s0_mask
  s0_out.uop           := s0_uop
  s0_out.isFirstIssue  := s0_isFirstIssue
  s0_out.hasROBEntry   := s0_has_rob_entry
  s0_out.isPrefetch    := s0_prf
  s0_out.isHWPrefetch  := s0_hw_prf
  s0_out.isFastReplay  := s0_fast_rep
  s0_out.isLoadReplay  := s0_ld_rep
  s0_out.isFastPath    := s0_l2l_fwd
  s0_out.mshrid        := s0_mshrid
  s0_out.uop.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned
  s0_out.forward_tlDchannel := s0_super_ld_rep_select
  when(io.tlb.req.valid && s0_isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }.otherwise{
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex     := s0_sched_idx
  s0_out.deqPortIdx     := s0_deqPortIdx

  // load fast replay
  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.ldin.ready := (s0_can_go && io.dcache.req.ready && s0_int_iss_ready)

  // for hw prefetch load flow feedback, to be added later
  // io.prefetch_in.ready := s0_hw_prf_select

  // dcache replacement extra info
  // TODO: should prefetch load update replacement?
  io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.replay.bits.replacementUpdated, false.B)

  XSDebug(io.dcache.req.fire,
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSDebug(s0_valid,
    p"S0: pc ${Hexadecimal(s0_out.uop.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid      = RegInit(false.B)
  val s1_in         = Wire(new LqWriteBundle)
  val s1_out        = Wire(new LqWriteBundle)
  val s1_kill       = Wire(Bool())
  val s1_can_go     = s2_ready
  val s1_fire       = s1_valid && !s1_kill && s1_can_go

  s1_ready := !s1_valid || s1_kill || s2_ready
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
  s1_in   := RegEnable(s0_out, s0_fire)

  val s1_fast_rep_dly_err = RegNext(io.fast_rep_in.bits.delayedLoadError)
  val s1_fast_rep_kill    = s1_fast_rep_dly_err && s1_in.isFastReplay
  val s1_l2l_fwd_dly_err  = RegNext(io.l2l_fwd_in.dly_ld_err)
  val s1_l2l_fwd_kill     = s1_l2l_fwd_dly_err && s1_in.isFastPath
  val s1_late_kill        = s1_fast_rep_kill || s1_l2l_fwd_kill
  val s1_vaddr_hi         = Wire(UInt())
  val s1_vaddr_lo         = Wire(UInt())
  val s1_vaddr            = Wire(UInt())
  val s1_paddr_dup_lsu    = Wire(UInt())
  val s1_paddr_dup_dcache = Wire(UInt())
  val s1_exception        = ExceptionNO.selectByFu(s1_out.uop.exceptionVec, LduCfg).asUInt.orR   // af & pf exceptions are modified below
  val s1_tlb_miss         = io.tlb.resp.bits.miss
  val s1_prf              = s1_in.isPrefetch
  val s1_hw_prf           = s1_in.isHWPrefetch
  val s1_sw_prf           = s1_prf && !s1_hw_prf
  val s1_tlb_memidx       = io.tlb.resp.bits.memidx

  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo         := s1_in.vaddr(5, 0)
  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
    // printf("load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill   := s1_kill
  io.tlb.resp.ready := true.B

  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.dcache.s1_kill             := s1_kill || s1_tlb_miss || s1_exception

  // store to load forwarding
  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_prf)
  io.sbuffer.vaddr := s1_vaddr
  io.sbuffer.paddr := s1_paddr_dup_lsu
  io.sbuffer.uop   := s1_in.uop
  io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.sbuffer.mask  := s1_in.mask
  io.sbuffer.pc    := s1_in.uop.pc // FIXME: remove it

  io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_prf)
  io.lsq.forward.vaddr     := s1_vaddr
  io.lsq.forward.paddr     := s1_paddr_dup_lsu
  io.lsq.forward.uop       := s1_in.uop
  io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
  io.lsq.forward.sqIdxMask := 0.U
  io.lsq.forward.mask      := s1_in.mask
  io.lsq.forward.pc        := s1_in.uop.pc // FIXME: remove it

  // st-ld violation query
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                       io.stld_nuke_query(w).valid && // query valid
                       isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                       // TODO: Fix me when vector instruction
                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                       (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data masks overlap
                      })).asUInt.orR && !s1_tlb_miss
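
  // NOTE (illustrative sketch, not part of the design): the paddr compare above
  // drops bits (2, 0), i.e. it matches at 8-byte (doubleword) granularity, and
  // the mask AND then confirms a real byte overlap inside that doubleword, e.g.:
  //
  //   val loadMask  = "b00001111".U // 4 bytes at offset 0
  //   val storeMask = "b11110000".U // 4 bytes at offset 4, same doubleword
  //   (loadMask & storeMask).orR    // false.B: same granule, but no nuke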

  s1_out                   := s1_in
  s1_out.vaddr             := s1_vaddr
  s1_out.paddr             := s1_paddr_dup_lsu
  s1_out.tlbMiss           := s1_tlb_miss
  s1_out.ptwBack           := io.tlb.resp.bits.ptwBack
  s1_out.rsIdx             := s1_in.rsIdx
  s1_out.rep_info.debug    := s1_in.uop.debugInfo
  s1_out.rep_info.nuke     := s1_nuke && !s1_sw_prf
  s1_out.lateKill          := s1_late_kill

  when (!s1_late_kill) {
    // current ori test will cause the case of ldest == 0; this will be modified in the future
    // af & pf exceptions are modified here
    s1_out.uop.exceptionVec(loadPageFault)   := io.tlb.resp.bits.excp(0).pf.ld
    s1_out.uop.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld
  } .otherwise {
    s1_out.uop.exceptionVec(loadAddrMisaligned) := false.B
    s1_out.uop.exceptionVec(loadAccessFault)    := s1_late_kill
  }

  // pointer chasing
  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
  val s1_fu_op_type_not_ld     = WireInit(false.B)
  val s1_not_fast_match        = WireInit(false.B)
  val s1_addr_mismatch         = WireInit(false.B)
  val s1_addr_misaligned       = WireInit(false.B)
  val s1_ptr_chasing_canceled  = WireInit(false.B)
  val s1_cancel_ptr_chasing    = WireInit(false.B)

  s1_kill := s1_late_kill ||
             s1_cancel_ptr_chasing ||
             s1_in.uop.robIdx.needFlush(io.redirect) ||
             RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid)

  if (EnableLoadToLoadForward) {
    // Sometimes, we need to cancel the load-load forwarding.
    // These can be put at S0 if timing is bad at S1.
    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
    s1_addr_mismatch      := s1_ptr_chasing_vaddr(6) || RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
    // Case 1: the address is misaligned, kill s1
    s1_addr_misaligned    := LookupTree(s1_in.uop.fuOpType(1, 0), List(
                             "b00".U   -> false.B,                  //b
                             "b01".U   -> (s1_vaddr(0)    =/= 0.U), //h
                             "b10".U   -> (s1_vaddr(1, 0) =/= 0.U), //w
                             "b11".U   -> (s1_vaddr(2, 0) =/= 0.U)  //d
                          ))
    // Case 2: this load-load uop is cancelled
    s1_ptr_chasing_canceled := !io.ldin.valid

    when (s1_try_ptr_chasing) {
      s1_cancel_ptr_chasing := s1_addr_mismatch || s1_addr_misaligned || s1_ptr_chasing_canceled

      s1_in.uop           := io.ldin.bits.uop
      s1_in.rsIdx         := io.ldin.bits.iqIdx
      s1_in.isFirstIssue  := io.ldin.bits.isFirstIssue
      s1_in.deqPortIdx    := io.ldin.bits.deqPortIdx
      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)

      // record tlb response time when getting the data, to keep the latency calculation correct
      // (strictly it should not be recorded here, because this path does not use the tlb)
      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
    }
    when (!s1_cancel_ptr_chasing) {
      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire
      when (s1_try_ptr_chasing) {
        io.ldin.ready := true.B
      }
    }
  }

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
  // to enable load-load, sqIdxMask must be calculated based on ldin.uop
  // If the timing here is not OK, load-load forwarding has to be disabled.
  // Or should we calculate sqIdxMask at RS?
  io.lsq.forward.sqIdxMask := s1_sqIdx_mask
  if (EnableLoadToLoadForward) {
    when (s1_try_ptr_chasing) {
      io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize)
    }
  }
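
  // NOTE (illustrative sketch, not part of the design): UIntToMask(ptr, n) is
  // assumed here to return an n-bit mask with the low `ptr` bits set, marking
  // the store queue entries older than this load for the forwarding CAM, e.g.:
  //
  //   UIntToMask(3.U, 8)  // = "b00000111".U: entries 0..2 may forward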

  io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel
  io.forward_mshr.mshrid := s1_out.mshrid
  io.forward_mshr.paddr  := s1_out.paddr

  XSDebug(s1_valid,
    p"S1: pc ${Hexadecimal(s1_out.uop.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // s2: DCache resp
  val s2_valid  = RegInit(false.B)
  val s2_in     = Wire(new LqWriteBundle)
  val s2_out    = Wire(new LqWriteBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go

  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }
  s2_in := RegEnable(s1_out, s1_fire)

  val s2_pmp = WireInit(io.pmp)

  val s2_prf    = s2_in.isPrefetch
  val s2_hw_prf = s2_in.isHWPrefetch

  // exceptions that may cause the load addr to be invalid / illegal;
  // if such an exception happens, the inst and its exception info
  // will be force-writebacked to the rob
  val s2_exception_vec = WireInit(s2_in.uop.exceptionVec)
  when (!s2_in.lateKill) {
    s2_exception_vec(loadAccessFault) := s2_in.uop.exceptionVec(loadAccessFault) || s2_pmp.ld
    // soft prefetch will not trigger any exception (but ecc error interrupt may be triggered)
    when (s2_prf || s2_in.tlbMiss) {
      s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
    }
  }
  val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, LduCfg).asUInt.orR

  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward()
  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)

  // writeback access fault caused by ecc error / bus error
  // * ecc data error is slow to generate, so we will not use it until load stage 3
  // * in load stage 3, an extra signal io.load_error will be used to
  val s2_actually_mmio = s2_pmp.mmio
  val s2_mmio          = !s2_prf &&
                          s2_actually_mmio &&
                         !s2_exception &&
                         !s2_in.tlbMiss

  val s2_full_fwd      = Wire(Bool())
  val s2_mem_amb       = s2_in.uop.storeSetHit &&
                         io.lsq.forward.addrInvalid

  val s2_tlb_miss      = s2_in.tlbMiss
  val s2_fwd_fail      = io.lsq.forward.dataInvalid
  val s2_dcache_miss   = io.dcache.resp.bits.miss &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_mq_nack       = io.dcache.s2_mq_nack &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_bank_conflict = io.dcache.s2_bank_conflict &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_rar_nack      = io.lsq.ldld_nuke_query.req.valid &&
                         !io.lsq.ldld_nuke_query.req.ready

  val s2_raw_nack      = io.lsq.stld_nuke_query.req.valid &&
                         !io.lsq.stld_nuke_query.req.ready
  // st-ld violation query
  //  fast recovery is needed when:
  //  1. the fast recovery query request is valid;
  //  2. the load instruction is younger than the requestors (store instructions);
  //  3. the physical addresses match;
  //  4. the data masks overlap.
  val s2_nuke          = VecInit((0 until StorePipelineWidth).map(w => {
                          io.stld_nuke_query(w).valid && // query valid
                          isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                          // TODO: Fix me when vector instruction
                          (s2_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                          (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data masks overlap
                        })).asUInt.orR && !s2_tlb_miss || s2_in.rep_info.nuke

  val s2_cache_handled   = io.dcache.resp.bits.handled
  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
                           io.dcache.resp.bits.tag_error

  val s2_troublem        = !s2_exception &&
                           !s2_mmio &&
                           !s2_prf &&
                           !s2_in.lateKill

  io.dcache.resp.ready  := true.B
  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_mmio || s2_prf || s2_in.lateKill)
  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")

  // fast replay require
  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
  val s2_nuke_fast_rep   = !s2_mq_nack &&
                           !s2_dcache_miss &&
                           !s2_bank_conflict &&
                           !s2_wpu_pred_fail &&
                           !s2_rar_nack &&
                           !s2_raw_nack &&
                           s2_nuke

  val s2_fast_rep = !s2_mem_amb &&
                    !s2_tlb_miss &&
                    !s2_fwd_fail &&
                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
                    s2_troublem

  // need to allocate a new entry
  val s2_can_query = !s2_mem_amb &&
                     !s2_tlb_miss &&
                     !s2_fwd_fail &&
                     !s2_dcache_fast_rep &&
                     s2_troublem

  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)

  // ld-ld violation require
  io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // st-ld violation require
  io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // merge forward result
  // lsq has higher priority than sbuffer
  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid
  // generate VLEN/8 Muxes
  for (i <- 0 until VLEN / 8) {
    s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i)
    s2_fwd_data(i) := Mux(io.lsq.forward.forwardMask(i), io.lsq.forward.forwardData(i), io.sbuffer.forwardData(i))
  }
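
  // NOTE (illustrative sketch, not part of the design): per byte lane, lsq data
  // beats sbuffer data because stores still in the store queue are younger than
  // stores already drained to the sbuffer. For one lane the Mux above acts as:
  //
  //   lsqMask sbufMask | fwdMask fwdData
  //      1       x     |    1    lsq byte
  //      0       1     |    1    sbuffer byte
  //      0       0     |    0    (dcache data used instead)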

  XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_in.uop.pc,
    io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt,
    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
  )

  //
  s2_out                     := s2_in
  s2_out.data                := 0.U // data will be generated in load s3
  s2_out.uop.fpWen           := s2_in.uop.fpWen && !s2_exception
  s2_out.mmio                := s2_mmio
  s2_out.uop.flushPipe       := false.B
  s2_out.uop.exceptionVec    := s2_exception_vec
  s2_out.forwardMask         := s2_fwd_mask
  s2_out.forwardData         := s2_fwd_data
  s2_out.handledByMSHR       := s2_cache_handled
  s2_out.miss                := s2_dcache_miss && s2_troublem
  s2_out.feedbacked          := io.feedback_fast.valid

  // Generate replay signal caused by:
  // * st-ld violation check
  // * tlb miss
  // * dcache replay
  // * forward data invalid
  // * dcache miss
  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
  s2_out.rep_info.full_fwd        := s2_data_fwded
  s2_out.rep_info.data_inv_sq_idx := io.lsq.forward.dataInvalidSqIdx
  s2_out.rep_info.addr_inv_sq_idx := io.lsq.forward.addrInvalidSqIdx
  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
  s2_out.rep_info.debug           := s2_in.uop.debugInfo
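
  // NOTE (illustrative sketch, not part of the design): a cache line refills in
  // beats of refillBytes, so paddr(log2Up(refillBytes)) says which beat holds
  // this load's data. Assuming 32-byte beats on a 64-byte line (parameters not
  // fixed by this file):
  //
  //   paddr offset 0x00..0x1f  // last_beat = 0: data in the first beat
  //   paddr offset 0x20..0x3f  // last_beat = 1: data in the second beat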

  // if forward fails, replay this inst from fetch
  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
  // if ld-ld violation is detected, replay this inst from fetch
  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss
  // io.out.bits.uop.replayInst := false.B

  // to be removed
  io.feedback_fast.valid                 := s2_valid &&                 // inst is valid
                                            !s2_in.isLoadReplay &&      // already feedbacked
                                            io.lq_rep_full &&           // LoadQueueReplay is full
                                            s2_out.rep_info.need_rep && // need replay
                                            !s2_exception &&            // no exception is triggered
                                            !s2_hw_prf                  // not hardware prefetch
  io.feedback_fast.bits.hit              := false.B
  io.feedback_fast.bits.flushState       := s2_in.ptwBack
  io.feedback_fast.bits.robIdx           := s2_in.uop.robIdx
  io.feedback_fast.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_fast.bits.dataInvalidSqIdx := DontCare

  io.ldCancel.ld1Cancel.valid := s2_valid && (
    (s2_out.rep_info.need_rep && s2_out.isFirstIssue) ||                // exe fail and issued from IQ
    s2_mmio                                                             // is mmio
  )
  io.ldCancel.ld1Cancel.bits := s2_out.deqPortIdx

  // fast wakeup
  io.fast_uop.valid := RegNext(
    !io.dcache.s1_disable_fast_wakeup &&
    s1_valid &&
    !s1_kill &&
    !io.tlb.resp.bits.miss &&
    !io.lsq.forward.dataInvalidFast
  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio)
  io.fast_uop.bits := RegNext(s1_out.uop)

  //
  io.s2_ptr_chasing                    := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)

  io.prefetch_train.valid              := s2_valid && !s2_actually_mmio && !s2_in.tlbMiss
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train.bits.miss          := io.dcache.resp.bits.miss // TODO: use trace with bank conflict?
  io.prefetch_train.bits.meta_prefetch := io.dcache.resp.bits.meta_prefetch
  io.prefetch_train.bits.meta_access   := io.dcache.resp.bits.meta_access


  io.prefetch_train_l1.valid              := s2_valid && !s2_actually_mmio
  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in)
  io.prefetch_train_l1.bits.miss          := io.dcache.resp.bits.miss
  io.prefetch_train_l1.bits.meta_prefetch := io.dcache.resp.bits.meta_prefetch
  io.prefetch_train_l1.bits.meta_access   := io.dcache.resp.bits.meta_access
  if (env.FPGAPlatform) {
    io.dcache.s0_pc := DontCare
    io.dcache.s1_pc := DontCare
    io.dcache.s2_pc := DontCare
  } else {
    io.dcache.s0_pc := s0_out.uop.pc
    io.dcache.s1_pc := s1_out.uop.pc
    io.dcache.s2_pc := s2_out.uop.pc
  }
  io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill

  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready
  val s2_ld_valid_dup = RegInit(0.U(6.W))
  s2_ld_valid_dup := 0x0.U(6.W)
  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
  when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) }
  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))
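
  // NOTE (illustrative sketch, not part of the design): s2_ld_valid_dup keeps
  // six identical copies of one valid bit so each downstream consumer gets a
  // private, low-fanout driver; the assert checks the copies track s2_valid.
  // The generic form of the pattern (next_valid is a placeholder):
  //
  //   val valid_dup = RegInit(0.U(6.W))
  //   valid_dup := Fill(6, next_valid) // one logical value, six physical bits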

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 3
  // --------------------------------------------------------------------------------
  // writeback and update load queue
  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
  val s3_in           = RegEnable(s2_out, s2_fire)
  val s3_out          = Wire(Valid(new MemExuOutput))
  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
  val s3_fast_rep     = Wire(Bool())
  val s3_troublem     = RegNext(s2_troublem)
  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
  s3_ready := !s3_valid || s3_kill || io.ldout.ready

  // forward last beat
  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid)
  val s3_nuke          = VecInit((0 until StorePipelineWidth).map(w => {
                          io.stld_nuke_query(w).valid && // query valid
                          isAfter(s3_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                          // TODO: Fix me when vector instruction
                          (s3_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                          (s3_in.mask & io.stld_nuke_query(w).bits.mask).orR // data masks overlap
                        })).asUInt.orR && !s3_in.tlbMiss || s3_in.rep_info.nuke


  // s3 load fast replay
  io.fast_rep_out.valid := s3_valid && s3_fast_rep && !s3_in.uop.robIdx.needFlush(io.redirect)
  io.fast_rep_out.bits := s3_in

  io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || !io.fast_rep_out.ready) && !s3_in.feedbacked && !s3_in.lateKill
  io.lsq.ldin.bits := s3_in
  io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid

  /* <------- DANGEROUS: Don't change sequence here ! -------> */
  io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
  io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
  io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)

  val s3_dly_ld_err =
    if (EnableAccurateLoadError) {
      (s3_in.lateKill || io.dcache.resp.bits.error_delayed) && RegNext(io.csrCtrl.cache_error_enable)
    } else {
      WireInit(false.B)
    }
  io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
  io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err
  io.lsq.ldin.bits.dcacheRequireReplay  := s3_dcache_rep

  val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid) && s3_troublem
  val s3_ldld_rep_inst =
      io.lsq.ldld_nuke_query.resp.valid &&
      io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
      RegNext(io.csrCtrl.ldld_vio_check_enable)

  val s3_rep_info = WireInit(s3_in.rep_info)
  s3_rep_info.wpu_fail      := s3_in.rep_info.wpu_fail && !s3_fwd_frm_d_chan_valid && s3_troublem
  s3_rep_info.bank_conflict := s3_in.rep_info.bank_conflict && !s3_fwd_frm_d_chan_valid && s3_troublem
  s3_rep_info.dcache_miss   := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid && s3_troublem
  s3_rep_info.nuke          := s3_nuke && s3_troublem
  val s3_rep_frm_fetch = s3_vp_match_fail || s3_ldld_rep_inst
  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)
  val s3_force_rep     = s3_sel_rep_cause(LoadReplayCauses.C_TM) &&
                         !s3_in.uop.exceptionVec(loadAddrMisaligned) &&
                         s3_troublem

  val s3_exception = ExceptionNO.selectByFu(s3_in.uop.exceptionVec, LduCfg).asUInt.orR
  when ((s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) && !s3_force_rep) {
    io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
  } .otherwise {
    io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
  }
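
  // NOTE (illustrative sketch, not part of the design): PriorityEncoderOH keeps
  // exactly the lowest set bit, so at most one replay cause reaches
  // LoadQueueReplay and the entry is scheduled on a single reason, e.g.:
  //
  //   PriorityEncoderOH("b0010100".U) // = "b0000100".U: only bit 2 survives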

  // Int load, if hit, will be writebacked at s3
  s3_out.valid                := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio
  s3_out.bits.uop             := s3_in.uop
  s3_out.bits.uop.exceptionVec(loadAccessFault) := s3_dly_ld_err || s3_in.uop.exceptionVec(loadAccessFault)
  s3_out.bits.uop.replayInst  := s3_rep_frm_fetch
  s3_out.bits.data            := s3_in.data
  s3_out.bits.debug.isMMIO    := s3_in.mmio
  s3_out.bits.debug.isPerfCnt := false.B
  s3_out.bits.debug.paddr     := s3_in.paddr
  s3_out.bits.debug.vaddr     := s3_in.vaddr

  when (s3_force_rep) {
    s3_out.bits.uop.exceptionVec := 0.U.asTypeOf(s3_in.uop.exceptionVec.cloneType)
  }

  /* <------- DANGEROUS: Don't change sequence here ! -------> */

  io.lsq.ldin.bits.uop := s3_out.bits.uop

  val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep
  io.lsq.ldld_nuke_query.revoke := s3_revoke
  io.lsq.stld_nuke_query.revoke := s3_revoke

  // feedback slow
  s3_fast_rep := RegNext(s2_fast_rep) &&
                 !s3_in.feedbacked &&
                 !s3_in.lateKill &&
                 !s3_rep_frm_fetch &&
                 !s3_exception

  val s3_fb_no_waiting = !s3_in.isLoadReplay && !(s3_fast_rep && io.fast_rep_out.ready) && !s3_in.feedbacked

  //
  io.feedback_slow.valid                 := s3_valid && !s3_in.uop.robIdx.needFlush(io.redirect) && s3_fb_no_waiting
  io.feedback_slow.bits.hit              := !io.lsq.ldin.bits.rep_info.need_rep || io.lsq.ldin.ready
  io.feedback_slow.bits.flushState       := s3_in.ptwBack
  io.feedback_slow.bits.robIdx           := s3_in.uop.robIdx
  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_slow.bits.dataInvalidSqIdx := DontCare

  io.ldCancel.ld2Cancel.valid := s3_valid && (
    (io.lsq.ldin.bits.rep_info.need_rep && s3_in.isFirstIssue) ||
    s3_in.mmio
  )
  io.ldCancel.ld2Cancel.bits := s3_in.deqPortIdx

  val s3_ld_wb_meta = Mux(s3_out.valid, s3_out.bits, io.lsq.uncache.bits)

  // data from load queue refill
  val s3_ld_raw_data_frm_uncache = io.lsq.ld_raw_data
  val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData()
  val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List(
    "b000".U -> s3_merged_data_frm_uncache(63,  0),
    "b001".U -> s3_merged_data_frm_uncache(63,  8),
    "b010".U -> s3_merged_data_frm_uncache(63, 16),
    "b011".U -> s3_merged_data_frm_uncache(63, 24),
    "b100".U -> s3_merged_data_frm_uncache(63, 32),
    "b101".U -> s3_merged_data_frm_uncache(63, 40),
    "b110".U -> s3_merged_data_frm_uncache(63, 48),
    "b111".U -> s3_merged_data_frm_uncache(63, 56)
  ))
  val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache)

  // data from dcache hit
  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)

  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
    "b0000".U -> s3_merged_data_frm_cache(63,    0),
    "b0001".U -> s3_merged_data_frm_cache(63,    8),
    "b0010".U -> s3_merged_data_frm_cache(63,   16),
    "b0011".U -> s3_merged_data_frm_cache(63,   24),
    "b0100".U -> s3_merged_data_frm_cache(63,   32),
    "b0101".U -> s3_merged_data_frm_cache(63,   40),
    "b0110".U -> s3_merged_data_frm_cache(63,   48),
    "b0111".U -> s3_merged_data_frm_cache(63,   56),
    "b1000".U -> s3_merged_data_frm_cache(127,  64),
    "b1001".U -> s3_merged_data_frm_cache(127,  72),
    "b1010".U -> s3_merged_data_frm_cache(127,  80),
    "b1011".U -> s3_merged_data_frm_cache(127,  88),
    "b1100".U -> s3_merged_data_frm_cache(127,  96),
    "b1101".U -> s3_merged_data_frm_cache(127, 104),
    "b1110".U -> s3_merged_data_frm_cache(127, 112),
    "b1111".U -> s3_merged_data_frm_cache(127, 120)
  ))
  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
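
  // NOTE (illustrative sketch, not part of the design): the 16-entry LookupTree
  // above is a hand-unrolled byte shifter; for the bytes an aligned access
  // actually uses, it behaves like a right shift by the byte offset, with
  // rdataHelper then sign-/zero-extending per fuOpType:
  //
  //   val shifted = s3_merged_data_frm_cache >> (s3_ld_raw_data_frm_cache.addrOffset << 3)
  //   val picked  = shifted(63, 0)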
1174
1175  // FIXME: add 1 cycle delay ?
1176  io.lsq.uncache.ready := !s3_out.valid
1177  io.ldout.bits        := s3_ld_wb_meta
1178  io.ldout.bits.data   := Mux(s3_out.valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache)
1179  io.ldout.valid       := s3_out.valid && !s3_out.bits.uop.robIdx.needFlush(io.redirect) ||
1180                         io.lsq.uncache.valid && !io.lsq.uncache.bits.uop.robIdx.needFlush(io.redirect) && !s3_out.valid
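  // Writeback arbitration: a valid pipeline result (s3_out) always wins over
  // an uncache writeback from the load queue; uncache data is accepted only
  // when the pipeline has nothing to write back (see io.lsq.uncache.ready above).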
1181
1182
1183  // fast load to load forward
1184  io.l2l_fwd_out.valid      := s3_out.valid && !s3_in.lateKill
1185  io.l2l_fwd_out.data       := s3_ld_data_frm_cache
1186  io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err // ecc delayed error
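  // The fast path is suppressed for late-killed loads, and the delayed ECC
  // error is carried along so that a dependent load issued from this data can
  // presumably be cancelled in time.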
1187
1188  // trigger
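  // Trigger CSR fields (tdata2 / matchType / tEnable) are registered for one
  // cycle so the address compare lines up with s3; last_valid_data additionally
  // delays the written-back load data for the data-match trigger.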
1189  val last_valid_data = RegNext(RegEnable(io.ldout.bits.data, io.ldout.fire))
1190  val hit_ld_addr_trig_hit_vec = Wire(Vec(3, Bool()))
1191  val lq_ld_addr_trig_hit_vec = io.lsq.trigger.lqLoadAddrTriggerHitVec
1192  (0 until 3).foreach { i =>
1193    val tdata2    = RegNext(io.trigger(i).tdata2)
1194    val matchType = RegNext(io.trigger(i).matchType)
1195    val tEnable   = RegNext(io.trigger(i).tEnable)
1196
1197    hit_ld_addr_trig_hit_vec(i) := TriggerCmp(RegNext(s2_out.vaddr), tdata2, matchType, tEnable)
1198    io.trigger(i).addrHit       := Mux(s3_out.valid, hit_ld_addr_trig_hit_vec(i), lq_ld_addr_trig_hit_vec(i))
1199    io.trigger(i).lastDataHit   := TriggerCmp(last_valid_data, tdata2, matchType, tEnable)
1200  }
1201  io.lsq.trigger.hitLoadAddrTriggerHitVec := hit_ld_addr_trig_hit_vec
1202
1203  // FIXME: please move this part to LoadQueueReplay
1204  io.debug_ls := DontCare
1205
1206  // Topdown
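  // Export per-stage address information for top-down bottleneck analysis: the
  // vaddr is available in s1, while the paddr only becomes valid once s2 has
  // fired without a TLB miss.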
1207  io.lsTopdownInfo.s1.robIdx          := s1_in.uop.robIdx.value
1208  io.lsTopdownInfo.s1.vaddr_valid     := s1_valid && s1_in.hasROBEntry
1209  io.lsTopdownInfo.s1.vaddr_bits      := s1_vaddr
1210  io.lsTopdownInfo.s2.robIdx          := s2_in.uop.robIdx.value
1211  io.lsTopdownInfo.s2.paddr_valid     := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1212  io.lsTopdownInfo.s2.paddr_bits      := s2_in.paddr
1213  io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss
1214  io.lsTopdownInfo.s2.cache_miss_en   := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated
1215
1216  // perf cnt
1217  XSPerfAccumulate("s0_in_valid",                  io.ldin.valid)
1218  XSPerfAccumulate("s0_in_block",                  io.ldin.valid && !io.ldin.fire)
1219  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_isFirstIssue)
1220  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.replay.fire)
1221  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.ldin.fire && s0_isFirstIssue)
1222  XSPerfAccumulate("s0_fast_replay_issue",         io.fast_rep_in.fire)
1223  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1224  XSPerfAccumulate("s0_stall_dcache",              s0_valid && !io.dcache.req.ready)
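  // Address-speculation counters: compare the page index (bits above the 4 KiB
  // page offset) of the generated vaddr against the base register src(0); they
  // match when adding the offset did not cross a page boundary.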
1225  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12))
1226  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12))
1227  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1228  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_isFirstIssue)
1229  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1230  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1231  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_prf && s0_int_iss_select)
1232  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
1233  XSPerfAccumulate("s0_hardware_prefetch_total",   io.prefetch_req.valid)
1234
1235  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1236  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1237  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1238  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1239  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1240  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1241  XSPerfAccumulate("s1_late_kill",                 s1_valid && s1_fast_rep_kill)
1242
1243  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1244  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1245  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1246  XSPerfAccumulate("s2_dcache_miss",               s2_fire && io.dcache.resp.bits.miss)
1247  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1248  XSPerfAccumulate("s2_dcache_real_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1249  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1250  XSPerfAccumulate("s2_fwd_frm_d_chan",            s2_valid && s2_fwd_frm_d_chan)
1251  XSPerfAccumulate("s2_fwd_frm_d_can",             s2_valid && s2_fwd_frm_d_chan)
1252  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr",    s2_valid && s2_fwd_frm_d_chan_or_mshr)
1253  XSPerfAccumulate("s2_stall_out",                 s2_valid && !s2_can_go)
1254  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1255  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
1256  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req miss in l1
1257  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hit in l1
1258  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
1259  XSPerfAccumulate("s2_forward_req",               s2_fire && s2_in.forward_tlDchannel)
1260  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
1261  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)
1262
1263  XSPerfAccumulate("s3_fwd_frm_d_chan",            s3_valid && s3_fwd_frm_d_chan_valid)
1264
1265  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1266  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1267  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1268  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1269  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1270  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1271  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1272  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1273
1274  // BUG (lyq): some signals in perfEvents are no longer suitable for the current MemBlock design
1275  // hardware performance counters
1276  val perfEvents = Seq(
1277    ("load_s0_in_fire         ", s0_fire                                                        ),
1278    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1279    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready                  ),
1280    ("load_s1_in_fire         ", s0_fire                                                        ),
1281    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1282    ("load_s2_in_fire         ", s1_fire                                                        ),
1283    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                            ),
1284  )
1285  generatePerfEvent()
1286
1287  when(io.ldout.fire){
1288    XSDebug("ldout %x\n", io.ldout.bits.uop.pc)
1289  }
1290  // end
1291}