xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision d0de7e4a4bcd4633260dda99dfedc2a5e543b8b4)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.mem
18
19import org.chipsalliance.cde.config.Parameters
20import chisel3._
21import chisel3.util._
22import utils._
23import utility._
24import xiangshan.ExceptionNO._
25import xiangshan._
26import xiangshan.backend.fu.PMPRespBundle
27import xiangshan.backend.rob.{DebugLsInfoBundle, LsTopdownInfo, RobPtr}
28import xiangshan.cache._
29import xiangshan.cache.wpu.ReplayCarry
30import xiangshan.cache.mmu._
31import xiangshan.mem.mdp._
32
33class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle
34  with HasDCacheParameters
35  with HasTlbConst
36{
37  // mshr refill index
38  val mshr_id         = UInt(log2Up(cfg.nMissEntries).W)
39  // get full data from store queue and sbuffer
40  val full_fwd        = Bool()
41  // wait for data from store inst's store queue index
42  val data_inv_sq_idx = new SqPtr
43  // wait for address from store queue index
44  val addr_inv_sq_idx = new SqPtr
45  // replay carry
46  val rep_carry       = new ReplayCarry(nWays)
47  // data in last beat
48  val last_beat       = Bool()
49  // replay cause
50  val cause           = Vec(LoadReplayCauses.allCauses, Bool())
51  // performance debug information
52  val debug           = new PerfDebugInfo
53  // tlb hint
54  val tlb_id          = UInt(log2Up(loadfiltersize).W)
55  val tlb_full        = Bool()
56
57  // alias
58  def mem_amb       = cause(LoadReplayCauses.C_MA)
59  def tlb_miss      = cause(LoadReplayCauses.C_TM)
60  def fwd_fail      = cause(LoadReplayCauses.C_FF)
61  def dcache_rep    = cause(LoadReplayCauses.C_DR)
62  def dcache_miss   = cause(LoadReplayCauses.C_DM)
63  def wpu_fail      = cause(LoadReplayCauses.C_WF)
64  def bank_conflict = cause(LoadReplayCauses.C_BC)
65  def rar_nack      = cause(LoadReplayCauses.C_RAR)
66  def raw_nack      = cause(LoadReplayCauses.C_RAW)
67  def nuke          = cause(LoadReplayCauses.C_NK)
68  def need_rep      = cause.asUInt.orR
69}
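// Illustrative use of the cause vector above (a sketch, not part of this
// file): setting a single cause bit is enough to make need_rep fire, since
// need_rep is just the OR-reduction of the whole vector.
//
//   val rep = Wire(new LoadToLsqReplayIO)
//   rep := DontCare
//   rep.cause := VecInit(Seq.fill(LoadReplayCauses.allCauses)(false.B))
//   rep.cause(LoadReplayCauses.C_TM) := true.B // flag a tlb miss
//   // now rep.tlb_miss and rep.need_rep are both true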
70
71
72class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
73  val ldin            = DecoupledIO(new LqWriteBundle)
74  val uncache         = Flipped(DecoupledIO(new ExuOutput))
75  val ld_raw_data     = Input(new LoadDataFromLQBundle)
76  val forward         = new PipeLoadForwardQueryIO
77  val stld_nuke_query = new LoadNukeQueryIO
78  val ldld_nuke_query = new LoadNukeQueryIO
79}
80
81class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
82  val valid      = Bool()
83  val data       = UInt(XLEN.W) // load to load fast path is limited to ld (64 bit) used as vaddr src1 only
84  val dly_ld_err = Bool()
85}
86
87class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle {
88  val tdata2      = Input(UInt(64.W))
89  val matchType   = Input(UInt(2.W))
90  val tEnable     = Input(Bool()) // timing is calculated before this
91  val addrHit     = Output(Bool())
92  val lastDataHit = Output(Bool())
93}
94
95class LoadUnit(implicit p: Parameters) extends XSModule
96  with HasLoadHelper
97  with HasPerfEvents
98  with HasDCacheParameters
99  with HasCircularQueuePtrHelper
100{
101  val io = IO(new Bundle() {
102    // control
103    val redirect      = Flipped(ValidIO(new Redirect))
104    val csrCtrl       = Flipped(new CustomCSRCtrlIO)
105
106    // int issue path
107    val ldin          = Flipped(Decoupled(new ExuInput))
108    val ldout         = Decoupled(new ExuOutput)
109    val rsIdx         = Input(UInt())
110    val isFirstIssue  = Input(Bool())
111
112    // data path
113    val tlb           = new TlbRequestIO(2)
114    val pmp           = Flipped(new PMPRespBundle()) // arrives in the same cycle as the tlb resp now
115    val dcache        = new DCacheLoadIO
116    val sbuffer       = new LoadForwardQueryIO
117    val lsq           = new LoadToLsqIO
118    val tl_d_channel  = Input(new DcacheToLduForwardIO)
119    val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
120   // val refill        = Flipped(ValidIO(new Refill))
121    val l2_hint       = Input(Valid(new L2ToL1Hint))
122    val tlb_hint      = Flipped(new TlbHintReq)
123    // fast wakeup
124    val fast_uop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1, sent to RS in load_s2
125
126    // prefetch
127    val prefetch_train            = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
128    val prefetch_train_l1         = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
129    val prefetch_req              = Flipped(ValidIO(new L1PrefetchReq)) // hardware prefetch to l1 cache req
130    val canAcceptLowConfPrefetch  = Output(Bool())
131    val canAcceptHighConfPrefetch = Output(Bool())
132
133    // load to load fast path
134    val l2l_fwd_in    = Input(new LoadToLoadIO)
135    val l2l_fwd_out   = Output(new LoadToLoadIO)
136
137    val ld_fast_match    = Input(Bool())
138    val ld_fast_fuOpType = Input(UInt())
139    val ld_fast_imm      = Input(UInt(12.W))
140
141    // rs feedback
142    val feedback_fast = ValidIO(new RSFeedback) // stage 2
143    val feedback_slow = ValidIO(new RSFeedback) // stage 3
144
145    // load ecc error
146    val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different
147
148    // schedule error query
149    val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))
150
151    // queue-based replay
152    val replay       = Flipped(Decoupled(new LsPipelineBundle))
153    val lq_rep_full  = Input(Bool())
154
155    // misc
156    val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch
157
158    // Load fast replay path
159    val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
160    val fast_rep_out = Decoupled(new LqWriteBundle)
161
162    // Load RAR rollback
163    val rollback = Valid(new Redirect)
164
165    // perf
166    val debug_ls         = Output(new DebugLsInfoBundle)
167    val lsTopdownInfo    = Output(new LsTopdownInfo)
168    val correctMissTrain = Input(Bool())
169  })
170
171  val s1_ready, s2_ready, s3_ready = WireInit(false.B)
172
173  // Pipeline
174  // --------------------------------------------------------------------------------
175  // stage 0
176  // --------------------------------------------------------------------------------
177  // generate addr, use addr to query DCache and DTLB
178  val s0_valid         = Wire(Bool())
179  val s0_kill          = Wire(Bool())
180  val s0_can_go        = s1_ready
181  val s0_fire          = s0_valid && s0_can_go
182  val s0_out           = Wire(new LqWriteBundle)
183
184  // flow source bundle
185  class FlowSource extends Bundle {
186    val vaddr         = UInt(VAddrBits.W)
187    val mask          = UInt((VLEN/8).W)
188    val uop           = new MicroOp
189    val try_l2l       = Bool()
190    val has_rob_entry = Bool()
191    val rsIdx         = UInt(log2Up(IssQueSize).W)
192    val rep_carry     = new ReplayCarry(nWays)
193    val mshrid        = UInt(log2Up(cfg.nMissEntries).W)
194    val isFirstIssue  = Bool()
195    val fast_rep      = Bool()
196    val ld_rep        = Bool()
197    val l2l_fwd       = Bool()
198    val prf           = Bool()
199    val prf_rd        = Bool()
200    val prf_wr        = Bool()
201    val sched_idx     = UInt(log2Up(LoadQueueReplaySize+1).W)
202  }
203  val s0_sel_src = Wire(new FlowSource)
204
205  // load flow select/gen
206  // src0: super load replayed by LSQ (cache miss replay) (io.replay)
207  // src1: fast load replay (io.fast_rep_in)
208  // src2: load replayed by LSQ (io.replay)
209  // src3: hardware prefetch from prefetcher (high confidence) (io.prefetch)
210  // src4: int read / software prefetch first issue from RS (io.in)
211  // src5: vec read first issue from RS (TODO)
212  // src6: load trying pointer chasing when there is no issued or replayed load (io.fastpath)
213  // src7: hardware prefetch from prefetcher (low confidence) (io.prefetch)
214  // priority: high to low
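  // A minimal standalone sketch (illustration only, not part of the design)
  // of the valid/ready chain built below: each source is "ready" only when
  // every strictly higher-priority source is invalid, so the select signals
  // are one-hot by construction. `prioritySelect` is a hypothetical helper:
  //
  //   def prioritySelect(valids: Seq[Bool]): Seq[Bool] = {
  //     // readys(i) = none of the higher-priority valids(0..i-1) is set
  //     val readys = valids.scanLeft(true.B)((r, v) => r && !v).init
  //     valids.zip(readys).map { case (v, r) => v && r }
  //   }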
215  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
216  val s0_super_ld_rep_valid  = io.replay.valid && io.replay.bits.forward_tlDchannel
217  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
218  val s0_ld_rep_valid        = io.replay.valid && !io.replay.bits.forward_tlDchannel && !s0_rep_stall
219  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
220  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
221  val s0_vec_iss_valid       = WireInit(false.B) // TODO
222  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid
223  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
224  dontTouch(s0_super_ld_rep_valid)
225  dontTouch(s0_ld_fast_rep_valid)
226  dontTouch(s0_ld_rep_valid)
227  dontTouch(s0_high_conf_prf_valid)
228  dontTouch(s0_int_iss_valid)
229  dontTouch(s0_vec_iss_valid)
230  dontTouch(s0_l2l_fwd_valid)
231  dontTouch(s0_low_conf_prf_valid)
232
233  // load flow source ready
234  val s0_super_ld_rep_ready  = WireInit(true.B)
235  val s0_ld_fast_rep_ready   = !s0_super_ld_rep_valid
236  val s0_ld_rep_ready        = !s0_super_ld_rep_valid &&
237                               !s0_ld_fast_rep_valid
238  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
239                               !s0_ld_fast_rep_valid &&
240                               !s0_ld_rep_valid
241
242  val s0_int_iss_ready       = !s0_super_ld_rep_valid &&
243                               !s0_ld_fast_rep_valid &&
244                               !s0_ld_rep_valid &&
245                               !s0_high_conf_prf_valid
246
247  val s0_vec_iss_ready       = !s0_super_ld_rep_valid &&
248                               !s0_ld_fast_rep_valid &&
249                               !s0_ld_rep_valid &&
250                               !s0_high_conf_prf_valid &&
251                               !s0_int_iss_valid
252
253  val s0_l2l_fwd_ready       = !s0_super_ld_rep_valid &&
254                               !s0_ld_fast_rep_valid &&
255                               !s0_ld_rep_valid &&
256                               !s0_high_conf_prf_valid &&
257                               !s0_int_iss_valid &&
258                               !s0_vec_iss_valid
259
260  val s0_low_conf_prf_ready  = !s0_super_ld_rep_valid &&
261                               !s0_ld_fast_rep_valid &&
262                               !s0_ld_rep_valid &&
263                               !s0_high_conf_prf_valid &&
264                               !s0_int_iss_valid &&
265                               !s0_vec_iss_valid &&
266                               !s0_l2l_fwd_valid
267  dontTouch(s0_super_ld_rep_ready)
268  dontTouch(s0_ld_fast_rep_ready)
269  dontTouch(s0_ld_rep_ready)
270  dontTouch(s0_high_conf_prf_ready)
271  dontTouch(s0_int_iss_ready)
272  dontTouch(s0_vec_iss_ready)
273  dontTouch(s0_l2l_fwd_ready)
274  dontTouch(s0_low_conf_prf_ready)
275
276  // load flow source select (OH)
277  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
278  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
279  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
280  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
281                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
282  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
283  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
284  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
285  assert(!s0_vec_iss_select) // to be added
286  dontTouch(s0_super_ld_rep_select)
287  dontTouch(s0_ld_fast_rep_select)
288  dontTouch(s0_ld_rep_select)
289  dontTouch(s0_hw_prf_select)
290  dontTouch(s0_int_iss_select)
291  dontTouch(s0_vec_iss_select)
292  dontTouch(s0_l2l_fwd_select)
293
294  s0_valid := (s0_super_ld_rep_valid ||
295               s0_ld_fast_rep_valid ||
296               s0_ld_rep_valid ||
297               s0_high_conf_prf_valid ||
298               s0_int_iss_valid ||
299               s0_vec_iss_valid ||
300               s0_l2l_fwd_valid ||
301               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill
302
303  // pointer chasing is tried only when S0's out is ready and dcache is ready
304  val s0_try_ptr_chasing      = s0_l2l_fwd_select
305  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
306  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
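  // Descriptive note: `+&` is Chisel's width-expanding add, so the sum above
  // is 7 bits. Bit 6 (the carry out of the low 6 bits) flags that base and
  // base + imm fall into different 64B cache sets, which s1_addr_mismatch
  // checks in stage 1 (together with imm(11, 6)). E.g. 0x08 +& 0x10 = 0x18
  // stays in the same set, while 0x38 +& 0x10 = 0x48 has bit 6 set.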
307  val s0_ptr_chasing_canceled = WireInit(false.B)
308  s0_kill := s0_ptr_chasing_canceled
309
310  // prefetch related ctrl signal
311  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
312  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready
313  val isHlv = WireInit(LSUOpType.isHlv(s0_sel_src.uop.ctrl.fuOpType))
314  val isHlvx = WireInit(LSUOpType.isHlvx(s0_sel_src.uop.ctrl.fuOpType))
315
316  // query DTLB
317  io.tlb.req.valid                   := s0_valid
318  io.tlb.req.bits.cmd                := Mux(s0_sel_src.prf,
319                                         Mux(s0_sel_src.prf_wr, TlbCmd.write, TlbCmd.read),
320                                         TlbCmd.read
321                                       )
322  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_sel_src.vaddr)
323  io.tlb.req.bits.size               := LSUOpType.size(s0_sel_src.uop.ctrl.fuOpType)
324  io.tlb.req.bits.kill               := s0_kill
325  io.tlb.req.bits.memidx.is_ld       := true.B
326  io.tlb.req.bits.memidx.is_st       := false.B
327  io.tlb.req.bits.memidx.idx         := s0_sel_src.uop.lqIdx.value
328  io.tlb.req.bits.debug.robIdx       := s0_sel_src.uop.robIdx
329  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr does not need to be translated
330  io.tlb.req.bits.debug.pc           := s0_sel_src.uop.cf.pc
331  io.tlb.req.bits.debug.isFirstIssue := s0_sel_src.isFirstIssue
332
333  // query DCache
334  io.dcache.req.valid             := s0_valid
335  io.dcache.req.bits.cmd          := Mux(s0_sel_src.prf_rd,
336                                      MemoryOpConstants.M_PFR,
337                                      Mux(s0_sel_src.prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
338                                    )
339  io.dcache.req.bits.vaddr        := s0_sel_src.vaddr
340  io.dcache.req.bits.mask         := s0_sel_src.mask
341  io.dcache.req.bits.data         := DontCare
342  io.dcache.req.bits.isFirstIssue := s0_sel_src.isFirstIssue
343  io.dcache.req.bits.instrtype    := Mux(s0_sel_src.prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
344  io.dcache.req.bits.debug_robIdx := s0_sel_src.uop.robIdx.value
345  io.dcache.req.bits.replayCarry  := s0_sel_src.rep_carry
346  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
347  io.dcache.pf_source             := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)
348  io.dcache.req.bits.lqIdx        := s0_sel_src.uop.lqIdx
349  // load flow priority mux
350  def fromNullSource(): FlowSource = {
351    val out = WireInit(0.U.asTypeOf(new FlowSource))
352    out
353  }
354
355  def fromFastReplaySource(src: LqWriteBundle): FlowSource = {
356    val out = WireInit(0.U.asTypeOf(new FlowSource))
357    out.vaddr         := src.vaddr
358    out.mask          := src.mask
359    out.uop           := src.uop
360    out.try_l2l       := false.B
361    out.has_rob_entry := src.hasROBEntry
362    out.rep_carry     := src.rep_info.rep_carry
363    out.mshrid        := src.rep_info.mshr_id
364    out.rsIdx         := src.rsIdx
365    out.isFirstIssue  := false.B
366    out.fast_rep      := true.B
367    out.ld_rep        := src.isLoadReplay
368    out.l2l_fwd       := false.B
369    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
370    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
371    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
372    out.sched_idx     := src.schedIndex
373    out
374  }
375
376  def fromNormalReplaySource(src: LsPipelineBundle): FlowSource = {
377    val out = WireInit(0.U.asTypeOf(new FlowSource))
378    out.vaddr         := src.vaddr
379    out.mask          := genVWmask(src.vaddr, src.uop.ctrl.fuOpType(1, 0))
380    out.uop           := src.uop
381    out.try_l2l       := false.B
382    out.has_rob_entry := true.B
383    out.rsIdx         := src.rsIdx
384    out.rep_carry     := src.replayCarry
385    out.mshrid        := src.mshrid
386    out.isFirstIssue  := false.B
387    out.fast_rep      := false.B
388    out.ld_rep        := true.B
389    out.l2l_fwd       := false.B
390    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
391    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
392    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
393    out.sched_idx     := src.schedIndex
394    out
395  }
396
397  def fromPrefetchSource(src: L1PrefetchReq): FlowSource = {
398    val out = WireInit(0.U.asTypeOf(new FlowSource))
399    out.vaddr         := src.getVaddr()
400    out.mask          := 0.U
401    out.uop           := DontCare
402    out.try_l2l       := false.B
403    out.has_rob_entry := false.B
404    out.rsIdx         := 0.U
405    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
406    out.mshrid        := 0.U
407    out.isFirstIssue  := false.B
408    out.fast_rep      := false.B
409    out.ld_rep        := false.B
410    out.l2l_fwd       := false.B
411    out.prf           := true.B
412    out.prf_rd        := !src.is_store
413    out.prf_wr        := src.is_store
414    out.sched_idx     := 0.U
415    out
416  }
417
418  def fromIntIssueSource(src: ExuInput): FlowSource = {
419    val out = WireInit(0.U.asTypeOf(new FlowSource))
420    out.vaddr         := src.src(0) + SignExt(src.uop.ctrl.imm(11, 0), VAddrBits)
421    out.mask          := genVWmask(out.vaddr, src.uop.ctrl.fuOpType(1,0))
422    out.uop           := src.uop
423    out.try_l2l       := false.B
424    out.has_rob_entry := true.B
425    out.rsIdx         := io.rsIdx
426    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
427    out.mshrid        := 0.U
428    out.isFirstIssue  := true.B
429    out.fast_rep      := false.B
430    out.ld_rep        := false.B
431    out.l2l_fwd       := false.B
432    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
433    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
434    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
435    out.sched_idx     := 0.U
436    out
437  }
438
439  def fromVecIssueSource(): FlowSource = {
440    val out = WireInit(0.U.asTypeOf(new FlowSource))
441    out.vaddr         := 0.U
442    out.mask          := 0.U
443    out.uop           := 0.U.asTypeOf(new MicroOp)
444    out.try_l2l       := false.B
445    out.has_rob_entry := false.B
446    out.rsIdx         := 0.U
447    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
448    out.mshrid        := 0.U
449    out.isFirstIssue  := false.B
450    out.fast_rep      := false.B
451    out.ld_rep        := false.B
452    out.l2l_fwd       := false.B
453    out.prf           := false.B
454    out.prf_rd        := false.B
455    out.prf_wr        := false.B
456    out.sched_idx     := 0.U
457    out
458  }
459
460  def fromLoadToLoadSource(src: LoadToLoadIO): FlowSource = {
461    val out = WireInit(0.U.asTypeOf(new FlowSource))
462    out.vaddr              := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0))
463    out.mask               := genVWmask(0.U, LSUOpType.ld)
464    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
465    // Assume the pointer chasing is always ld.
466    out.uop.ctrl.fuOpType  := LSUOpType.ld
467    out.try_l2l            := true.B
468    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer chasing
469    // because these signals will be updated in S1
470    out.has_rob_entry      := false.B
471    out.rsIdx              := 0.U
472    out.mshrid             := 0.U
473    out.rep_carry          := 0.U.asTypeOf(out.rep_carry.cloneType)
474    out.isFirstIssue       := true.B
475    out.fast_rep           := false.B
476    out.ld_rep             := false.B
477    out.l2l_fwd            := true.B
478    out.prf                := false.B
479    out.prf_rd             := false.B
480    out.prf_wr             := false.B
481    out.sched_idx          := 0.U
482    out
483  }
484
485  // set default
486  val s0_src_selector = Seq(
487    s0_super_ld_rep_select,
488    s0_ld_fast_rep_select,
489    s0_ld_rep_select,
490    s0_hw_prf_select,
491    s0_int_iss_select,
492    s0_vec_iss_select,
493    (if (EnableLoadToLoadForward) s0_l2l_fwd_select else true.B)
494  )
495  val s0_src_format = Seq(
496    fromNormalReplaySource(io.replay.bits),
497    fromFastReplaySource(io.fast_rep_in.bits),
498    fromNormalReplaySource(io.replay.bits),
499    fromPrefetchSource(io.prefetch_req.bits),
500    fromIntIssueSource(io.ldin.bits),
501    fromVecIssueSource(),
502    (if (EnableLoadToLoadForward) fromLoadToLoadSource(io.l2l_fwd_in) else fromNullSource())
503  )
504  s0_sel_src := ParallelPriorityMux(s0_src_selector, s0_src_format)
505
506  // address align check
507  val s0_addr_aligned = LookupTree(s0_sel_src.uop.ctrl.fuOpType(1, 0), List(
508    "b00".U   -> true.B,                   //b
509    "b01".U   -> (s0_sel_src.vaddr(0)    === 0.U), //h
510    "b10".U   -> (s0_sel_src.vaddr(1, 0) === 0.U), //w
511    "b11".U   -> (s0_sel_src.vaddr(2, 0) === 0.U)  //d
512  ))
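  // Worked example (sketch): a load word has fuOpType(1, 0) = "b10", so the
  // lookup requires vaddr(1, 0) === 0. A lw from 0x80000002 fails the check
  // and raises loadAddrMisaligned below, while a lw from 0x80000004 passes.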
513
514  // accept load flow if dcache ready (tlb is always ready)
515  // TODO: prefetch needs to write back to loadQueueFlag
516  s0_out               := DontCare
517  s0_out.rsIdx         := s0_sel_src.rsIdx
518  s0_out.vaddr         := s0_sel_src.vaddr
519  s0_out.mask          := s0_sel_src.mask
520  s0_out.uop           := s0_sel_src.uop
521  s0_out.isFirstIssue  := s0_sel_src.isFirstIssue
522  s0_out.hasROBEntry   := s0_sel_src.has_rob_entry
523  s0_out.isPrefetch    := s0_sel_src.prf
524  s0_out.isHWPrefetch  := s0_hw_prf_select
525  s0_out.isFastReplay  := s0_sel_src.fast_rep
526  s0_out.isLoadReplay  := s0_sel_src.ld_rep
527  s0_out.isFastPath    := s0_sel_src.l2l_fwd
528  s0_out.mshrid        := s0_sel_src.mshrid
529  s0_out.uop.cf.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned
530  s0_out.forward_tlDchannel := s0_super_ld_rep_select
531  when(io.tlb.req.valid && s0_sel_src.isFirstIssue) {
532    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
533  }.otherwise{
534    s0_out.uop.debugInfo.tlbFirstReqTime := s0_sel_src.uop.debugInfo.tlbFirstReqTime
535  }
536  s0_out.schedIndex     := s0_sel_src.sched_idx
537
538  // load fast replay
539  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)
540
541  // load flow source ready
542  // cache missed load has highest priority
543  // always accept cache missed load flow from load replay queue
544  io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))
545
546  // accept load flow from rs when:
547  // 1) there is no lsq-replayed load
548  // 2) there is no fast replayed load
549  // 3) there is no high confidence prefetch request
550  io.ldin.ready := (s0_can_go && io.dcache.req.ready && s0_int_iss_ready)
551
552  // for hw prefetch load flow feedback, to be added later
553  // io.prefetch_in.ready := s0_hw_prf_select
554
555  // dcache replacement extra info
556  // TODO: should prefetch load update replacement?
557  io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.replay.bits.replacementUpdated, false.B)
558
559  XSDebug(io.dcache.req.fire,
560    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_sel_src.uop.cf.pc)}, vaddr ${Hexadecimal(s0_sel_src.vaddr)}\n"
561  )
562  XSDebug(s0_valid,
563    p"S0: pc ${Hexadecimal(s0_out.uop.cf.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
564    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")
565
566  // Pipeline
567  // --------------------------------------------------------------------------------
568  // stage 1
569  // --------------------------------------------------------------------------------
570  // TLB resp (send paddr to dcache)
571  val s1_valid      = RegInit(false.B)
572  val s1_in         = Wire(new LqWriteBundle)
573  val s1_out        = Wire(new LqWriteBundle)
574  val s1_kill       = Wire(Bool())
575  val s1_can_go     = s2_ready
576  val s1_fire       = s1_valid && !s1_kill && s1_can_go
577
578  s1_ready := !s1_valid || s1_kill || s2_ready
579  when (s0_fire) { s1_valid := true.B }
580  .elsewhen (s1_fire) { s1_valid := false.B }
581  .elsewhen (s1_kill) { s1_valid := false.B }
582  s1_in   := RegEnable(s0_out, s0_fire)
583
584  val s1_fast_rep_dly_kill = RegNext(io.fast_rep_in.bits.lateKill) && s1_in.isFastReplay
585  val s1_fast_rep_dly_err =  RegNext(io.fast_rep_in.bits.delayedLoadError) && s1_in.isFastReplay
586  val s1_l2l_fwd_dly_err  = RegNext(io.l2l_fwd_in.dly_ld_err) && s1_in.isFastPath
587  val s1_dly_err          = s1_fast_rep_dly_err || s1_l2l_fwd_dly_err
588  val s1_vaddr_hi         = Wire(UInt())
589  val s1_vaddr_lo         = Wire(UInt())
590  val s1_vaddr            = Wire(UInt())
591  val s1_paddr_dup_lsu    = Wire(UInt())
592  val s1_paddr_dup_dcache = Wire(UInt())
592  val s1_exception        = ExceptionNO.selectByFu(s1_out.uop.cf.exceptionVec, lduCfg).asUInt.orR   // af & pf exceptions are modified below.
594  val s1_tlb_miss         = io.tlb.resp.bits.miss
595  val s1_prf              = s1_in.isPrefetch
596  val s1_hw_prf           = s1_in.isHWPrefetch
597  val s1_sw_prf           = s1_prf && !s1_hw_prf
598  val s1_tlb_memidx       = io.tlb.resp.bits.memidx
599
600  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
601  s1_vaddr_lo         := s1_in.vaddr(5, 0)
602  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
603  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
604  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)
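  // Descriptive note: the tlb resp carries two copies of the same paddr; one
  // copy feeds the LSU datapath and the other feeds the dcache, so each
  // consumer gets a dedicated, lightly-loaded signal (presumably duplicated
  // for timing / fanout reasons).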
605
606  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
607    // printf("load idx = %d\n", s1_tlb_memidx.idx)
608    s1_out.uop.debugInfo.tlbRespTime := GTimer()
609  }
610
611  io.tlb.req_kill   := s1_kill || s1_dly_err
612  io.tlb.resp.ready := true.B
613
614  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
615  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
616  io.dcache.s1_kill             := s1_kill || s1_dly_err || s1_tlb_miss || s1_exception
617
618  // store to load forwarding
619  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
620  io.sbuffer.vaddr := s1_vaddr
621  io.sbuffer.paddr := s1_paddr_dup_lsu
623  io.sbuffer.uop   := s1_in.uop
624  io.sbuffer.sqIdx := s1_in.uop.sqIdx
625  io.sbuffer.mask  := s1_in.mask
626  io.sbuffer.pc    := s1_in.uop.cf.pc // FIXME: remove it
627
628  io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
629  io.lsq.forward.vaddr     := s1_vaddr
630  io.lsq.forward.paddr     := s1_paddr_dup_lsu
632  io.lsq.forward.uop       := s1_in.uop
633  io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
634  io.lsq.forward.sqIdxMask := 0.U
635  io.lsq.forward.mask      := s1_in.mask
636  io.lsq.forward.pc        := s1_in.uop.cf.pc // FIXME: remove it
637
638  // st-ld violation query
639  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
640                       io.stld_nuke_query(w).valid && // query valid
641                       isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
642                       // TODO: fix me when vector instructions are supported
643                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
644                       (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data masks overlap
645                      })).asUInt.orR && !s1_tlb_miss
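  // Worked example (sketch, assuming byte masks positioned by the low address
  // bits as genVWmask produces): paddr is compared at 8-byte granularity
  // (bits PAddrBits-1 downto 3) and the byte masks must intersect. A store to
  // 0x1000 with mask 0x0f nukes a younger 2-byte load at 0x1002 (mask 0x0c,
  // same 8B block, masks overlap) but not a 4-byte load at 0x1004 (mask 0xf0,
  // same block, disjoint bytes).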
646
647  s1_out                   := s1_in
648  s1_out.vaddr             := s1_vaddr
649  s1_out.paddr             := s1_paddr_dup_lsu
650  s1_out.tlbMiss           := s1_tlb_miss
651  s1_out.ptwBack           := io.tlb.resp.bits.ptwBack
652  s1_out.rsIdx             := s1_in.rsIdx
653  s1_out.rep_info.debug    := s1_in.uop.debugInfo
654  s1_out.rep_info.nuke     := s1_nuke && !s1_sw_prf
655  s1_out.delayedLoadError  := s1_dly_err
656
657  when (!s1_dly_err) {
658    // the current ori test causes the ldest == 0 case; the code below will be modified in the future.
659    // af & pf exception were modified
660    s1_out.uop.cf.exceptionVec(loadPageFault)   := io.tlb.resp.bits.excp(0).pf.ld && !s1_tlb_miss
661    s1_out.uop.cf.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld && !s1_tlb_miss
662  } .otherwise {
663    s1_out.uop.cf.exceptionVec(loadPageFault)      := false.B
664    s1_out.uop.cf.exceptionVec(loadAddrMisaligned) := false.B
665    s1_out.uop.cf.exceptionVec(loadAccessFault)    := s1_dly_err
666  }
667
668  // pointer chasing
669  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
670  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
671  val s1_fu_op_type_not_ld     = WireInit(false.B)
672  val s1_not_fast_match        = WireInit(false.B)
673  val s1_addr_mismatch         = WireInit(false.B)
674  val s1_addr_misaligned       = WireInit(false.B)
675  val s1_fast_mismatch         = WireInit(false.B)
676  val s1_ptr_chasing_canceled  = WireInit(false.B)
677  val s1_cancel_ptr_chasing    = WireInit(false.B)
678
679  s1_kill := s1_fast_rep_dly_kill ||
680             s1_cancel_ptr_chasing ||
681             s1_in.uop.robIdx.needFlush(io.redirect) ||
682            (s1_in.uop.robIdx.needFlush(RegNext(io.redirect)) && !RegNext(s0_try_ptr_chasing)) ||
683             RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid)
684
685  if (EnableLoadToLoadForward) {
686    // Sometimes, we need to cancel the load-load forwarding.
687    // These can be put at S0 if timing is bad at S1.
688    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
689    s1_addr_mismatch     := s1_ptr_chasing_vaddr(6) ||
690                             RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
691    // Case 1: the address is not 64-bit aligned or the fuOpType is not LD
692    s1_addr_misaligned := s1_ptr_chasing_vaddr(2, 0).orR
693    s1_fu_op_type_not_ld := io.ldin.bits.uop.ctrl.fuOpType =/= LSUOpType.ld
694    // Case 2: this load-load uop is cancelled
695    s1_ptr_chasing_canceled := !io.ldin.valid
696    // Case 3: fast mismatch
697    s1_fast_mismatch := RegEnable(!io.ld_fast_match, s0_do_try_ptr_chasing)
698
699    when (s1_try_ptr_chasing) {
700      s1_cancel_ptr_chasing := s1_addr_mismatch ||
701                               s1_addr_misaligned ||
702                               s1_fu_op_type_not_ld ||
703                               s1_ptr_chasing_canceled ||
704                               s1_fast_mismatch
705
706      s1_in.uop           := io.ldin.bits.uop
707      s1_in.rsIdx         := io.rsIdx
708      s1_in.isFirstIssue  := io.isFirstIssue
709      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
710      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
711      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
712
713      // record tlb resp time when the data is obtained, to keep the latency calculation correct (it should not really be recorded here, because this path does not use the tlb)
714      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
715      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
716    }
717    when (!s1_cancel_ptr_chasing) {
718      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire
719      when (s1_try_ptr_chasing) {
720        io.ldin.ready := true.B
721      }
722    }
723  }
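  // Descriptive note on the protocol above: when chasing is not cancelled,
  // io.ldin.ready is forced so the RS-issued uop is absorbed into the s1
  // chasing flow (its uop / rsIdx / isFirstIssue are patched into s1_in), and
  // the duplicate flow in s0 is killed through s0_ptr_chasing_canceled unless
  // s0 was actually won by a replay or fast-replay flow. When chasing is
  // cancelled, s1 is killed via s1_cancel_ptr_chasing and the load executes
  // through the normal path instead.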
724
725  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
726  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
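  // Descriptive note: UIntToMask(n, w) yields a w-bit mask with the n least
  // significant bits set, e.g. UIntToMask(3.U, 8) = "b00000111", marking the
  // store queue entries below the load's sqIdx (wrap-around is handled by the
  // queue flag bits) as forwarding candidates.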
727  // to enable load-load, sqIdxMask must be calculated based on ldin.uop
728  // If the timing here is not OK, load-load forwarding has to be disabled.
729  // Or we calculate sqIdxMask at RS??
730  io.lsq.forward.sqIdxMask := s1_sqIdx_mask
731  if (EnableLoadToLoadForward) {
732    when (s1_try_ptr_chasing) {
733      io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize)
734    }
735  }
736
737  io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel
738  io.forward_mshr.mshrid := s1_out.mshrid
739  io.forward_mshr.paddr  := s1_out.paddr
740
741  XSDebug(s1_valid,
742    p"S1: pc ${Hexadecimal(s1_out.uop.cf.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
743    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")
744
745  // Pipeline
746  // --------------------------------------------------------------------------------
747  // stage 2
748  // --------------------------------------------------------------------------------
749  // s2: DCache resp
750  val s2_valid  = RegInit(false.B)
751  val s2_in     = Wire(new LqWriteBundle)
752  val s2_out    = Wire(new LqWriteBundle)
753  val s2_kill   = Wire(Bool())
754  val s2_can_go = s3_ready
755  val s2_fire   = s2_valid && !s2_kill && s2_can_go
756
757  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
758  s2_ready := !s2_valid || s2_kill || s3_ready
759  when (s1_fire) { s2_valid := true.B }
760  .elsewhen (s2_fire) { s2_valid := false.B }
761  .elsewhen (s2_kill) { s2_valid := false.B }
762  s2_in := RegEnable(s1_out, s1_fire)
763
764  val s2_pmp = WireInit(io.pmp)
765
766  val s2_prf    = s2_in.isPrefetch
767  val s2_hw_prf = s2_in.isHWPrefetch
768
769  // exceptions that may cause the load addr to be invalid / illegal
770  // if such an exception happens, the inst and its exception info
771  // will be force-written back to the rob
772  val s2_exception_vec = WireInit(s2_in.uop.cf.exceptionVec)
773  when (!s2_in.delayedLoadError) {
774    s2_exception_vec(loadAccessFault) := s2_in.uop.cf.exceptionVec(loadAccessFault) || s2_pmp.ld ||
775                                       (io.dcache.resp.bits.tag_error && RegNext(io.csrCtrl.cache_error_enable))
776  }
777
778  // soft prefetch will not trigger any exception (but ecc error interrupt may
779  // be triggered)
780  when (!s2_in.delayedLoadError && (s2_prf || s2_in.tlbMiss)) {
781    s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
782  }
783  val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, lduCfg).asUInt.orR
784
785  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
786  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward()
787  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)
788
789  // writeback access fault caused by ecc error / bus error
790  // * ecc data error is slow to generate, so we will not use it until load stage 3
791  // * in load stage 3, an extra signal io.load_error will be used to report it
792  val s2_actually_mmio = s2_pmp.mmio
793  val s2_mmio          = !s2_prf &&
794                          s2_actually_mmio &&
795                         !s2_exception &&
796                         !s2_in.tlbMiss
797
798  val s2_full_fwd      = Wire(Bool())
799  val s2_mem_amb       = s2_in.uop.cf.storeSetHit &&
800                         io.lsq.forward.addrInvalid
801
802  val s2_tlb_miss      = s2_in.tlbMiss
803  val s2_fwd_fail      = io.lsq.forward.dataInvalid
804  val s2_dcache_miss   = io.dcache.resp.bits.miss &&
805                         !s2_fwd_frm_d_chan_or_mshr &&
806                         !s2_full_fwd
807
808  val s2_mq_nack       = io.dcache.s2_mq_nack &&
809                         !s2_fwd_frm_d_chan_or_mshr &&
810                         !s2_full_fwd
811
812  val s2_bank_conflict = io.dcache.s2_bank_conflict &&
813                         !s2_fwd_frm_d_chan_or_mshr &&
814                         !s2_full_fwd
815
816  val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail &&
817                        !s2_fwd_frm_d_chan_or_mshr &&
818                        !s2_full_fwd
819
820  val s2_rar_nack      = io.lsq.ldld_nuke_query.req.valid &&
821                         !io.lsq.ldld_nuke_query.req.ready
822
823  val s2_raw_nack      = io.lsq.stld_nuke_query.req.valid &&
824                         !io.lsq.stld_nuke_query.req.ready
825  // st-ld violation query
826  //  NeedFastRecovery is valid when:
827  //  1. the fast recovery query request is valid,
828  //  2. the load instruction is younger than the requestors (store instructions),
829  //  3. the physical addresses match, and
830  //  4. the data masks overlap.
831  val s2_nuke          = VecInit((0 until StorePipelineWidth).map(w => {
832                          io.stld_nuke_query(w).valid && // query valid
833                          isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
834                          // TODO: fix me when vector instructions are supported
835                          (s2_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
836                          (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data masks overlap
837                        })).asUInt.orR && !s2_tlb_miss || s2_in.rep_info.nuke
838
839  val s2_cache_handled   = io.dcache.resp.bits.handled
840  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
841                           io.dcache.resp.bits.tag_error
842
843  val s2_troublem        = !s2_exception &&
844                           !s2_mmio &&
845                           !s2_prf &&
846                           !s2_in.delayedLoadError
847
848  io.dcache.resp.ready  := true.B
849  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_in.delayedLoadError || s2_mmio || s2_prf)
850  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")
851
852  // fast replay request
853  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
854  val s2_nuke_fast_rep   = !s2_mq_nack &&
855                           !s2_dcache_miss &&
856                           !s2_bank_conflict &&
857                           !s2_wpu_pred_fail &&
858                           !s2_rar_nack &&
859                           !s2_raw_nack &&
860                           s2_nuke
861
862  val s2_fast_rep = !s2_mem_amb &&
863                    !s2_tlb_miss &&
864                    !s2_fwd_fail &&
865                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
866                    s2_troublem
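  // Descriptive note: fast replay (s2 -> io.fast_rep_out -> s0 next cycle) is
  // reserved for transient conditions that need no waiting: mshr nack, bank
  // conflict, way-predictor mispredict, or an st-ld nuke. Conditions that
  // must wait for an event (memory ambiguity, tlb miss, forward fail, dcache
  // miss, RAR/RAW queue nack) go through LoadQueueReplay instead.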
867
868  // need to allocate a new entry
869  val s2_can_query = !s2_mem_amb &&
870                     !s2_tlb_miss &&
871                     !s2_fwd_fail &&
872                     s2_troublem
873
874  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)
875
876  // ld-ld violation query request
877  io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
878  io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
879  io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
880  io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
881  io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)
882
883  // st-ld violation query request
884  io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
885  io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
886  io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
887  io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
888  io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)
889
890  // merge forward result
891  // lsq has higher priority than sbuffer
892  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
893  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
894  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid
895  // generate VLEN/8 byte-wise muxes
896  for (i <- 0 until VLEN / 8) {
897    s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i)
898    s2_fwd_data(i) := Mux(io.lsq.forward.forwardMask(i), io.lsq.forward.forwardData(i), io.sbuffer.forwardData(i))
899  }
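  // Worked example (sketch): if byte 0 is forwarded by both the lsq and the
  // sbuffer while byte 1 is forwarded only by the sbuffer, then both mask
  // bits are set, byte 0 takes the lsq data (the lsq holds younger,
  // uncommitted stores, so it wins) and byte 1 takes the sbuffer data.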
900
901  XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
902    s2_in.uop.cf.pc,
903    io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt,
904    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
905  )
906
907  //
908  s2_out                     := s2_in
909  s2_out.data                := 0.U // data will be generated in load s3
910  s2_out.uop.ctrl.fpWen      := s2_in.uop.ctrl.fpWen && !s2_exception
911  s2_out.mmio                := s2_mmio
912  s2_out.uop.ctrl.flushPipe  := false.B
913  s2_out.uop.cf.exceptionVec := s2_exception_vec
914  s2_out.forwardMask         := s2_fwd_mask
915  s2_out.forwardData         := s2_fwd_data
916  s2_out.handledByMSHR       := s2_cache_handled
917  s2_out.miss                := s2_dcache_miss && s2_troublem
918  s2_out.feedbacked          := io.feedback_fast.valid
919
920  // Generate replay signal caused by:
921  // * st-ld violation check
922  // * tlb miss
923  // * dcache replay
924  // * forward data invalid
925  // * dcache miss
926  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
927  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
928  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
929  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
930  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
931  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
932  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
933  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
934  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
935  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
936  s2_out.rep_info.full_fwd        := s2_data_fwded
937  s2_out.rep_info.data_inv_sq_idx := io.lsq.forward.dataInvalidSqIdx
938  s2_out.rep_info.addr_inv_sq_idx := io.lsq.forward.addrInvalidSqIdx
939  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
940  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
941  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
942  s2_out.rep_info.debug           := s2_in.uop.debugInfo
943  s2_out.rep_info.tlb_id          := io.tlb_hint.id
944  s2_out.rep_info.tlb_full        := io.tlb_hint.full
945
946  // if forward fail, replay this inst from fetch
947  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
948  // if ld-ld violation is detected, replay this inst from fetch
949  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss
950  // io.out.bits.uop.ctrl.replayInst := false.B
951
952  // to be removed
953  io.feedback_fast.valid                 := false.B
954  io.feedback_fast.bits.hit              := false.B
955  io.feedback_fast.bits.flushState       := s2_in.ptwBack
956  io.feedback_fast.bits.rsIdx            := s2_in.rsIdx
957  io.feedback_fast.bits.sourceType       := RSFeedbackType.lrqFull
958  io.feedback_fast.bits.dataInvalidSqIdx := DontCare
959
960  // fast wakeup
961  io.fast_uop.valid := RegNext(
962    !io.dcache.s1_disable_fast_wakeup &&
963    s1_valid &&
964    !s1_kill &&
965    !io.tlb.resp.bits.miss &&
966    !io.lsq.forward.dataInvalidFast
967  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio)
968  io.fast_uop.bits := RegNext(s1_out.uop)
969
970  //
971  io.s2_ptr_chasing                    := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)
972
973  // RegNext prefetch train for better timing
974  // ** Now, prefetch train is valid at load s3 **
975  io.prefetch_train.valid              := RegNext(s2_valid && !s2_actually_mmio && !s2_in.tlbMiss)
976  io.prefetch_train.bits.fromLsPipelineBundle(s2_in, latch = true)
977  io.prefetch_train.bits.miss          := RegNext(io.dcache.resp.bits.miss) // TODO: use trace with bank conflict?
978  io.prefetch_train.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
979  io.prefetch_train.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)
980
981  io.prefetch_train_l1.valid              := RegNext(s2_valid && !s2_actually_mmio)
982  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in, latch = true)
983  io.prefetch_train_l1.bits.miss          := RegNext(io.dcache.resp.bits.miss)
984  io.prefetch_train_l1.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
985  io.prefetch_train_l1.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)
986  if (env.FPGAPlatform){
987    io.dcache.s0_pc := DontCare
988    io.dcache.s1_pc := DontCare
989    io.dcache.s2_pc := DontCare
990  }else{
991    io.dcache.s0_pc := s0_out.uop.cf.pc
992    io.dcache.s1_pc := s1_out.uop.cf.pc
993    io.dcache.s2_pc := s2_out.uop.cf.pc
994  }
995  io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill
996
997  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready
998  val s2_ld_valid_dup = RegInit(0.U(6.W))
999  s2_ld_valid_dup := 0x0.U(6.W)
1000  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
1001  when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) }
1002  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))
1003
1004  // Pipeline
1005  // --------------------------------------------------------------------------------
1006  // stage 3
1007  // --------------------------------------------------------------------------------
1008  // writeback and update load queue
1009  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
1010  val s3_in           = RegEnable(s2_out, s2_fire)
1011  val s3_out          = Wire(Valid(new ExuOutput))
1012  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
1013  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
1014  val s3_fast_rep     = Wire(Bool())
1015  val s3_troublem     = RegNext(s2_troublem)
1016  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
1017  s3_ready := !s3_valid || s3_kill || io.ldout.ready
1018
1019  // forward last beat
1020  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
1021  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1022  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid)
1023
1024  val s3_fast_rep_canceled = io.replay.valid && io.replay.bits.forward_tlDchannel || !io.dcache.req.ready
1025  io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || s3_fast_rep_canceled) && !s3_in.feedbacked
1026  io.lsq.ldin.bits := s3_in
1027  io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid
1028
1029  /* <------- DANGEROUS: Don't change sequence here ! -------> */
1030  io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
1031  io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
1032  io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)
1033
1034  val s3_dly_ld_err =
1035    if (EnableAccurateLoadError) {
1036      io.dcache.resp.bits.error_delayed && RegNext(io.csrCtrl.cache_error_enable) && s3_troublem
1037    } else {
1038      WireInit(false.B)
1039    }
1040  io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
1041  io.lsq.ldin.bits.dcacheRequireReplay  := s3_dcache_rep
1042  io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err
1043
1044  val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid) && s3_troublem
1045  val s3_rep_frm_fetch = s3_vp_match_fail
1046  val s3_ldld_rep_inst =
1047      io.lsq.ldld_nuke_query.resp.valid &&
1048      io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
1049      RegNext(io.csrCtrl.ldld_vio_check_enable)
1050  val s3_flushPipe = s3_ldld_rep_inst
1051
1052  val s3_rep_info = WireInit(s3_in.rep_info)
1053  s3_rep_info.dcache_miss   := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid
1054  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)
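  // Descriptive note (assuming the C_* indices follow the alias order in
  // LoadToLsqReplayIO, with C_MA at index 0): PriorityEncoderOH keeps only
  // the lowest set bit, i.e. the highest-priority cause. If both tlb_miss and
  // dcache_miss are set, only tlb_miss survives in s3_sel_rep_cause, so each
  // replay round handles a single cause.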
1055
1056  val s3_exception = ExceptionNO.selectByFu(s3_in.uop.cf.exceptionVec, lduCfg).asUInt.orR
1057  when (s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) {
1058    io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
1059  } .otherwise {
1060    io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
1061  }
1062
1063  // Int load, if hit, will be written back at s3
1064  s3_out.valid                := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio
1065  s3_out.bits.uop             := s3_in.uop
1066  s3_out.bits.uop.cf.exceptionVec(loadAccessFault) := s3_dly_ld_err  || s3_in.uop.cf.exceptionVec(loadAccessFault)
1067  s3_out.bits.uop.ctrl.flushPipe := false.B
1068  s3_out.bits.uop.ctrl.replayInst := false.B
1069  s3_out.bits.data            := s3_in.data
1070  s3_out.bits.redirectValid   := false.B
1071  s3_out.bits.redirect        := DontCare
1072  s3_out.bits.debug.isMMIO    := s3_in.mmio
1073  s3_out.bits.debug.isPerfCnt := false.B
1074  s3_out.bits.debug.paddr     := s3_in.paddr
1075  s3_out.bits.debug.vaddr     := s3_in.vaddr
1076  s3_out.bits.fflags          := DontCare
1077
1078  io.rollback.valid := s3_valid && (s3_rep_frm_fetch || s3_flushPipe) && !s3_exception
1079  io.rollback.bits             := DontCare
1080  io.rollback.bits.isRVC       := s3_out.bits.uop.cf.pd.isRVC
1081  io.rollback.bits.robIdx      := s3_out.bits.uop.robIdx
1082  io.rollback.bits.ftqIdx      := s3_out.bits.uop.cf.ftqPtr
1083  io.rollback.bits.ftqOffset   := s3_out.bits.uop.cf.ftqOffset
1084  io.rollback.bits.level       := Mux(s3_rep_frm_fetch, RedirectLevel.flush, RedirectLevel.flushAfter)
1085  io.rollback.bits.cfiUpdate.target := s3_out.bits.uop.cf.pc
1086  io.rollback.bits.debug_runahead_checkpoint_id := s3_out.bits.uop.debugInfo.runahead_checkpoint_id
1087  /* <------- DANGEROUS: Don't change sequence here ! -------> */
1088
1089  io.lsq.ldin.bits.uop := s3_out.bits.uop
1090
1091  val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep
1092  io.lsq.ldld_nuke_query.revoke := s3_revoke
1093  io.lsq.stld_nuke_query.revoke := s3_revoke
1094
1095  // feedback slow
1096  s3_fast_rep := RegNext(s2_fast_rep)
1097
1098  val s3_fb_no_waiting = !s3_in.isLoadReplay &&
1099                        (!(s3_fast_rep && !s3_fast_rep_canceled)) &&
1100                        !s3_in.feedbacked
1101
1102  //
1103  io.feedback_slow.valid                 := s3_valid && s3_fb_no_waiting
1104  io.feedback_slow.bits.hit              := !s3_rep_info.need_rep || io.lsq.ldin.ready
1105  io.feedback_slow.bits.flushState       := s3_in.ptwBack
1106  io.feedback_slow.bits.rsIdx            := s3_in.rsIdx
1107  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
1108  io.feedback_slow.bits.dataInvalidSqIdx := DontCare
1109
1110  val s3_ld_wb_meta = Mux(s3_valid, s3_out.bits, io.lsq.uncache.bits)
1111
1112  // data from load queue (uncache buffer)
1113  val s3_ld_raw_data_frm_uncache = io.lsq.ld_raw_data
1114  val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData()
1115  val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List(
1116    "b000".U -> s3_merged_data_frm_uncache(63,  0),
1117    "b001".U -> s3_merged_data_frm_uncache(63,  8),
1118    "b010".U -> s3_merged_data_frm_uncache(63, 16),
1119    "b011".U -> s3_merged_data_frm_uncache(63, 24),
1120    "b100".U -> s3_merged_data_frm_uncache(63, 32),
1121    "b101".U -> s3_merged_data_frm_uncache(63, 40),
1122    "b110".U -> s3_merged_data_frm_uncache(63, 48),
1123    "b111".U -> s3_merged_data_frm_uncache(63, 56)
1124  ))
1125  val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache)
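  // Worked example (sketch): addrOffset = "b010" selects
  // s3_merged_data_frm_uncache(63, 16), i.e. the merged doubleword shifted
  // right by two bytes; rdataHelper then sign- or zero-extends the low bytes
  // according to the uop's load type (lb/lbu/lh/.../ld).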
1126
1127  // data from dcache hit
1128  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
1129  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
1130  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
1131  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
1132  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
1133  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
1134  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
1135  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
1136  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
1137  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
1138  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1139
1140  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
1141  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
1142    "b0000".U -> s3_merged_data_frm_cache(63,    0),
1143    "b0001".U -> s3_merged_data_frm_cache(63,    8),
1144    "b0010".U -> s3_merged_data_frm_cache(63,   16),
1145    "b0011".U -> s3_merged_data_frm_cache(63,   24),
1146    "b0100".U -> s3_merged_data_frm_cache(63,   32),
1147    "b0101".U -> s3_merged_data_frm_cache(63,   40),
1148    "b0110".U -> s3_merged_data_frm_cache(63,   48),
1149    "b0111".U -> s3_merged_data_frm_cache(63,   56),
1150    "b1000".U -> s3_merged_data_frm_cache(127,  64),
1151    "b1001".U -> s3_merged_data_frm_cache(127,  72),
1152    "b1010".U -> s3_merged_data_frm_cache(127,  80),
1153    "b1011".U -> s3_merged_data_frm_cache(127,  88),
1154    "b1100".U -> s3_merged_data_frm_cache(127,  96),
1155    "b1101".U -> s3_merged_data_frm_cache(127, 104),
1156    "b1110".U -> s3_merged_data_frm_cache(127, 112),
1157    "b1111".U -> s3_merged_data_frm_cache(127, 120)
1158  ))
1159  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
1160
1161  // FIXME: add 1 cycle delay ?
1162  io.lsq.uncache.ready := !s3_valid
1163  io.ldout.bits        := s3_ld_wb_meta
1164  io.ldout.bits.data   := Mux(s3_valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache)
1165  io.ldout.valid       := s3_out.valid || (io.lsq.uncache.valid && !s3_valid)
1166
1167  // s3 load fast replay
1168  io.fast_rep_out.valid := s3_valid && s3_fast_rep
1169  io.fast_rep_out.bits := s3_in
1170  io.fast_rep_out.bits.lateKill := s3_rep_frm_fetch
1171
1172
1173  // fast load to load forward
1174  if (EnableLoadToLoadForward) {
1175    io.l2l_fwd_out.valid      := s3_valid && !s3_in.mmio && !s3_rep_info.need_rep
1176    io.l2l_fwd_out.data       := Mux(s3_in.vaddr(3), s3_merged_data_frm_cache(127, 64), s3_merged_data_frm_cache(63, 0))
1177    io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err || // ecc delayed error
1178                                 s3_ldld_rep_inst ||
1179                                 s3_rep_frm_fetch
1180  } else {
1181    io.l2l_fwd_out.valid := false.B
1182    io.l2l_fwd_out.data := DontCare
1183    io.l2l_fwd_out.dly_ld_err := DontCare
1184  }
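  // Load-to-load forwarding speculatively hands the 64-bit half of the
  // merged cache data selected by vaddr(3) to a dependent load as its
  // address source one cycle early; dly_ld_err tells the consumer to
  // cancel when this load turns out to be unusable (delayed ECC error,
  // ld-ld violation replay, or replay-from-fetch).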
1185
1186
1187  // FIXME: please move this part to LoadQueueReplay
1188  io.debug_ls := DontCare
1189
1190
1191  // Topdown
1192  io.lsTopdownInfo.s1.robIdx          := s1_in.uop.robIdx.value
1193  io.lsTopdownInfo.s1.vaddr_valid     := s1_valid && s1_in.hasROBEntry
1194  io.lsTopdownInfo.s1.vaddr_bits      := s1_vaddr
1195  io.lsTopdownInfo.s2.robIdx          := s2_in.uop.robIdx.value
1196  io.lsTopdownInfo.s2.paddr_valid     := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1197  io.lsTopdownInfo.s2.paddr_bits      := s2_in.paddr
1198  io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss
1199  io.lsTopdownInfo.s2.cache_miss_en   := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated
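  // Topdown bookkeeping: s1 reports the virtual address once the load owns
  // a ROB entry; s2 reports the physical address and miss status only after
  // a TLB hit, with cache_miss_en additionally gated on !missDbUpdated so
  // a given load's miss is not recorded twice.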
1200
1201  // perf cnt
1202  XSPerfAccumulate("s0_in_valid",                  io.ldin.valid)
1203  XSPerfAccumulate("s0_in_block",                  io.ldin.valid && !io.ldin.fire)
1204  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_sel_src.isFirstIssue)
1205  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.replay.fire)
1206  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.ldin.fire && s0_sel_src.isFirstIssue)
1207  XSPerfAccumulate("s0_fast_replay_issue",         io.fast_rep_in.fire)
1208  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1209  XSPerfAccumulate("s0_stall_dcache",              s0_valid && !io.dcache.req.ready)
1210  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12))
1211  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12))
1212  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
1213  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
1214  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1215  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1216  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_sel_src.prf && s0_int_iss_select)
1217  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
1218  XSPerfAccumulate("s0_hardware_prefetch_total",   io.prefetch_req.valid)
1219
1220  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1221  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1222  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1223  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1224  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1225  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1226  XSPerfAccumulate("s1_dly_err",                   s1_valid && s1_fast_rep_dly_err)
1227
1228  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1229  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1230  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1231  XSPerfAccumulate("s2_dcache_miss",               s2_fire && io.dcache.resp.bits.miss)
1232  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1233  XSPerfAccumulate("s2_dcache_real_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1234  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1235  XSPerfAccumulate("s2_dcache_miss_full_forward",  s2_fire && s2_dcache_miss)
1236  XSPerfAccumulate("s2_fwd_frm_d_can",             s2_valid && s2_fwd_frm_d_chan)
1237  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr",    s2_valid && s2_fwd_frm_d_chan_or_mshr)
1238  XSPerfAccumulate("s2_stall_out",                 s2_fire && !s2_can_go)
1239  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1240  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
1241  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req miss in l1
1242  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hit in l1
1243  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
1244  XSPerfAccumulate("s2_forward_req",               s2_fire && s2_in.forward_tlDchannel)
1245  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
1246  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)
1247
1248  XSPerfAccumulate("s3_fwd_frm_d_chan",            s3_valid && s3_fwd_frm_d_chan_valid)
1249
1250  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1251  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1252  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1253  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1254  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1255  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1256  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1257  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1258
1259  // FIXME(lyq): some signals in perfEvents are no longer suitable for the current MemBlock design
1260  // hardware performance counter
1261  val perfEvents = Seq(
1262    ("load_s0_in_fire         ", s0_fire                                                        ),
1263    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1264    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready                  ),
1265    ("load_s1_in_fire         ", s0_fire                                                        ),
1266    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1267    ("load_s2_in_fire         ", s1_fire                                                        ),
1268    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                            ),
1269  )
1270  generatePerfEvent()
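  // generatePerfEvent() comes from the HasPerfEvents trait and wires the
  // perfEvents sequence above into the hardware performance-counter interface.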
1271
1272  when(io.ldout.fire) {
1273    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
1274  }
1275  // end
1276}