/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.mem

import chisel3._
import chisel3.util._
import chipsalliance.rocketchip.config._
import xiangshan._
import xiangshan.backend.rob.{RobPtr, RobLsqIO}
import xiangshan.cache._
import xiangshan.backend.fu.fpu.FPU
import xiangshan.frontend.FtqPtr
import xiangshan.ExceptionNO._
import xiangshan.cache.dcache.ReplayCarry
import xiangshan.mem.mdp._
import utils._
import utility._

object LoadReplayCauses {
  // Replay causes are prioritized: a lower encoding means a higher priority.
  // When a load needs to be replayed, the load unit selects the highest-priority
  // cause from the replay cause vector.

  /*
   * Warning:
   * ************************************************************
   * * Don't change the priority. If the priority is changed,   *
   * * deadlock may occur. If you really need to change or      *
   * * add priority, please ensure that no deadlock will occur. *
   * ************************************************************
   *
   */
  // st-ld violation
  val waitStore         = 0
  // tlb miss check
  val tlbMiss           = 1
  // st-ld violation re-execute check
  val schedError        = 2
  // dcache bank conflict check
  val bankConflict      = 3
  // store-to-load-forwarding check
  val forwardFail       = 4
  // dcache replay check
  val dcacheReplay      = 5
  // dcache miss check
  val dcacheMiss        = 6
  // RAR/RAW queue accept check
  val rejectEnq         = 7
  // total causes
  val allCauses         = 8
}
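
// A minimal sketch (not part of the original design): because a lower cause index means a
// higher priority, the highest-priority pending cause in a cause vector can be picked with a
// plain priority encoder. `LoadReplayCauseSelect` is a hypothetical helper added here only
// for illustration.
object LoadReplayCauseSelect {
  // index of the highest-priority (lowest-index) pending cause
  def highestPriority(causeVec: UInt): UInt = PriorityEncoder(causeVec)
  // the same cause as a one-hot mask, e.g. for clearing it once the replay succeeds
  def highestPriorityOH(causeVec: UInt): UInt = PriorityEncoderOH(causeVec)
}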

class AgeDetector(numEntries: Int, numEnq: Int, regOut: Boolean = true)(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle {
    // NOTE: deq and enq may come in the same cycle.
    val enq = Vec(numEnq, Input(UInt(numEntries.W)))
    val deq = Input(UInt(numEntries.W))
    val ready = Input(UInt(numEntries.W))
    val out = Output(UInt(numEntries.W))
  })

  // age(i)(j): entry i entered the queue before entry j
  val age = Seq.fill(numEntries)(Seq.fill(numEntries)(RegInit(false.B)))
  val nextAge = Seq.fill(numEntries)(Seq.fill(numEntries)(Wire(Bool())))

  // to reduce register usage, only the upper triangle of the matrix is used
  def get_age(row: Int, col: Int): Bool = if (row <= col) age(row)(col) else !age(col)(row)
  def get_next_age(row: Int, col: Int): Bool = if (row <= col) nextAge(row)(col) else !nextAge(col)(row)
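  // For example, with 4 entries only the diagonal bits age(i)(i) and the upper-triangle bits
  // age(0)(1..3), age(1)(2..3), age(2)(3) ever need to hold state: a query such as get_age(3, 1)
  // is answered as !age(1)(3). The diagonal bit age(i)(i) also doubles as the valid bit of
  // entry i (see thisValid below).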
  def isFlushed(i: Int): Bool = io.deq(i)
  def isEnqueued(i: Int, numPorts: Int = -1): Bool = {
    val takePorts = if (numPorts == -1) io.enq.length else numPorts
    takePorts match {
      case 0 => false.B
      case 1 => io.enq.head(i) && !isFlushed(i)
      case n => VecInit(io.enq.take(n).map(_(i))).asUInt.orR && !isFlushed(i)
    }
  }

  for ((row, i) <- nextAge.zipWithIndex) {
    val thisValid = get_age(i, i) || isEnqueued(i)
    for ((elem, j) <- row.zipWithIndex) {
      when (isFlushed(i)) {
        // (1) when entry i is flushed or dequeues, set row(i) to false.B
        elem := false.B
      }.elsewhen (isFlushed(j)) {
        // (2) when entry j is flushed or dequeues, set column(j) to validVec
        elem := thisValid
      }.elsewhen (isEnqueued(i)) {
        // (3) when entry i enqueues from port k,
        // (3.1) if entry j enqueues from a previous port, set to false
        // (3.2) otherwise, set to true if and only if entry j is invalid
        // overall: !jEnqFromPreviousPorts && !jIsValid
        val sel = io.enq.map(_(i))
        val result = (0 until numEnq).map(k => isEnqueued(j, k))
        // why ParallelMux: sel must be one-hot since enq is one-hot
        elem := !get_age(j, j) && !ParallelMux(sel, result)
      }.otherwise {
        // default: unchanged
        elem := get_age(i, j)
      }
      age(i)(j) := elem
    }
  }

  // an entry is selected as oldest iff it is ready and no other ready entry is older than it
  def getOldest(get: (Int, Int) => Bool): UInt = {
    VecInit((0 until numEntries).map(i => {
      io.ready(i) & VecInit((0 until numEntries).map(j => if (i != j) !io.ready(j) || get(i, j) else true.B)).asUInt.andR
    })).asUInt
  }
  val best = getOldest(get_age)
  val nextBest = getOldest(get_next_age)

  io.out := (if (regOut) best else nextBest)
}

object AgeDetector {
  def apply(numEntries: Int, enq: Vec[UInt], deq: UInt, ready: UInt)(implicit p: Parameters): Valid[UInt] = {
    val age = Module(new AgeDetector(numEntries, enq.length, regOut = true))
    age.io.enq := enq
    age.io.deq := deq
    age.io.ready := ready
    val out = Wire(Valid(UInt(deq.getWidth.W)))
    out.valid := age.io.out.orR
    out.bits := age.io.out
    out
  }
}

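// LoadQueueReplay buffers loads that could not complete in the load pipeline (TLB miss,
// st-ld hazards, dcache miss/replay/bank conflict, RAR/RAW queue rejection, ...). It records
// why each entry is blocked, wakes the entry once the blocking condition clears (the blocking
// store issues its address/data, a matching refill returns, or a credit-based delay expires),
// and re-issues the oldest ready entries to the load units through io.replay.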
class LoadQueueReplay(implicit p: Parameters) extends XSModule
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
  with HasLoadHelper
  with HasPerfEvents
{
  val io = IO(new Bundle() {
    val redirect = Flipped(ValidIO(new Redirect))
    val enq = Vec(LoadPipelineWidth, Flipped(Decoupled(new LqWriteBundle)))
    val storeAddrIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val storeDataIn = Vec(StorePipelineWidth, Flipped(Valid(new ExuOutput)))
    val replay = Vec(LoadPipelineWidth, Decoupled(new LsPipelineBundle))
    val refill = Flipped(ValidIO(new Refill))
    val stAddrReadySqPtr = Input(new SqPtr)
    val stAddrReadyVec = Input(Vec(StoreQueueSize, Bool()))
    val stDataReadySqPtr = Input(new SqPtr)
    val stDataReadyVec = Input(Vec(StoreQueueSize, Bool()))
    val sqEmpty = Input(Bool())
    val lqFull = Output(Bool())
    val ldWbPtr = Input(new LqPtr)
    val tlbReplayDelayCycleCtrl = Vec(4, Input(UInt(ReSelectLen.W)))
  })

  println("LoadQueueReplay size: " + LoadQueueReplaySize)
  //  LoadQueueReplay fields:
  //  +-----------+---------+-------+-------------+--------+
  //  | Allocated | MicroOp | VAddr |    Cause    |  Flags |
  //  +-----------+---------+-------+-------------+--------+
  //  Allocated   : entry has already been allocated
  //  MicroOp     : inst's microOp
  //  VAddr       : virtual address
  //  Cause       : replay cause
  //  Flags       : rar/raw queue allocation flags
  val allocated = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B))) // The control signals need to explicitly indicate the initial value
  val sleep = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  val uop = Reg(Vec(LoadQueueReplaySize, new MicroOp))
  val vaddrModule = Module(new LqVAddrModule(
    gen = UInt(VAddrBits.W),
    numEntries = LoadQueueReplaySize,
    numRead = LoadPipelineWidth,
    numWrite = LoadPipelineWidth,
    numWBank = LoadQueueNWriteBanks,
    numWDelay = 2,
    numCamPort = 0))
  vaddrModule.io := DontCare
  val cause = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(LoadReplayCauses.allCauses.W))))

  // freeList: stores the indices of free (unallocated) entries
  // +---+---+--------------+-----+-----+
  // | 0 | 1 |      ......  | n-2 | n-1 |
  // +---+---+--------------+-----+-----+
  val freeList = Module(new FreeList(
    size = LoadQueueReplaySize,
    allocWidth = LoadPipelineWidth,
    freeWidth = 4,
    moduleName = "LoadQueueReplay freelist"
  ))
  freeList.io := DontCare
  /**
   * used for re-select control
   */
  val credit = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(ReSelectLen.W))))
  val selBlocked = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  //  Ptrs to control which cycle to choose
  val blockPtrTlb = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(2.W))))
  val blockPtrCache = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(2.W))))
  val blockPtrOthers = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(2.W))))
  //  Specific cycles to block
  val blockCyclesTlb = Reg(Vec(4, UInt(ReSelectLen.W)))
  blockCyclesTlb := io.tlbReplayDelayCycleCtrl
  val blockCyclesCache = RegInit(VecInit(Seq(11.U(ReSelectLen.W), 18.U(ReSelectLen.W), 127.U(ReSelectLen.W), 17.U(ReSelectLen.W))))
  val blockCyclesOthers = RegInit(VecInit(Seq(0.U(ReSelectLen.W), 0.U(ReSelectLen.W), 0.U(ReSelectLen.W), 0.U(ReSelectLen.W))))
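  // Re-selection back-off: each blocked entry owns a credit counter. On enqueue the credit is
  // loaded from one of the blockCycles* tables, indexed by the entry's 2-bit block pointer; the
  // pointer saturates at 3, so a load that keeps replaying for the same reason walks through the
  // table and then sticks at the last entry. The entry cannot be re-selected until its credit
  // has counted down to zero.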
  val blockSqIdx = Reg(Vec(LoadQueueReplaySize, new SqPtr))
  // block causes
  val blockByTlbMiss = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  val blockByForwardFail = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  val blockByWaitStore = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  val blockByCacheMiss = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  val blockByOthers = RegInit(VecInit(List.fill(LoadQueueReplaySize)(false.B)))
  //  DCache miss block
  val missMSHRId = RegInit(VecInit(List.fill(LoadQueueReplaySize)(0.U((log2Up(cfg.nMissEntries).W)))))
  val trueCacheMissReplay = WireInit(VecInit(cause.map(_(LoadReplayCauses.dcacheMiss))))
  // creditUpdate computes the decremented credit; an entry stays blocked from re-selection
  // (selBlocked) while its credit has not counted down to zero
  val creditUpdate = WireInit(VecInit(List.fill(LoadQueueReplaySize)(0.U(ReSelectLen.W))))
  (0 until LoadQueueReplaySize).map(i => {
    creditUpdate(i) := Mux(credit(i) > 0.U(ReSelectLen.W), credit(i)-1.U(ReSelectLen.W), credit(i))
    selBlocked(i) := creditUpdate(i) =/= 0.U(ReSelectLen.W) || credit(i) =/= 0.U(ReSelectLen.W)
  })
  val replayCarryReg = RegInit(VecInit(List.fill(LoadQueueReplaySize)(ReplayCarry(0.U, false.B))))

  /**
   * Enqueue
   */
  val canEnqueue = io.enq.map(_.valid)
  val cancelEnq = io.enq.map(enq => enq.bits.uop.robIdx.needFlush(io.redirect))
  val needReplay = io.enq.map(enq => enq.bits.replayInfo.needReplay())
  val hasExceptions = io.enq.map(enq => ExceptionNO.selectByFu(enq.bits.uop.cf.exceptionVec, lduCfg).asUInt.orR && !enq.bits.tlbMiss)
  val loadReplay = io.enq.map(enq => enq.bits.isLoadReplay)
  val needEnqueue = VecInit((0 until LoadPipelineWidth).map(w => {
    canEnqueue(w) && !cancelEnq(w) && needReplay(w) && !hasExceptions(w)
  }))
  val canFreeVec = VecInit((0 until LoadPipelineWidth).map(w => {
    canEnqueue(w) && loadReplay(w) && (!needReplay(w) || hasExceptions(w))
  }))
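  // A load enters (or re-arms) the replay queue only if it actually needs a replay and is
  // neither flushed by the redirect nor carrying an exception; a replaying load that no longer
  // needs replay, or that now has an exception, releases its entry instead (canFreeVec, see the
  // deallocate logic below).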

  // select LoadPipelineWidth valid index.
  val lqFull = freeList.io.empty
  val lqFreeNums = freeList.io.validCount

  // replay logic
  // release logic generation
  val storeAddrInSameCycleVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  val storeDataInSameCycleVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  val addrNotBlockVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  val dataNotBlockVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  // a store counts as ready if the store queue already marks it ready or if it executes its
  // address/data in this very cycle
  val storeAddrValidVec = addrNotBlockVec.asUInt | storeAddrInSameCycleVec.asUInt
  val storeDataValidVec = dataNotBlockVec.asUInt | storeDataInSameCycleVec.asUInt

  // store data valid check
  val stAddrReadyVec = io.stAddrReadyVec
  val stDataReadyVec = io.stDataReadyVec

  for (i <- 0 until LoadQueueReplaySize) {
    // dequeue
    //  FIXME: store*Ptr is not accurate
    dataNotBlockVec(i) := isAfter(io.stAddrReadySqPtr, blockSqIdx(i)) || stDataReadyVec(blockSqIdx(i).value) || io.sqEmpty // for better timing
    addrNotBlockVec(i) := !isBefore(io.stAddrReadySqPtr, blockSqIdx(i)) || stAddrReadyVec(blockSqIdx(i).value) || io.sqEmpty // for better timing

    // store address execute
    storeAddrInSameCycleVec(i) := VecInit((0 until StorePipelineWidth).map(w => {
      io.storeAddrIn(w).valid &&
      !io.storeAddrIn(w).bits.miss &&
      blockSqIdx(i) === io.storeAddrIn(w).bits.uop.sqIdx
    })).asUInt.orR // for better timing

    // store data execute
    storeDataInSameCycleVec(i) := VecInit((0 until StorePipelineWidth).map(w => {
      io.storeDataIn(w).valid &&
      blockSqIdx(i) === io.storeDataIn(w).bits.uop.sqIdx
    })).asUInt.orR // for better timing

  }

  // store addr issue check
  val stAddrDeqVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  (0 until LoadQueueReplaySize).map(i => {
    stAddrDeqVec(i) := allocated(i) && storeAddrValidVec(i)
  })

  // store data issue check
  val stDataDeqVec = Wire(Vec(LoadQueueReplaySize, Bool()))
  (0 until LoadQueueReplaySize).map(i => {
    stDataDeqVec(i) := allocated(i) && storeDataValidVec(i)
  })

  // update block condition
  (0 until LoadQueueReplaySize).map(i => {
    blockByForwardFail(i) := Mux(blockByForwardFail(i) && stDataDeqVec(i), false.B, blockByForwardFail(i))
    blockByWaitStore(i) := Mux(blockByWaitStore(i) && stAddrDeqVec(i), false.B, blockByWaitStore(i))
    blockByCacheMiss(i) := Mux(blockByCacheMiss(i) && io.refill.valid && io.refill.bits.id === missMSHRId(i), false.B, blockByCacheMiss(i))

    when (blockByCacheMiss(i) && io.refill.valid && io.refill.bits.id === missMSHRId(i)) { creditUpdate(i) := 0.U }
    when (blockByCacheMiss(i) && creditUpdate(i) === 0.U) { blockByCacheMiss(i) := false.B }
    when (blockByTlbMiss(i) && creditUpdate(i) === 0.U) { blockByTlbMiss(i) := false.B }
    when (blockByOthers(i) && creditUpdate(i) === 0.U) { blockByOthers(i) := false.B }
  })
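  // Wake-up summary: a forward-fail entry unblocks once the store it depends on has issued its
  // data; a wait-store entry unblocks once that store's address has issued; a cache-miss entry
  // unblocks on a refill matching its MSHR id or when its credit expires; tlb-miss and "others"
  // entries unblock purely on credit expiry.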

  //  Replay is split into 3 stages
  def getRemBits(input: UInt)(rem: Int): UInt = {
    VecInit((0 until LoadQueueReplaySize / LoadPipelineWidth).map(i => { input(LoadPipelineWidth * i + rem) })).asUInt
  }
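  // The replay queue is split into LoadPipelineWidth interleaved groups by index modulo
  // LoadPipelineWidth: getRemBits(mask)(rem) gathers the bits of all entries whose index is
  // congruent to rem, so each replay port only arbitrates among "its own" entries. For example,
  // with LoadPipelineWidth = 2, port 0 sees entries 0, 2, 4, ... and port 1 sees 1, 3, 5, ...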

  // stage1: select LoadPipelineWidth entries and read their vaddr
  val s1_oldestSel = Wire(Vec(LoadPipelineWidth, Valid(UInt(log2Up(LoadQueueReplaySize).W))))
  val s2_oldestSel = Wire(Vec(LoadPipelineWidth, Valid(UInt(log2Up(LoadQueueReplaySize).W))))

  // generate mask
  val needCancel = Wire(Vec(LoadQueueReplaySize, Bool()))
  // generate enq mask
  val selectIndexOH = Wire(Vec(LoadPipelineWidth, UInt(LoadQueueReplaySize.W)))
  val loadEnqFireMask = io.enq.map(x => x.fire && !x.bits.isLoadReplay).zip(selectIndexOH).map(x => Mux(x._1, x._2, 0.U))
  val remLoadEnqFireVec = loadEnqFireMask.map(x => VecInit((0 until LoadPipelineWidth).map(rem => getRemBits(x)(rem))))
  val remEnqSelVec = Seq.tabulate(LoadPipelineWidth)(w => VecInit(remLoadEnqFireVec.map(x => x(w))))

  // generate free mask
  val loadReplayFreeMask = io.enq.map(_.bits).zip(canFreeVec).map(x => Mux(x._2, UIntToOH(x._1.sleepIndex), 0.U)).reduce(_|_)
  val loadFreeSelMask = VecInit((0 until LoadQueueReplaySize).map(i => {
    needCancel(i) || loadReplayFreeMask(i)
  })).asUInt
  val remFreeSelVec = VecInit(Seq.tabulate(LoadPipelineWidth)(rem => getRemBits(loadFreeSelMask)(rem)))

  // generate cancel mask
  val loadReplayFireMask = (0 until LoadPipelineWidth).map(w => Mux(io.replay(w).fire, UIntToOH(s2_oldestSel(w).bits), 0.U)).reduce(_|_)
  val loadCancelSelMask = VecInit((0 until LoadQueueReplaySize).map(i => {
    needCancel(i) || loadReplayFireMask(i)
  })).asUInt
  val remCancelSelVec = VecInit(Seq.tabulate(LoadPipelineWidth)(rem => getRemBits(loadCancelSelMask)(rem)))

  // generate replay mask
  val loadReplaySelMask = VecInit((0 until LoadQueueReplaySize).map(i => {
    val blocked = selBlocked(i) || blockByTlbMiss(i) || blockByForwardFail(i) || blockByCacheMiss(i) || blockByWaitStore(i) || blockByOthers(i)
    allocated(i) && sleep(i) && !blocked && !loadCancelSelMask(i)
  })).asUInt // use UInt instead of Vec to reduce Verilog lines
  val oldestPtr = VecInit((0 until CommitWidth).map(x => io.ldWbPtr + x.U))
  val oldestSelMask = VecInit((0 until LoadQueueReplaySize).map(i => {
    loadReplaySelMask(i) && VecInit(oldestPtr.map(_ === uop(i).lqIdx)).asUInt.orR
  })).asUInt // use UInt instead of Vec to reduce Verilog lines
  val remReplaySelVec = VecInit(Seq.tabulate(LoadPipelineWidth)(rem => getRemBits(loadReplaySelMask)(rem)))
  val remOldestSelVec = VecInit(Seq.tabulate(LoadPipelineWidth)(rem => getRemBits(oldestSelMask)(rem)))

  // select oldest logic
  s1_oldestSel := VecInit((0 until LoadPipelineWidth).map(rport => {
    // select the earliest enqueued inst
    val ageOldest = AgeDetector(LoadQueueReplaySize / LoadPipelineWidth, remEnqSelVec(rport), remFreeSelVec(rport), remReplaySelVec(rport))
    assert(!(ageOldest.valid && PopCount(ageOldest.bits) > 1.U), "oldest index must be one-hot!")
    val ageOldestValid = ageOldest.valid
    val ageOldestIndex = OHToUInt(ageOldest.bits)

    // select the program-order oldest
    val issOldestValid = remOldestSelVec(rport).orR
    val issOldestIndex = OHToUInt(PriorityEncoderOH(remOldestSelVec(rport)))

    val oldest = Wire(Valid(UInt()))
    oldest.valid := ageOldest.valid || issOldestValid
    oldest.bits := Cat(Mux(issOldestValid, issOldestIndex, ageOldestIndex), rport.U(log2Ceil(LoadPipelineWidth).W))
    oldest
  }))
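  // Two selection policies are combined per port: the AgeDetector picks the entry that entered
  // the replay queue earliest, while remOldestSelVec picks, in program order, an entry whose
  // lqIdx lies within CommitWidth of the load writeback pointer (one of the oldest loads still
  // waiting to write back). The program-order choice wins when it exists. The port number forms
  // the low bits of the selected index, undoing the modulo grouping of getRemBits.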


  (0 until LoadPipelineWidth).map(w => {
    vaddrModule.io.raddr(w) := s1_oldestSel(w).bits
  })

  // stage2: send replay request to load unit
  val hasBankConflictVec = RegNext(VecInit(s1_oldestSel.map(x => x.valid && cause(x.bits)(LoadReplayCauses.bankConflict))))
  val hasBankConflict = hasBankConflictVec.asUInt.orR
  val allBankConflict = hasBankConflictVec.asUInt.andR

  // replay cold down
  val ColdDownCycles = 16

  val coldCounter = RegInit(VecInit(List.fill(LoadPipelineWidth)(0.U(log2Up(ColdDownCycles).W))))
  val ColdDownThreshold = Wire(UInt(log2Up(ColdDownCycles).W))
  ColdDownThreshold := Constantin.createRecord("ColdDownThreshold_"+p(XSCoreParamsKey).HartId.toString(), initValue = 12.U)
  assert(ColdDownCycles.U > ColdDownThreshold, "ColdDownCycles must be greater than ColdDownThreshold!")

  def replayCanFire(i: Int) = coldCounter(i) >= 0.U && coldCounter(i) < ColdDownThreshold
  def coldDownNow(i: Int) = coldCounter(i) >= ColdDownThreshold

  for (i <- 0 until LoadPipelineWidth) {
    val s1_replayIdx = s1_oldestSel(i).bits
    val s2_replayUop = RegNext(uop(s1_replayIdx))
    val s2_replayMSHRId = RegNext(missMSHRId(s1_replayIdx))
    val s2_replayCauses = RegNext(cause(s1_replayIdx))
    val s2_replayCarry = RegNext(replayCarryReg(s1_replayIdx))
    val s2_replayCacheMissReplay = RegNext(trueCacheMissReplay(s1_replayIdx))
    val cancelReplay = s2_replayUop.robIdx.needFlush(io.redirect)
    // To avoid deadlock, when some (but not all) of the selected entries are blocked by a bank
    // conflict, only those bank-conflict entries are replayed this cycle
    val bankConflictReplay = Mux(hasBankConflict && !allBankConflict, s2_replayCauses(LoadReplayCauses.bankConflict), true.B)

    s2_oldestSel(i).valid := RegNext(s1_oldestSel(i).valid && !loadCancelSelMask(s1_replayIdx))
    s2_oldestSel(i).bits := RegNext(s1_oldestSel(i).bits)

    io.replay(i).valid := s2_oldestSel(i).valid && !cancelReplay && bankConflictReplay && replayCanFire(i)
    io.replay(i).bits := DontCare
    io.replay(i).bits.uop := s2_replayUop
    io.replay(i).bits.vaddr := vaddrModule.io.rdata(i)
    io.replay(i).bits.isFirstIssue := false.B
    io.replay(i).bits.isLoadReplay := true.B
    io.replay(i).bits.replayCarry := s2_replayCarry
    io.replay(i).bits.mshrid := s2_replayMSHRId
    io.replay(i).bits.forward_tlDchannel := s2_replayCauses(LoadReplayCauses.dcacheMiss)
    io.replay(i).bits.sleepIndex := s2_oldestSel(i).bits

    when (io.replay(i).fire) {
      sleep(s2_oldestSel(i).bits) := false.B
      assert(allocated(s2_oldestSel(i).bits), s"LoadQueueReplay: why replay an invalid entry ${s2_oldestSel(i).bits} ?\n")
    }
  }

  // update cold counter
  val lastReplay = RegNext(VecInit(io.replay.map(_.fire)))
  for (i <- 0 until LoadPipelineWidth) {
    when (lastReplay(i) && io.replay(i).fire) {
      coldCounter(i) := coldCounter(i) + 1.U
    } .elsewhen (coldDownNow(i)) {
      coldCounter(i) := coldCounter(i) + 1.U
    } .otherwise {
      coldCounter(i) := 0.U
    }
  }
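  // Cold-down behaviour: replaying from the same port in back-to-back cycles increments its
  // coldCounter; once the counter reaches ColdDownThreshold the port stops issuing replays
  // (replayCanFire is false) while the counter keeps counting up, and after it wraps around
  // ColdDownCycles the port may replay again. Outside the cold-down window, any cycle without
  // a back-to-back replay resets the counter to zero.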

  when(io.refill.valid) {
    XSDebug("miss resp: paddr:0x%x data %x\n", io.refill.bits.addr, io.refill.bits.data)
  }

  //  LoadQueueReplay deallocate
  val freeMaskVec = Wire(Vec(LoadQueueReplaySize, Bool()))

  // init
  freeMaskVec.map(e => e := false.B)

  // Allocate logic
  val enqValidVec = Wire(Vec(LoadPipelineWidth, Bool()))
  val enqIndexVec = Wire(Vec(LoadPipelineWidth, UInt()))
  val enqOffset = Wire(Vec(LoadPipelineWidth, UInt()))

  val newEnqueue = (0 until LoadPipelineWidth).map(i => {
    needEnqueue(i) && !io.enq(i).bits.isLoadReplay
  })

  for ((enq, w) <- io.enq.zipWithIndex) {
    vaddrModule.io.wen(w) := false.B
    freeList.io.doAllocate(w) := false.B

    enqOffset(w) := PopCount(newEnqueue.take(w))
    freeList.io.allocateReq(w) := newEnqueue(w)

    //  Allocation is ready
    enqValidVec(w) := freeList.io.canAllocate(enqOffset(w))
    enqIndexVec(w) := Mux(enq.bits.isLoadReplay, enq.bits.sleepIndex, freeList.io.allocateSlot(enqOffset(w)))
    selectIndexOH(w) := UIntToOH(enqIndexVec(w))
    enq.ready := Mux(enq.bits.isLoadReplay, true.B, enqValidVec(w))

    val enqIndex = enqIndexVec(w)
    when (needEnqueue(w) && enq.ready) {

      val debug_robIdx = enq.bits.uop.robIdx.asUInt
      XSError(allocated(enqIndex) && !enq.bits.isLoadReplay, p"LoadQueueReplay: can not accept more loads, check: ldu $w, robIdx $debug_robIdx!")
      XSError(hasExceptions(w), p"LoadQueueReplay: the instruction has an exception, it can not be replayed, check: ldu $w, robIdx $debug_robIdx!")

      freeList.io.doAllocate(w) := !enq.bits.isLoadReplay

      //  Allocate new entry
      allocated(enqIndex) := true.B
      sleep(enqIndex) := true.B
      uop(enqIndex) := enq.bits.uop

      vaddrModule.io.wen(w) := true.B
      vaddrModule.io.waddr(w) := enqIndex
      vaddrModule.io.wdata(w) := enq.bits.vaddr

      /**
       * used for feedback and replay
       */
      // set flags
      val replayInfo = enq.bits.replayInfo
      val dataInLastBeat = replayInfo.dataInLastBeat
      cause(enqIndex) := replayInfo.cause.asUInt

      // update credit
      val blockCyclesTlbPtr = blockPtrTlb(enqIndex)
      val blockCyclesCachePtr = blockPtrCache(enqIndex)
      val blockCyclesOtherPtr = blockPtrOthers(enqIndex)
      creditUpdate(enqIndex) := Mux(replayInfo.cause(LoadReplayCauses.tlbMiss), blockCyclesTlb(blockCyclesTlbPtr),
                                Mux(replayInfo.cause(LoadReplayCauses.dcacheMiss), blockCyclesCache(blockCyclesCachePtr) + dataInLastBeat, blockCyclesOthers(blockCyclesOtherPtr)))

      // init
      blockByTlbMiss(enqIndex) := false.B
      blockByWaitStore(enqIndex) := false.B
      blockByForwardFail(enqIndex) := false.B
      blockByCacheMiss(enqIndex) := false.B
      blockByOthers(enqIndex) := false.B

      // update block pointer
      when (replayInfo.cause(LoadReplayCauses.dcacheReplay) || replayInfo.cause(LoadReplayCauses.rejectEnq)) {
        // normal case: dcache replay or rar/raw reject
        blockByOthers(enqIndex) := true.B
        blockPtrOthers(enqIndex) := Mux(blockPtrOthers(enqIndex) === 3.U(2.W), blockPtrOthers(enqIndex), blockPtrOthers(enqIndex) + 1.U(2.W))
      } .elsewhen (replayInfo.cause(LoadReplayCauses.bankConflict) || replayInfo.cause(LoadReplayCauses.schedError)) {
        // normal case: bank conflict or schedule error
        // can replay next cycle
        creditUpdate(enqIndex) := 0.U
        blockByOthers(enqIndex) := false.B
      }

      // special case: tlb miss
      when (replayInfo.cause(LoadReplayCauses.tlbMiss)) {
        blockByTlbMiss(enqIndex) := true.B
        blockPtrTlb(enqIndex) := Mux(blockPtrTlb(enqIndex) === 3.U(2.W), blockPtrTlb(enqIndex), blockPtrTlb(enqIndex) + 1.U(2.W))
      }

      // special case: dcache miss
      when (replayInfo.cause(LoadReplayCauses.dcacheMiss)) {
        blockByCacheMiss(enqIndex) := !replayInfo.canForwardFullData && //  dcache miss
                                  !(io.refill.valid && io.refill.bits.id === replayInfo.missMSHRId) && // no refill in this cycle
                                  creditUpdate(enqIndex) =/= 0.U //  credit is not zero
        blockPtrCache(enqIndex) := Mux(blockPtrCache(enqIndex) === 3.U(2.W), blockPtrCache(enqIndex), blockPtrCache(enqIndex) + 1.U(2.W))
      }

      // special case: st-ld violation
      when (replayInfo.cause(LoadReplayCauses.waitStore)) {
        blockByWaitStore(enqIndex) := true.B
        blockSqIdx(enqIndex) := replayInfo.addrInvalidSqIdx
        blockPtrOthers(enqIndex) := Mux(blockPtrOthers(enqIndex) === 3.U(2.W), blockPtrOthers(enqIndex), blockPtrOthers(enqIndex) + 1.U(2.W))
      }

      // special case: data forward fail
      when (replayInfo.cause(LoadReplayCauses.forwardFail)) {
        blockByForwardFail(enqIndex) := true.B
        blockSqIdx(enqIndex) := replayInfo.dataInvalidSqIdx
        blockPtrOthers(enqIndex) := Mux(blockPtrOthers(enqIndex) === 3.U(2.W), blockPtrOthers(enqIndex), blockPtrOthers(enqIndex) + 1.U(2.W))
      }

      // record dcache replay info (replay carry and MSHR id) for this entry
      replayCarryReg(enqIndex) := replayInfo.replayCarry
      missMSHRId(enqIndex) := replayInfo.missMSHRId
    }

    // a returning replayed load either frees its entry or goes back to sleep
    val sleepIndex = enq.bits.sleepIndex
    when (enq.valid && enq.bits.isLoadReplay) {
      when (!needReplay(w) || hasExceptions(w)) {
        allocated(sleepIndex) := false.B
        freeMaskVec(sleepIndex) := true.B
      } .otherwise {
        sleep(sleepIndex) := true.B
      }
    }
  }

  // misprediction recovery / exception redirect
  for (i <- 0 until LoadQueueReplaySize) {
    needCancel(i) := uop(i).robIdx.needFlush(io.redirect) && allocated(i)
    when (needCancel(i)) {
      allocated(i) := false.B
      freeMaskVec(i) := true.B
    }
  }

  freeList.io.free := freeMaskVec.asUInt

  io.lqFull := lqFull

  //  perf cnt
  val enqCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay))
  val deqCount = PopCount(io.replay.map(_.fire))
  val deqBlockCount = PopCount(io.replay.map(r => r.valid && !r.ready))
  val replayTlbMissCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.tlbMiss)))
  val replayWaitStoreCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.waitStore)))
  val replaySchedErrorCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.schedError)))
  val replayRejectEnqCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.rejectEnq)))
  val replayBankConflictCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.bankConflict)))
  val replayDCacheReplayCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.dcacheReplay)))
  val replayForwardFailCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.forwardFail)))
  val replayDCacheMissCount = PopCount(io.enq.map(enq => enq.fire && !enq.bits.isLoadReplay && enq.bits.replayInfo.cause(LoadReplayCauses.dcacheMiss)))
  XSPerfAccumulate("enq", enqCount)
  XSPerfAccumulate("deq", deqCount)
  XSPerfAccumulate("deq_block", deqBlockCount)
  XSPerfAccumulate("replay_full", io.lqFull)
  XSPerfAccumulate("replay_reject_enq", replayRejectEnqCount)
  XSPerfAccumulate("replay_sched_error", replaySchedErrorCount)
  XSPerfAccumulate("replay_wait_store", replayWaitStoreCount)
  XSPerfAccumulate("replay_tlb_miss", replayTlbMissCount)
  XSPerfAccumulate("replay_bank_conflict", replayBankConflictCount)
  XSPerfAccumulate("replay_dcache_replay", replayDCacheReplayCount)
  XSPerfAccumulate("replay_forward_fail", replayForwardFailCount)
  XSPerfAccumulate("replay_dcache_miss", replayDCacheMissCount)

  val perfEvents: Seq[(String, UInt)] = Seq(
    ("enq", enqCount),
    ("deq", deqCount),
    ("deq_block", deqBlockCount),
    ("replay_full", io.lqFull),
    ("replay_reject_enq", replayRejectEnqCount),
    ("replay_advance_sched", replaySchedErrorCount),
    ("replay_wait_store", replayWaitStoreCount),
    ("replay_tlb_miss", replayTlbMissCount),
    ("replay_bank_conflict", replayBankConflictCount),
    ("replay_dcache_replay", replayDCacheReplayCount),
    ("replay_forward_fail", replayForwardFailCount),
    ("replay_dcache_miss", replayDCacheMissCount),
  )
  generatePerfEvent()
  // end
}
618*e4f69d78Ssfencevma}
619