/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuOutput}
import xiangshan.backend._
import xiangshan.backend.rob.RobLsqIO
import xiangshan.backend.fu.FuType
import xiangshan.mem.Bundles._
import xiangshan.cache._
import xiangshan.cache.{DCacheWordIO, DCacheLineIO, MemoryOpConstants}
import xiangshan.cache.{CMOReq, CMOResp}
import xiangshan.cache.mmu.{TlbRequestIO, TlbHintIO}

class ExceptionAddrIO(implicit p: Parameters) extends XSBundle {
  val isStore = Input(Bool())
  val vaddr = Output(UInt(XLEN.W))
  val vaNeedExt = Output(Bool())
  val isHyper = Output(Bool())
  val vstart = Output(UInt((log2Up(VLEN) + 1).W))
  val vl = Output(UInt((log2Up(VLEN) + 1).W))
  val gpaddr = Output(UInt(XLEN.W))
  val isForVSnonLeafPTE = Output(Bool())
}

class FwdEntry extends Bundle {
  val validFast = Bool() // validFast is generated in the same cycle as the query
  val valid = Bool() // valid is generated 1 cycle after the query request
  val data = UInt(8.W) // data is generated 1 cycle after the query request
}

// in-flight miss block requests
class InflightBlockInfo(implicit p: Parameters) extends XSBundle {
  val block_addr = UInt(PAddrBits.W)
  val valid = Bool()
}

class LsqEnqIO(implicit p: Parameters) extends MemBlockBundle {
  val canAccept = Output(Bool())
  val needAlloc = Vec(LSQEnqWidth, Input(UInt(2.W)))
  val req = Vec(LSQEnqWidth, Flipped(ValidIO(new DynInst)))
  val iqAccept = Input(Vec(LSQEnqWidth, Bool()))
  val resp = Vec(LSQEnqWidth, Output(new LSIdx))
}
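
// How dispatch is expected to drive LsqEnqIO, judging from the wiring in
// LsqWrapper and LsqEnqCtrl below: needAlloc(i) is a 2-bit mask (bit 0
// requests a load queue slot, bit 1 a store queue slot), resp(i) returns both
// allocated indices, and iqAccept(i) marks slots actually taken by the issue
// queues (allocation stops at the first rejected slot). A minimal sketch,
// with hypothetical `uop`/`uopValid` producers on the dispatch side:
//
//   lsq.enq.needAlloc(i) := Cat(isStore(i), isLoad(i))
//   lsq.enq.req(i).valid := uopValid(i) && lsq.enq.canAccept
//   lsq.enq.req(i).bits  := uop(i)
//   val lsIdx = lsq.enq.resp(i) // lqIdx/sqIdx for the renamed uop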
// Load / Store Queue Wrapper for XiangShan Out of Order LSU
class LsqWrapper(implicit p: Parameters) extends XSModule with HasDCacheParameters with HasPerfEvents {
  val io = IO(new Bundle() {
    val hartId = Input(UInt(hartIdLen.W))
    val brqRedirect = Flipped(ValidIO(new Redirect))
    val stvecFeedback = Vec(VecStorePipelineWidth, Flipped(ValidIO(new FeedbackToLsqIO)))
    val ldvecFeedback = Vec(VecLoadPipelineWidth, Flipped(ValidIO(new FeedbackToLsqIO)))
    val enq = new LsqEnqIO
    val ldu = new Bundle() {
      val stld_nuke_query = Vec(LoadPipelineWidth, Flipped(new LoadNukeQueryIO)) // from load_s2
      val ldld_nuke_query = Vec(LoadPipelineWidth, Flipped(new LoadNukeQueryIO)) // from load_s2
      val ldin = Vec(LoadPipelineWidth, Flipped(Decoupled(new LqWriteBundle))) // from load_s3
    }
    val sta = new Bundle() {
      val storeMaskIn = Vec(StorePipelineWidth, Flipped(Valid(new StoreMaskBundle))) // from store_s0, store mask, sent to sq from rs
      val storeAddrIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle))) // from store_s1
      val storeAddrInRe = Vec(StorePipelineWidth, Input(new LsPipelineBundle())) // from store_s2
    }
    val std = new Bundle() {
      val storeDataIn = Vec(StorePipelineWidth, Flipped(Valid(new MemExuOutput(isVector = true)))) // from store_s0, store data, sent to sq from rs
    }
    val ldout = Vec(LoadPipelineWidth, DecoupledIO(new MemExuOutput))
    val ld_raw_data = Vec(LoadPipelineWidth, Output(new LoadDataFromLQBundle))
    val ncOut = Vec(LoadPipelineWidth, DecoupledIO(new LsPipelineBundle))
    val replay = Vec(LoadPipelineWidth, Decoupled(new LsPipelineBundle))
    val sbuffer = Vec(EnsbufferWidth, Decoupled(new DCacheWordReqWithVaddrAndPfFlag))
    val sbufferVecDifftestInfo = Vec(EnsbufferWidth, Decoupled(new DynInst)) // information the vector store difftest needs
    val forward = Vec(LoadPipelineWidth, Flipped(new PipeLoadForwardQueryIO))
    val rob = Flipped(new RobLsqIO)
    val nuke_rollback = Vec(StorePipelineWidth, Output(Valid(new Redirect)))
    val nack_rollback = Vec(1, Output(Valid(new Redirect))) // uncache
    val release = Flipped(Valid(new Release))
    // val refill = Flipped(Valid(new Refill))
    val tl_d_channel = Input(new DcacheToLduForwardIO)
    val maControl = Flipped(new StoreMaBufToSqControlIO)
    val uncacheOutstanding = Input(Bool())
    val uncache = new UncacheWordIO
    val mmioStout = DecoupledIO(new MemExuOutput) // writeback uncached store
    val cboZeroStout = DecoupledIO(new MemExuOutput)
    // TODO: implement vector store
    val vecmmioStout = DecoupledIO(new MemExuOutput(isVector = true)) // vector writeback of uncached store
    val sqEmpty = Output(Bool())
    val lq_rep_full = Output(Bool())
    val sqFull = Output(Bool())
    val lqFull = Output(Bool())
    val sqCancelCnt = Output(UInt(log2Up(StoreQueueSize + 1).W))
    val lqCancelCnt = Output(UInt(log2Up(VirtualLoadQueueSize + 1).W))
    val lqDeq = Output(UInt(log2Up(CommitWidth + 1).W))
    val sqDeq = Output(UInt(log2Ceil(EnsbufferWidth + 1).W))
    val lqCanAccept = Output(Bool())
    val sqCanAccept = Output(Bool())
    val lqDeqPtr = Output(new LqPtr)
    val sqDeqPtr = Output(new SqPtr)
    val exceptionAddr = new ExceptionAddrIO
    val loadMisalignFull = Input(Bool())
    val issuePtrExt = Output(new SqPtr)
    val l2_hint = Input(Valid(new L2ToL1Hint()))
    val tlb_hint = Flipped(new TlbHintIO)
    val cmoOpReq = DecoupledIO(new CMOReq)
    val cmoOpResp = Flipped(DecoupledIO(new CMOResp))
    val flushSbuffer = new SbufferFlushBundle
    val force_write = Output(Bool())
    val lqEmpty = Output(Bool())

    // top-down
    val debugTopDown = new LoadQueueTopDownIO
    val noUopsIssued = Input(Bool())
  })

  val loadQueue = Module(new LoadQueue)
  val storeQueue = Module(new StoreQueue)

  storeQueue.io.hartId := io.hartId
  storeQueue.io.uncacheOutstanding := io.uncacheOutstanding

  if (backendParams.debugEn) { dontTouch(loadQueue.io.tlbReplayDelayCycleCtrl) }

  // Todo: imm
  val tlbReplayDelayCycleCtrl = WireInit(VecInit(Seq(14.U(ReSelectLen.W), 0.U(ReSelectLen.W), 125.U(ReSelectLen.W), 0.U(ReSelectLen.W))))
  loadQueue.io.tlbReplayDelayCycleCtrl := tlbReplayDelayCycleCtrl

  // io.enq logic
  // LSQ: send out canAccept when both load queue and store queue are ready
  // Dispatch: send instructions to LSQ only when they are ready
  io.enq.canAccept := loadQueue.io.enq.canAccept && storeQueue.io.enq.canAccept
  io.lqCanAccept := loadQueue.io.enq.canAccept
  io.sqCanAccept := storeQueue.io.enq.canAccept
  loadQueue.io.enq.sqCanAccept := storeQueue.io.enq.canAccept
  storeQueue.io.enq.lqCanAccept := loadQueue.io.enq.canAccept
  io.lqDeqPtr := loadQueue.io.lqDeqPtr
  io.sqDeqPtr := storeQueue.io.sqDeqPtr
  for (i <- io.enq.req.indices) {
    loadQueue.io.enq.needAlloc(i) := io.enq.needAlloc(i)(0)
    loadQueue.io.enq.req(i).valid := io.enq.needAlloc(i)(0) && io.enq.req(i).valid
    loadQueue.io.enq.req(i).bits := io.enq.req(i).bits
    loadQueue.io.enq.req(i).bits.sqIdx := storeQueue.io.enq.resp(i)

    storeQueue.io.enq.needAlloc(i) := io.enq.needAlloc(i)(1)
    storeQueue.io.enq.req(i).valid := io.enq.needAlloc(i)(1) && io.enq.req(i).valid
    storeQueue.io.enq.req(i).bits := io.enq.req(i).bits
    storeQueue.io.enq.req(i).bits.lqIdx := loadQueue.io.enq.resp(i)

    io.enq.resp(i).lqIdx := loadQueue.io.enq.resp(i)
    io.enq.resp(i).sqIdx := storeQueue.io.enq.resp(i)
  }
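
  // Note on the cross-wiring above: a load entering the load queue records the
  // sqIdx handed out by the store queue in the same cycle (its position among
  // in-flight stores), and a store symmetrically records its lqIdx. This is
  // what later allows a load to restrict forwarding and nuke queries to stores
  // that are older than itself in program order.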

  // store queue wiring
  storeQueue.io.brqRedirect <> io.brqRedirect
  storeQueue.io.vecFeedback <> io.stvecFeedback
  storeQueue.io.storeAddrIn <> io.sta.storeAddrIn // from store_s1
  storeQueue.io.storeAddrInRe <> io.sta.storeAddrInRe // from store_s2
  storeQueue.io.storeDataIn <> io.std.storeDataIn // from store_s0
  storeQueue.io.storeMaskIn <> io.sta.storeMaskIn // from store_s0
  storeQueue.io.sbuffer <> io.sbuffer
  storeQueue.io.sbufferVecDifftestInfo <> io.sbufferVecDifftestInfo
  storeQueue.io.mmioStout <> io.mmioStout
  storeQueue.io.cboZeroStout <> io.cboZeroStout
  storeQueue.io.vecmmioStout <> io.vecmmioStout
  storeQueue.io.rob <> io.rob
  storeQueue.io.exceptionAddr.isStore := DontCare
  storeQueue.io.sqCancelCnt <> io.sqCancelCnt
  storeQueue.io.sqDeq <> io.sqDeq
  storeQueue.io.sqEmpty <> io.sqEmpty
  storeQueue.io.sqFull <> io.sqFull
  storeQueue.io.forward <> io.forward // overlap forwardMask & forwardData, DO NOT CHANGE SEQUENCE
  storeQueue.io.force_write <> io.force_write
  storeQueue.io.cmoOpReq <> io.cmoOpReq
  storeQueue.io.cmoOpResp <> io.cmoOpResp
  storeQueue.io.flushSbuffer <> io.flushSbuffer
  storeQueue.io.maControl <> io.maControl
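
  // A note on the forward port above: per FwdEntry at the top of this file,
  // forwarding answers in two steps: validFast in the query cycle, valid/data
  // one cycle later. A load pipe would then merge forwarded bytes with DCache
  // data, roughly as in this sketch (fwd/dcacheData are hypothetical names):
  //
  //   val merged = VecInit(fwd.zipWithIndex.map { case (e, i) =>
  //     Mux(e.valid, e.data, dcacheData(8 * i + 7, 8 * i))
  //   })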

  /* <------- DANGEROUS: Don't change sequence here ! -------> */

  // load queue wiring
  loadQueue.io.redirect <> io.brqRedirect
  loadQueue.io.vecFeedback <> io.ldvecFeedback
  loadQueue.io.ldu <> io.ldu
  loadQueue.io.ldout <> io.ldout
  loadQueue.io.ld_raw_data <> io.ld_raw_data
  loadQueue.io.ncOut <> io.ncOut
  loadQueue.io.rob <> io.rob
  loadQueue.io.nuke_rollback <> io.nuke_rollback
  loadQueue.io.nack_rollback <> io.nack_rollback
  loadQueue.io.replay <> io.replay
  // loadQueue.io.refill <> io.refill
  loadQueue.io.tl_d_channel <> io.tl_d_channel
  loadQueue.io.release <> io.release
  loadQueue.io.exceptionAddr.isStore := DontCare
  loadQueue.io.loadMisalignFull := io.loadMisalignFull
  loadQueue.io.lqCancelCnt <> io.lqCancelCnt
  loadQueue.io.sq.stAddrReadySqPtr <> storeQueue.io.stAddrReadySqPtr
  loadQueue.io.sq.stAddrReadyVec <> storeQueue.io.stAddrReadyVec
  loadQueue.io.sq.stDataReadySqPtr <> storeQueue.io.stDataReadySqPtr
  loadQueue.io.sq.stDataReadyVec <> storeQueue.io.stDataReadyVec
  loadQueue.io.sq.stIssuePtr <> storeQueue.io.stIssuePtr
  loadQueue.io.sq.sqEmpty <> storeQueue.io.sqEmpty
  loadQueue.io.sta.storeAddrIn <> io.sta.storeAddrIn // store_s1
  loadQueue.io.std.storeDataIn <> io.std.storeDataIn // store_s0
  loadQueue.io.lqFull <> io.lqFull
  loadQueue.io.lq_rep_full <> io.lq_rep_full
  loadQueue.io.lqDeq <> io.lqDeq
  loadQueue.io.l2_hint <> io.l2_hint
  loadQueue.io.tlb_hint <> io.tlb_hint
  loadQueue.io.lqEmpty <> io.lqEmpty

  // ROB commits for the LSQ are delayed by two cycles, which in turn delays the deqPtr update in lq/sq
  // s0: commit
  // s1: exception found
  // s2: exception triggered
  // s3: ptr updated & new address
  // the address is used in the cycle after the exception is triggered
  io.exceptionAddr.vaddr := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.vaddr, loadQueue.io.exceptionAddr.vaddr)
  io.exceptionAddr.vaNeedExt := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.vaNeedExt, loadQueue.io.exceptionAddr.vaNeedExt)
  io.exceptionAddr.isHyper := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.isHyper, loadQueue.io.exceptionAddr.isHyper)
  io.exceptionAddr.vstart := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.vstart, loadQueue.io.exceptionAddr.vstart)
  io.exceptionAddr.vl := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.vl, loadQueue.io.exceptionAddr.vl)
  io.exceptionAddr.gpaddr := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.gpaddr, loadQueue.io.exceptionAddr.gpaddr)
  io.exceptionAddr.isForVSnonLeafPTE := Mux(RegNext(io.exceptionAddr.isStore), storeQueue.io.exceptionAddr.isForVSnonLeafPTE, loadQueue.io.exceptionAddr.isForVSnonLeafPTE)
  io.issuePtrExt := storeQueue.io.stAddrReadySqPtr

  // naive uncache arbiter
  val s_idle :: s_load :: s_store :: Nil = Enum(3)
  val pendingstate = RegInit(s_idle)

  switch(pendingstate) {
    is(s_idle) {
      when(io.uncache.req.fire) {
        pendingstate := Mux(io.uncacheOutstanding && io.uncache.req.bits.nc, s_idle,
                          Mux(loadQueue.io.uncache.req.valid, s_load, s_store))
      }
    }
    is(s_load) {
      when(io.uncache.resp.fire) {
        pendingstate := s_idle
      }
    }
    is(s_store) {
      when(io.uncache.resp.fire) {
        pendingstate := s_idle
      }
    }
  }
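
  // Arbiter FSM sketch: from s_idle, a fired request moves to s_load or
  // s_store (loads win, matching the request routing below) and the response
  // returns the FSM to s_idle; outstanding non-cacheable (nc) requests skip
  // the busy states entirely:
  //
  //            req.fire && !(uncacheOutstanding && nc)
  //   s_idle -----------------------------------------> s_load / s_store
  //     ^                                                      |
  //     +--------------------- resp.fire ---------------------+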

  loadQueue.io.uncache := DontCare
  storeQueue.io.uncache := DontCare
  loadQueue.io.uncache.req.ready := false.B
  storeQueue.io.uncache.req.ready := false.B
  loadQueue.io.uncache.resp.valid := false.B
  loadQueue.io.uncache.idResp.valid := false.B
  storeQueue.io.uncache.resp.valid := false.B
  storeQueue.io.uncache.idResp.valid := false.B
  when(pendingstate === s_idle) {
    when(loadQueue.io.uncache.req.valid) {
      io.uncache.req <> loadQueue.io.uncache.req
    }.otherwise {
      io.uncache.req <> storeQueue.io.uncache.req
    }
  }.otherwise {
    io.uncache.req.valid := false.B
    io.uncache.req.bits := DontCare
  }
  when(io.uncache.resp.bits.is2lq) {
    io.uncache.resp <> loadQueue.io.uncache.resp
  }.otherwise {
    io.uncache.resp <> storeQueue.io.uncache.resp
  }
  when(io.uncache.idResp.bits.is2lq) {
    loadQueue.io.uncache.idResp <> io.uncache.idResp
  }.otherwise {
    storeQueue.io.uncache.idResp <> io.uncache.idResp
  }

  loadQueue.io.debugTopDown <> io.debugTopDown
  loadQueue.io.noUopsIssed := io.noUopsIssued

  assert(!(loadQueue.io.uncache.resp.valid && storeQueue.io.uncache.resp.valid))
  assert(!(loadQueue.io.uncache.idResp.valid && storeQueue.io.uncache.idResp.valid))
  when(!io.uncacheOutstanding) {
    assert(!((loadQueue.io.uncache.resp.valid || storeQueue.io.uncache.resp.valid) && pendingstate === s_idle))
  }

  val perfEvents = Seq(loadQueue, storeQueue).flatMap(_.getPerfEvents)
  generatePerfEvent()
}
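
// LsqEnqCtrl tracks free-entry counters and tail pointers for both queues so
// that dispatch gets canAccept and its lqIdx/sqIdx answers without waiting for
// the queues themselves; the request is then replayed to the LSQ one cycle
// later through io.enqLsq. LqPtr/SqPtr are circular-queue pointers (value plus
// wrap flag), so additions such as `lqPtr + lqAllocNumber` wrap automatically.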

class LsqEnqCtrl(implicit p: Parameters) extends XSModule
  with HasVLSUParameters {
  val io = IO(new Bundle {
    val redirect = Flipped(ValidIO(new Redirect))
    // to dispatch
    val enq = new LsqEnqIO
    // from `memBlock.io.lqDeq`
    val lcommit = Input(UInt(log2Up(CommitWidth + 1).W))
    // from `memBlock.io.sqDeq`
    val scommit = Input(UInt(log2Ceil(EnsbufferWidth + 1).W))
    // from/to lsq
    val lqCancelCnt = Input(UInt(log2Up(VirtualLoadQueueSize + 1).W))
    val sqCancelCnt = Input(UInt(log2Up(StoreQueueSize + 1).W))
    val lqFreeCount = Output(UInt(log2Up(VirtualLoadQueueSize + 1).W))
    val sqFreeCount = Output(UInt(log2Up(StoreQueueSize + 1).W))
    val enqLsq = Flipped(new LsqEnqIO)
  })

  val lqPtr = RegInit(0.U.asTypeOf(new LqPtr))
  val sqPtr = RegInit(0.U.asTypeOf(new SqPtr))
  val lqCounter = RegInit(VirtualLoadQueueSize.U(log2Up(VirtualLoadQueueSize + 1).W))
  val sqCounter = RegInit(StoreQueueSize.U(log2Up(StoreQueueSize + 1).W))
  val canAccept = RegInit(false.B)

  val blockVec = io.enq.iqAccept.map(!_) :+ true.B
  val numLsElem = io.enq.req.map(_.bits.numLsElem)
  val needEnqLoadQueue = VecInit(io.enq.req.map(x => x.valid && (FuType.isLoad(x.bits.fuType) || FuType.isVNonsegLoad(x.bits.fuType))))
  val needEnqStoreQueue = VecInit(io.enq.req.map(x => x.valid && (FuType.isStore(x.bits.fuType) || FuType.isVNonsegStore(x.bits.fuType))))
  val loadQueueElem = needEnqLoadQueue.zip(numLsElem).map(x => Mux(x._1, x._2, 0.U))
  val storeQueueElem = needEnqStoreQueue.zip(numLsElem).map(x => Mux(x._1, x._2, 0.U))
  val loadFlowPopCount = 0.U +: loadQueueElem.zipWithIndex.map { case (l, i) =>
    loadQueueElem.take(i + 1).reduce(_ +& _).asTypeOf(UInt(elemIdxBits.W))
  }
  val storeFlowPopCount = 0.U +: storeQueueElem.zipWithIndex.map { case (s, i) =>
    storeQueueElem.take(i + 1).reduce(_ +& _).asTypeOf(UInt(elemIdxBits.W))
  }
  val lqAllocNumber = PriorityMux(blockVec.zip(loadFlowPopCount))
  val sqAllocNumber = PriorityMux(blockVec.zip(storeFlowPopCount))

  io.lqFreeCount := lqCounter
  io.sqFreeCount := sqCounter
  // How to update ptr and counter:
  // (1) by default, update according to enq/commit
  // (2) on a redirect, once the dispatch queue is empty, update according to the cancel counts from the lsq
  val t1_redirect = RegNext(io.redirect.valid)
  val t2_redirect = RegNext(t1_redirect)
  val t2_update = t2_redirect && !VecInit(io.enq.needAlloc.map(_.orR)).asUInt.orR
  val t3_update = RegNext(t2_update)
  val t3_lqCancelCnt = GatedRegNext(io.lqCancelCnt)
  val t3_sqCancelCnt = GatedRegNext(io.sqCancelCnt)
  when(t3_update) {
    lqPtr := lqPtr - t3_lqCancelCnt
    lqCounter := lqCounter + io.lcommit + t3_lqCancelCnt
    sqPtr := sqPtr - t3_sqCancelCnt
    sqCounter := sqCounter + io.scommit + t3_sqCancelCnt
  }.elsewhen(!io.redirect.valid && io.enq.canAccept) {
    lqPtr := lqPtr + lqAllocNumber
    lqCounter := lqCounter + io.lcommit - lqAllocNumber
    sqPtr := sqPtr + sqAllocNumber
    sqCounter := sqCounter + io.scommit - sqAllocNumber
  }.otherwise {
    lqCounter := lqCounter + io.lcommit
    sqCounter := sqCounter + io.scommit
  }

  // TODO: MaxAllocate and the width of lqOffset/sqOffset need to be discussed
  val lqMaxAllocate = LSQLdEnqWidth
  val sqMaxAllocate = LSQStEnqWidth
  val maxAllocate = lqMaxAllocate max sqMaxAllocate
  val ldCanAccept = lqCounter >= lqAllocNumber +& lqMaxAllocate.U
  val sqCanAccept = sqCounter >= sqAllocNumber +& sqMaxAllocate.U
  // It is possible for t3_update and enq to be true in the same clock cycle.
  // For example, if redirect.valid lasts more than one clock cycle, new
  // instructions may enqueue after the last redirect while the previous
  // redirect has not yet been resolved (i.e. the cancel count from the LSQ has
  // not been applied). To solve the issue simply, we block enqueue when
  // t3_update, which is RegNext(t2_update).
  io.enq.canAccept := RegNext(ldCanAccept && sqCanAccept && !t2_update)
  val lqOffset = Wire(Vec(io.enq.resp.length, UInt(lqPtr.value.getWidth.W)))
  val sqOffset = Wire(Vec(io.enq.resp.length, UInt(sqPtr.value.getWidth.W)))
  for ((resp, i) <- io.enq.resp.zipWithIndex) {
    lqOffset(i) := loadFlowPopCount(i)
    resp.lqIdx := lqPtr + lqOffset(i)
    sqOffset(i) := storeFlowPopCount(i)
    resp.sqIdx := sqPtr + sqOffset(i)
  }

  io.enqLsq.needAlloc := RegNext(io.enq.needAlloc)
  io.enqLsq.iqAccept := RegNext(io.enq.iqAccept)
  io.enqLsq.req.zip(io.enq.req).zip(io.enq.resp).foreach { case ((toLsq, enq), resp) =>
    val do_enq = enq.valid && !io.redirect.valid && io.enq.canAccept
    toLsq.valid := RegNext(do_enq)
    toLsq.bits := RegEnable(enq.bits, do_enq)
    toLsq.bits.lqIdx := RegEnable(resp.lqIdx, do_enq)
    toLsq.bits.sqIdx := RegEnable(resp.sqIdx, do_enq)
  }
}
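
// Usage sketch (illustrative, not part of this file): in MemBlock, LsqEnqCtrl
// is expected to sit between dispatch and LsqWrapper, with the commit and
// cancel counts fed back from the wrapper. `dispatch.io.toLsq` is a
// hypothetical port name; the others exist on the modules above:
//
//   lsqCtrl.io.enq <> dispatch.io.toLsq
//   lsq.io.enq <> lsqCtrl.io.enqLsq   // registered requests, one cycle later
//   lsqCtrl.io.lcommit := lsq.io.lqDeq
//   lsqCtrl.io.scommit := lsq.io.sqDeq
//   lsqCtrl.io.lqCancelCnt := lsq.io.lqCancelCnt
//   lsqCtrl.io.sqCancelCnt := lsq.io.sqCancelCnt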