package xiangshan.backend.issue

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils.{HasPerfEvents, OptionWrapper}
import xiangshan._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.fu.FuType
import xiangshan.backend.regfile.RfWritePortWithConfig
import xiangshan.backend.rename.BusyTable
import xiangshan.mem.{LsqEnqCtrl, LsqEnqIO, MemWaitUpdateReq, SqPtr, LqPtr}
import xiangshan.backend.datapath.WbConfig.V0WB
import xiangshan.backend.regfile.VlPregParams

sealed trait SchedulerType

case class IntScheduler() extends SchedulerType
case class FpScheduler() extends SchedulerType
case class MemScheduler() extends SchedulerType
case class VfScheduler() extends SchedulerType
case class NoScheduler() extends SchedulerType

class Scheduler(val params: SchdBlockParams)(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false

  val numIntStateWrite = backendParams.numPregWb(IntData())
  val numFpStateWrite = backendParams.numPregWb(FpData())
  val numVfStateWrite = backendParams.numPregWb(VecData())
  val numV0StateWrite = backendParams.numPregWb(V0Data())
  val numVlStateWrite = backendParams.numPregWb(VlData())

  val dispatch2Iq = LazyModule(new Dispatch2Iq(params))
  val issueQueue = params.issueBlockParams.map(x => LazyModule(new IssueQueue(x).suggestName(x.getIQName)))

  lazy val module: SchedulerImpBase = params.schdType match {
    case IntScheduler() => new SchedulerArithImp(this)(params, p)
    case FpScheduler() => new SchedulerArithImp(this)(params, p)
    case MemScheduler() => new SchedulerMemImp(this)(params, p)
    case VfScheduler() => new SchedulerArithImp(this)(params, p)
    case _ => null
  }
}

class SchedulerIO()(implicit params: SchdBlockParams, p: Parameters) extends XSBundle {
  // params alias
  private val LoadQueueSize = VirtualLoadQueueSize

  val fromTop = new Bundle {
    val hartId = Input(UInt(8.W))
  }
  val fromWbFuBusyTable = new Bundle {
    val fuBusyTableRead = MixedVec(params.issueBlockParams.map(x => Input(x.genWbFuBusyTableReadBundle)))
  }
  val wbFuBusyTable = MixedVec(params.issueBlockParams.map(x => Output(x.genWbFuBusyTableWriteBundle)))
  val intIQValidNumVec = Output(MixedVec(backendParams.genIntIQValidNumBundle))
  val fpIQValidNumVec = Output(MixedVec(backendParams.genFpIQValidNumBundle))

  val fromCtrlBlock = new Bundle {
    val flush = Flipped(ValidIO(new Redirect))
  }
  val fromDispatch = new Bundle {
    val allocPregs = Vec(RenameWidth, Input(new ResetPregStateReq))
    val uops = Vec(params.numUopIn, Flipped(DecoupledIO(new DynInst)))
  }
  val intWriteBack = MixedVec(Vec(backendParams.numPregWb(IntData()),
    new RfWritePortWithConfig(backendParams.intPregParams.dataCfg, backendParams.intPregParams.addrWidth)))
  val fpWriteBack = MixedVec(Vec(backendParams.numPregWb(FpData()),
    new RfWritePortWithConfig(backendParams.fpPregParams.dataCfg, backendParams.fpPregParams.addrWidth)))
  val vfWriteBack = MixedVec(Vec(backendParams.numPregWb(VecData()),
    new RfWritePortWithConfig(backendParams.vfPregParams.dataCfg, backendParams.vfPregParams.addrWidth)))
  val v0WriteBack = MixedVec(Vec(backendParams.numPregWb(V0Data()),
    new RfWritePortWithConfig(backendParams.v0PregParams.dataCfg, backendParams.v0PregParams.addrWidth)))
  val vlWriteBack = MixedVec(Vec(backendParams.numPregWb(VlData()),
    new RfWritePortWithConfig(backendParams.vlPregParams.dataCfg, backendParams.vlPregParams.addrWidth)))
  val toDataPathAfterDelay: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = MixedVec(params.issueBlockParams.map(_.genIssueDecoupledBundle))

  val vlWriteBackInfo = new Bundle {
    val vlIsZero = Input(Bool())
    val vlIsVlmax = Input(Bool())
  }

  val fromSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
  }

  val toSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = params.genIQWakeUpOutValidBundle
  }

  val fromDataPath = new Bundle {
    val resp: MixedVec[MixedVec[OGRespBundle]] = MixedVec(params.issueBlockParams.map(x => Flipped(x.genOGRespBundle)))
    val og0Cancel = Input(ExuVec())
    // TODO: remove this once og1 no longer produces a cancel signal
    val og1Cancel = Input(ExuVec())
    // replacement RegCache indices forwarded to the wakeup queue
    val replaceRCIdx = OptionWrapper(params.needWriteRegCache, Vec(params.numWriteRegCache, Input(UInt(RegCacheIdxWidth.W))))
    // kept only for compatibility with old code
    def apply(i: Int)(j: Int) = resp(i)(j)
  }

  val loadFinalIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val memAddrIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val vecLoadIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.VlduCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val memIO = if (params.isMemSchd) Some(new Bundle {
    val lsqEnqIO = Flipped(new LsqEnqIO)
  }) else None
  val fromMem = if (params.isMemSchd) Some(new Bundle {
    val ldaFeedback = Flipped(Vec(params.LduCnt, new MemRSFeedbackIO))
    val staFeedback = Flipped(Vec(params.StaCnt, new MemRSFeedbackIO))
    val hyuFeedback = Flipped(Vec(params.HyuCnt, new MemRSFeedbackIO))
    val vstuFeedback = Flipped(Vec(params.VstuCnt, new MemRSFeedbackIO(isVector = true)))
    val vlduFeedback = Flipped(Vec(params.VlduCnt, new MemRSFeedbackIO(isVector = true)))
    val stIssuePtr = Input(new SqPtr())
    val lcommit = Input(UInt(log2Up(CommitWidth + 1).W))
    val scommit = Input(UInt(log2Ceil(EnsbufferWidth + 1).W)) // connected to `memBlock.io.sqDeq` instead of ROB
    val wakeup = Vec(params.LdExuCnt, Flipped(Valid(new DynInst)))
    val lqDeqPtr = Input(new LqPtr)
    val sqDeqPtr = Input(new SqPtr)
    // from lsq
    val lqCancelCnt = Input(UInt(log2Up(LoadQueueSize + 1).W))
    val sqCancelCnt = Input(UInt(log2Up(StoreQueueSize + 1).W))
    val memWaitUpdateReq = Flipped(new MemWaitUpdateReq)
  }) else None
  val toMem = if (params.isMemSchd) Some(new Bundle {
    val loadFastMatch = Output(Vec(params.LduCnt, new IssueQueueLoadBundle))
  }) else None
  val fromOg2 = if (params.isVfSchd) Some(MixedVec(params.issueBlockParams.map(x => Flipped(x.genOG2RespBundle)))) else None
}

abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends LazyModuleImp(wrapper)
  with HasXSParameter
{
  val io = IO(new SchedulerIO())

  // alias
  private val iqWakeUpInMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    io.fromSchedulers.wakeupVec.map(x => (x.bits.exuIdx, x)).toMap
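  // The incoming IQ-to-IQ wakeup sources are keyed by the producing EXU's index. Each IQ's
  // wakeupFromIQ sink is looked up in this map by exuIdx further below, so a wakeup source
  // that an IQ declares but this scheduler does not receive fails at elaboration time with a
  // missing-key error, e.g.
  //   val wakeUpIn = iqWakeUpInMap(wakeUp.bits.exuIdx)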
  private val schdType = params.schdType

  // Modules
  val dispatch2Iq: Dispatch2IqImp = wrapper.dispatch2Iq.module
  val issueQueues: Seq[IssueQueueImp] = wrapper.issueQueue.map(_.module)
  io.intIQValidNumVec := 0.U.asTypeOf(io.intIQValidNumVec)
  io.fpIQValidNumVec := 0.U.asTypeOf(io.fpIQValidNumVec)
  if (params.isIntSchd) {
    dispatch2Iq.io.intIQValidNumVec.get := io.intIQValidNumVec
    io.intIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }
  else if (params.isFpSchd) {
    dispatch2Iq.io.fpIQValidNumVec.get := io.fpIQValidNumVec
    io.fpIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }

  // valid count
  dispatch2Iq.io.iqValidCnt := issueQueues.filter(_.params.StdCnt == 0).map(_.io.status.validCnt)

  // BusyTable Modules
  val intBusyTable = schdType match {
    case IntScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numIntStateRead, wrapper.numIntStateWrite, IntPhyRegs, IntWB())))
    case _ => None
  }
  val fpBusyTable = schdType match {
    case FpScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numFpStateRead, wrapper.numFpStateWrite, FpPhyRegs, FpWB())))
    case _ => None
  }
  val vfBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVfStateRead, wrapper.numVfStateWrite, VfPhyRegs, VfWB())))
    case _ => None
  }
  val v0BusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numV0StateRead, wrapper.numV0StateWrite, V0PhyRegs, V0WB())))
    case _ => None
  }
  val vlBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVlStateRead, wrapper.numVlStateWrite, VlPhyRegs, VlWB())))
    case _ => None
  }

  dispatch2Iq.io match { case dp2iq =>
    dp2iq.redirect <> io.fromCtrlBlock.flush
    dp2iq.in <> io.fromDispatch.uops
    dp2iq.readIntState.foreach(_ <> intBusyTable.get.io.read)
    dp2iq.readFpState.foreach(_ <> fpBusyTable.get.io.read)
    dp2iq.readVfState.foreach(_ <> vfBusyTable.get.io.read)
    dp2iq.readV0State.foreach(_ <> v0BusyTable.get.io.read)
    dp2iq.readVlState.foreach(_ <> vlBusyTable.get.io.read)
  }

  intBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isInt
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.intWriteBack(i).wen && io.intWriteBack(i).intWen
        wb.bits := io.intWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  fpBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isFp
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.fpWriteBack(i).wen && io.fpWriteBack(i).fpWen
        wb.bits := io.fpWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vfBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVec
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vfWriteBack(i).wen && io.vfWriteBack(i).vecWen
        wb.bits := io.vfWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  v0BusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isV0
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.v0WriteBack(i).wen && io.v0WriteBack(i).v0Wen
        wb.bits := io.v0WriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vlBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVl
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vlWriteBack(i).wen && io.vlWriteBack(i).vlWen
        wb.bits := io.vlWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

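  // Each write-back port is also turned into a wakeup bundle below: `wen` gates the wakeup's
  // validity, while the per-regfile enables (intWen/fpWen/vecWen/v0Wen/vlWen) together with
  // `addr` tell the issue queues which kind of source operand the written pdest can wake up.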
  val wakeupFromIntWBVec = Wire(params.genIntWBWakeUpSinkValidBundle)
  val wakeupFromFpWBVec = Wire(params.genFpWBWakeUpSinkValidBundle)
  val wakeupFromVfWBVec = Wire(params.genVfWBWakeUpSinkValidBundle)
  val wakeupFromV0WBVec = Wire(params.genV0WBWakeUpSinkValidBundle)
  val wakeupFromVlWBVec = Wire(params.genVlWBWakeUpSinkValidBundle)

  wakeupFromIntWBVec.zip(io.intWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromFpWBVec.zip(io.fpWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVfWBVec.zip(io.vfWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromV0WBVec.zip(io.v0WriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVlWBVec.zip(io.vlWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  // Connect bundles having the same wakeup source
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.wakeupFromIQ.foreach { wakeUp =>
      val wakeUpIn = iqWakeUpInMap(wakeUp.bits.exuIdx)
      val exuIdx = wakeUp.bits.exuIdx
      println(s"[Backend] Connect wakeup exuIdx ${exuIdx}")
      connectSamePort(wakeUp, wakeUpIn)
      backendParams.connectWakeup(exuIdx)
      if (backendParams.isCopyPdest(exuIdx)) {
        println(s"[Backend] exuIdx ${exuIdx} use pdestCopy ${backendParams.getCopyPdestIndex(exuIdx)}")
        wakeUp.bits.pdest := wakeUpIn.bits.pdestCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.rfWenCopy.nonEmpty) wakeUp.bits.rfWen := wakeUpIn.bits.rfWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.fpWenCopy.nonEmpty) wakeUp.bits.fpWen := wakeUpIn.bits.fpWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vecWenCopy.nonEmpty) wakeUp.bits.vecWen := wakeUpIn.bits.vecWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.v0WenCopy.nonEmpty) wakeUp.bits.v0Wen := wakeUpIn.bits.v0WenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vlWenCopy.nonEmpty) wakeUp.bits.vlWen := wakeUpIn.bits.vlWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.loadDependencyCopy.nonEmpty) wakeUp.bits.loadDependency := wakeUpIn.bits.loadDependencyCopy.get(backendParams.getCopyPdestIndex(exuIdx))
      }
      if (iq.params.numIntSrc == 0) wakeUp.bits.rfWen := false.B
      if (iq.params.numFpSrc == 0) wakeUp.bits.fpWen := false.B
      if (iq.params.numVfSrc == 0) wakeUp.bits.vecWen := false.B
      if (iq.params.numV0Src == 0) wakeUp.bits.v0Wen := false.B
      if (iq.params.numVlSrc == 0) wakeUp.bits.vlWen := false.B
    }
    iq.io.og0Cancel := io.fromDataPath.og0Cancel
    iq.io.og1Cancel := io.fromDataPath.og1Cancel
    iq.io.ldCancel := io.ldCancel
  }

  // connect the vl writeback information to the issue queues
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.vlIsVlmax := io.vlWriteBackInfo.vlIsVlmax
    iq.io.vlIsZero := io.vlWriteBackInfo.vlIsZero
  }

  private val iqWakeUpOutMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    issueQueues.flatMap(_.io.wakeupToIQ)
      .map(x => (x.bits.exuIdx, x))
      .toMap

  // Connect outgoing wakeup bundles by matching exuIdx
  io.toSchedulers.wakeupVec.foreach { wakeUp =>
    wakeUp := iqWakeUpOutMap(wakeUp.bits.exuIdx)
  }

  io.toDataPathAfterDelay.zipWithIndex.foreach { case (toDpDy, i) =>
    toDpDy <> issueQueues(i).io.deqDelay
  }

  // Response
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.og0Resp.zipWithIndex.foreach { case (og0Resp, j) =>
      og0Resp := io.fromDataPath(i)(j).og0resp
    }
    iq.io.og1Resp.zipWithIndex.foreach { case (og1Resp, j) =>
      og1Resp := io.fromDataPath(i)(j).og1resp
    }
    iq.io.finalIssueResp.foreach(_.zipWithIndex.foreach { case (finalIssueResp, j) =>
      if (io.loadFinalIssueResp(i).isDefinedAt(j)) {
        finalIssueResp := io.loadFinalIssueResp(i)(j)
      } else {
        finalIssueResp := 0.U.asTypeOf(finalIssueResp)
      }
    })
    iq.io.memAddrIssueResp.foreach(_.zipWithIndex.foreach { case (memAddrIssueResp, j) =>
      if (io.memAddrIssueResp(i).isDefinedAt(j)) {
        memAddrIssueResp := io.memAddrIssueResp(i)(j)
      } else {
        memAddrIssueResp := 0.U.asTypeOf(memAddrIssueResp)
      }
    })
    iq.io.vecLoadIssueResp.foreach(_.zipWithIndex.foreach { case (resp, deqIdx) =>
      resp := io.vecLoadIssueResp(i)(deqIdx)
    })
    if (params.isVfSchd) {
      iq.io.og2Resp.get.zipWithIndex.foreach { case (og2Resp, exuIdx) =>
        og2Resp := io.fromOg2.get(i)(exuIdx)
      }
    }
    iq.io.wbBusyTableRead := io.fromWbFuBusyTable.fuBusyTableRead(i)
    io.wbFuBusyTable(i) := iq.io.wbBusyTableWrite
    iq.io.replaceRCIdx.foreach(x => x := 0.U.asTypeOf(x))
  }

  // Connect each replace RCIdx to IQ
  if (params.needWriteRegCache) {
    val iqReplaceRCIdxVec = issueQueues.filter(_.params.needWriteRegCache).flatMap { iq =>
      iq.params.allExuParams.zip(iq.io.replaceRCIdx.get).filter(_._1.needWriteRegCache).map(_._2)
    }
    iqReplaceRCIdxVec.zip(io.fromDataPath.replaceRCIdx.get).foreach { case (iq, in) =>
      iq := in
    }

    println(s"[Scheduler] numWriteRegCache: ${params.numWriteRegCache}")
    println(s"[Scheduler] iqReplaceRCIdxVec: ${iqReplaceRCIdxVec.size}")
  }

  // perfEvent
  val lastCycleDp2IqOutFireVec = RegNext(VecInit(dispatch2Iq.io.out.flatten.map(_.fire)))
  val lastCycleIqEnqFireVec = RegNext(VecInit(issueQueues.map(_.io.enq.map(_.fire)).flatten))
  val lastCycleIqFullVec = RegNext(VecInit(issueQueues.map(_.io.enq.head.ready)))

  val issueQueueFullVecPerf = issueQueues.zip(lastCycleIqFullVec).map { case (iq, full) => (iq.params.getIQName + s"_full", full) }
  val basePerfEvents = Seq(
    ("dispatch2Iq_out_fire_cnt", PopCount(lastCycleDp2IqOutFireVec)),
    ("issueQueue_enq_fire_cnt", PopCount(lastCycleIqEnqFireVec))
  ) ++ issueQueueFullVecPerf

  println(s"[Scheduler] io.fromSchedulers.wakeupVec: ${io.fromSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
  println(s"[Scheduler] iqWakeUpInKeys: ${iqWakeUpInMap.keys}")

  println(s"[Scheduler] iqWakeUpOutKeys: ${iqWakeUpOutMap.keys}")
  println(s"[Scheduler] io.toSchedulers.wakeupVec: ${io.toSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
}

class SchedulerArithImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
  // dontTouch(io.vfWbFuBusyTable)
  println(s"[SchedulerArithImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    val intWBIQ = params.schdType match {
      case IntScheduler() => wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case FpScheduler() => wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case VfScheduler() => (wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1))
      case _ => null
    }
    iq.io.wakeupFromWB.zip(intWBIQ).foreach { case (sink, source) => sink := source }
  }

  val perfEvents = basePerfEvents
  generatePerfEvent()
}

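// Note: IntScheduler, FpScheduler and VfScheduler all elaborate SchedulerArithImp above and
// differ only in which write-back wakeup vectors feed each IQ. The memory scheduler below
// additionally wires LSQ allocation, store addr/data enqueue pairing and memory feedback.
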
// FIXME: Vector mem instructions may not be handled properly!
class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
  println(s"[SchedulerMemImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  val memAddrIQs = issueQueues.filter(_.params.isMemAddrIQ)
  val stAddrIQs = issueQueues.filter(iq => iq.params.StaCnt > 0) // included in memAddrIQs
  val ldAddrIQs = issueQueues.filter(iq => iq.params.LduCnt > 0)
  val stDataIQs = issueQueues.filter(iq => iq.params.StdCnt > 0)
  val vecMemIQs = issueQueues.filter(_.params.isVecMemIQ)
  val (hyuIQs, hyuIQIdxs) = issueQueues.zipWithIndex.filter(_._1.params.HyuCnt > 0).unzip

  println(s"[SchedulerMemImp] memAddrIQs.size: ${memAddrIQs.size}, enq.size: ${memAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stAddrIQs.size: ${stAddrIQs.size}, enq.size: ${stAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] ldAddrIQs.size: ${ldAddrIQs.size}, enq.size: ${ldAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stDataIQs.size: ${stDataIQs.size}, enq.size: ${stDataIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] hyuIQs.size: ${hyuIQs.size}, enq.size: ${hyuIQs.map(_.io.enq.size).sum}")
  require(memAddrIQs.nonEmpty && stDataIQs.nonEmpty)

  io.toMem.get.loadFastMatch := 0.U.asTypeOf(io.toMem.get.loadFastMatch) // TODO: is this still needed?

  private val loadWakeUp = issueQueues.filter(_.params.LdExuCnt > 0).map(_.asInstanceOf[IssueQueueMemAddrImp].io.memIO.get.loadWakeUp).flatten
  require(loadWakeUp.length == io.fromMem.get.wakeup.length)
  loadWakeUp.zip(io.fromMem.get.wakeup).foreach(x => x._1 := x._2)

  memAddrIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1)
    ).foreach { case (sink, source) => sink := source }
  }

  ldAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO.head)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  stAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.staFeedback(i)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  hyuIQs.zip(hyuIQIdxs).foreach {
    case (imp: IssueQueueMemAddrImp, idx) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.hyuFeedback.head
      imp.io.memIO.get.feedbackIO(1) := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO(1))
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      // TODO: refactor dirty code
      imp.io.deqDelay(1).ready := false.B
      io.toDataPathAfterDelay(idx)(1).valid := false.B
      io.toDataPathAfterDelay(idx)(1).bits := 0.U.asTypeOf(io.toDataPathAfterDelay(idx)(1).bits)
    case _ =>
  }

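  // A store (or hybrid) uop occupies a single dispatch port but must enqueue an address IQ and a
  // data IQ in lockstep: the dispatch slot is accepted only when both IQs are ready, and the
  // data-IQ entry is validated only for uops that actually carry store data (stu/mou), as wired below.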
  private val staIdxSeq = stAddrIQs.map(iq => iq.params.idxInSchBlk)
  private val hyaIdxSeq = hyuIQs.map(iq => iq.params.idxInSchBlk)

  println(s"[SchedulerMemImp] sta iq idx in memSchdBlock: $staIdxSeq")
  println(s"[SchedulerMemImp] hya iq idx in memSchdBlock: $hyaIdxSeq")

  private val staEnqs = stAddrIQs.map(_.io.enq).flatten
  private val stdEnqs = stDataIQs.map(_.io.enq).flatten.take(staEnqs.size)
  private val hyaEnqs = hyuIQs.map(_.io.enq).flatten
  private val hydEnqs = stDataIQs.map(_.io.enq).flatten.drop(staEnqs.size)

  require(staEnqs.size == stdEnqs.size, s"number of enq ports of store address IQs (${staEnqs.size}) " +
    s"should be equal to number of enq ports of store data IQs (${stdEnqs.size})")

  require(hyaEnqs.size == hydEnqs.size, s"number of enq ports of hybrid address IQs (${hyaEnqs.size}) " +
    s"should be equal to number of enq ports of hybrid data IQs (${hydEnqs.size})")

  val d2IqStaOut = dispatch2Iq.io.out.zipWithIndex.filter(staIdxSeq contains _._2).unzip._1.flatten
  d2IqStaOut.zip(staEnqs).zip(stdEnqs).foreach { case ((dp, staIQ), stdIQ) =>
    val isAllReady = staIQ.ready && stdIQ.ready
    dp.ready := isAllReady
    staIQ.valid := dp.valid && isAllReady
    stdIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  val d2IqHyaOut = dispatch2Iq.io.out.zipWithIndex.filter(hyaIdxSeq contains _._2).unzip._1.flatten
  d2IqHyaOut.zip(hyaEnqs).zip(hydEnqs).foreach { case ((dp, hyaIQ), hydIQ) =>
    val isAllReady = hyaIQ.ready && hydIQ.ready
    dp.ready := isAllReady
    hyaIQ.valid := dp.valid && isAllReady
    hydIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  stDataIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
    ).foreach { case (sink, source) => sink := source }
  }

  (stdEnqs ++ hydEnqs).zip(staEnqs ++ hyaEnqs).zipWithIndex.foreach { case ((stdIQEnq, staIQEnq), i) =>
    stdIQEnq.bits := staIQEnq.bits
    // Store data reuses store addr src(1) in dispatch2iq
    // [dispatch2iq] --src*------src*(0)--> [staIQ|hyaIQ]
    //                       \
    //                        ---src*(1)--> [stdIQ]
    // Since sta's src(1) is easier to access here, stdIQEnq.bits.*(0) is driven from
    // staIQEnq.bits.*(1) instead of dispatch2Iq.io.out(x).bits.*(1)
    val stdIdx = 1
    stdIQEnq.bits.srcState(0) := staIQEnq.bits.srcState(stdIdx)
    stdIQEnq.bits.srcLoadDependency(0) := staIQEnq.bits.srcLoadDependency(stdIdx)
    stdIQEnq.bits.srcType(0) := staIQEnq.bits.srcType(stdIdx)
    stdIQEnq.bits.psrc(0) := staIQEnq.bits.psrc(stdIdx)
    stdIQEnq.bits.sqIdx := staIQEnq.bits.sqIdx
  }

  vecMemIQs.foreach {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.sqDeqPtr.foreach(_ := io.fromMem.get.sqDeqPtr)
      imp.io.memIO.get.lqDeqPtr.foreach(_ := io.fromMem.get.lqDeqPtr)
      // not used
      //imp.io.memIO.get.feedbackIO.head := io.fromMem.get.vstuFeedback.head // only vector store replay
      // maybe not used
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      imp.io.wakeupFromWB.zip(
        wakeupFromIntWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromFpWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVfWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => imp.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
      ).foreach { case (sink, source) => sink := source }

    case _ =>
  }
  val vecMemFeedbackIO: Seq[MemRSFeedbackIO] = vecMemIQs.map {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.feedbackIO
  }.flatten
  assert(vecMemFeedbackIO.size == io.fromMem.get.vstuFeedback.size, "vecMemFeedback size doesn't match!")
  vecMemFeedbackIO.zip(io.fromMem.get.vstuFeedback).foreach {
    case (sink, source) =>
      sink := source
  }

  val lsqEnqCtrl = Module(new LsqEnqCtrl)

  lsqEnqCtrl.io.redirect <> io.fromCtrlBlock.flush
  lsqEnqCtrl.io.enq <> dispatch2Iq.io.enqLsqIO.get
  lsqEnqCtrl.io.lcommit := io.fromMem.get.lcommit
  lsqEnqCtrl.io.scommit := io.fromMem.get.scommit
  lsqEnqCtrl.io.lqCancelCnt := io.fromMem.get.lqCancelCnt
  lsqEnqCtrl.io.sqCancelCnt := io.fromMem.get.sqCancelCnt
  dispatch2Iq.io.lqFreeCount.get := lsqEnqCtrl.io.lqFreeCount
  dispatch2Iq.io.sqFreeCount.get := lsqEnqCtrl.io.sqFreeCount
  io.memIO.get.lsqEnqIO <> lsqEnqCtrl.io.enqLsq

  dontTouch(io.vecLoadIssueResp)

  val intBusyTablePerf = intBusyTable.get
  val fpBusyTablePerf = fpBusyTable.get
  val vecBusyTablePerf = vfBusyTable.get
  val v0BusyTablePerf = v0BusyTable.get
  val vlBusyTablePerf = vlBusyTable.get

  val perfEvents = basePerfEvents ++ Seq(intBusyTablePerf, fpBusyTablePerf, vecBusyTablePerf, v0BusyTablePerf, vlBusyTablePerf).flatten(_.getPerfEvents)
  generatePerfEvent()
}