package xiangshan.backend.issue

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility.HasPerfEvents
import utils.OptionWrapper
import xiangshan._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.fu.FuType
import xiangshan.backend.regfile.RfWritePortWithConfig
import xiangshan.backend.rename.BusyTable
import xiangshan.mem.{LsqEnqCtrl, LsqEnqIO, MemWaitUpdateReq, SqPtr, LqPtr}
import xiangshan.backend.datapath.WbConfig.V0WB
import xiangshan.backend.regfile.VlPregParams
import xiangshan.backend.regcache.RegCacheTagTable

sealed trait SchedulerType

case class IntScheduler() extends SchedulerType
case class FpScheduler() extends SchedulerType
case class MemScheduler() extends SchedulerType
case class VfScheduler() extends SchedulerType
case class NoScheduler() extends SchedulerType

class Scheduler(val params: SchdBlockParams)(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false

  val numIntStateWrite = backendParams.numPregWb(IntData())
  val numFpStateWrite = backendParams.numPregWb(FpData())
  val numVfStateWrite = backendParams.numPregWb(VecData())
  val numV0StateWrite = backendParams.numPregWb(V0Data())
  val numVlStateWrite = backendParams.numPregWb(VlData())

  val dispatch2Iq = LazyModule(new Dispatch2Iq(params))
  val issueQueue = params.issueBlockParams.map(x => LazyModule(new IssueQueue(x).suggestName(x.getIQName)))

  lazy val module: SchedulerImpBase = params.schdType match {
    case IntScheduler() => new SchedulerArithImp(this)(params, p)
    case FpScheduler()  => new SchedulerArithImp(this)(params, p)
    case MemScheduler() => new SchedulerMemImp(this)(params, p)
    case VfScheduler()  => new SchedulerArithImp(this)(params, p)
    case _ => null
  }
}

class SchedulerIO()(implicit params: SchdBlockParams, p: Parameters) extends XSBundle {
  // params alias
  private val LoadQueueSize = VirtualLoadQueueSize

  val fromTop = new Bundle {
    val hartId = Input(UInt(8.W))
  }
  val fromWbFuBusyTable = new Bundle {
    val fuBusyTableRead = MixedVec(params.issueBlockParams.map(x => Input(x.genWbFuBusyTableReadBundle)))
  }
  val wbFuBusyTable = MixedVec(params.issueBlockParams.map(x => Output(x.genWbFuBusyTableWriteBundle)))
  val intIQValidNumVec = Output(MixedVec(backendParams.genIntIQValidNumBundle))
  val fpIQValidNumVec = Output(MixedVec(backendParams.genFpIQValidNumBundle))

  val fromCtrlBlock = new Bundle {
    val flush = Flipped(ValidIO(new Redirect))
  }
  val fromDispatch = new Bundle {
    val allocPregs = Vec(RenameWidth, Input(new ResetPregStateReq))
    val uops = Vec(params.numUopIn, Flipped(DecoupledIO(new DynInst)))
  }
  val intWriteBack = MixedVec(Vec(backendParams.numPregWb(IntData()),
    new RfWritePortWithConfig(backendParams.intPregParams.dataCfg, backendParams.intPregParams.addrWidth)))
  val fpWriteBack = MixedVec(Vec(backendParams.numPregWb(FpData()),
    new RfWritePortWithConfig(backendParams.fpPregParams.dataCfg, backendParams.fpPregParams.addrWidth)))
  val vfWriteBack = MixedVec(Vec(backendParams.numPregWb(VecData()),
    new RfWritePortWithConfig(backendParams.vfPregParams.dataCfg, backendParams.vfPregParams.addrWidth)))
  val v0WriteBack = MixedVec(Vec(backendParams.numPregWb(V0Data()),
    new RfWritePortWithConfig(backendParams.v0PregParams.dataCfg, backendParams.v0PregParams.addrWidth)))
  val vlWriteBack = MixedVec(Vec(backendParams.numPregWb(VlData()),
    new RfWritePortWithConfig(backendParams.vlPregParams.dataCfg, backendParams.vlPregParams.addrWidth)))
  val toDataPathAfterDelay: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = MixedVec(params.issueBlockParams.map(_.genIssueDecoupledBundle))

  val vlWriteBackInfo = new Bundle {
    val vlFromIntIsZero  = Input(Bool())
    val vlFromIntIsVlmax = Input(Bool())
    val vlFromVfIsZero   = Input(Bool())
    val vlFromVfIsVlmax  = Input(Bool())
  }

  val fromSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
  }

  val toSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = params.genIQWakeUpOutValidBundle
  }

  val fromDataPath = new Bundle {
    val resp: MixedVec[MixedVec[OGRespBundle]] = MixedVec(params.issueBlockParams.map(x => Flipped(x.genOGRespBundle)))
    val og0Cancel = Input(ExuVec())
    // Todo: remove this once OG1 no longer produces a cancel signal
    val og1Cancel = Input(ExuVec())
    // replace RCIdx to Wakeup Queue
    val replaceRCIdx = OptionWrapper(params.needWriteRegCache, Vec(params.numWriteRegCache, Input(UInt(RegCacheIdxWidth.W))))
    // kept for compatibility with old code
    def apply(i: Int)(j: Int) = resp(i)(j)
  }

  val loadFinalIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val vecLoadFinalIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.VlduCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val memAddrIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val vecLoadIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.VlduCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val memIO = if (params.isMemSchd) Some(new Bundle {
    val lsqEnqIO = Flipped(new LsqEnqIO)
  }) else None
  val fromMem = if (params.isMemSchd) Some(new Bundle {
    val ldaFeedback = Flipped(Vec(params.LduCnt, new MemRSFeedbackIO))
    val staFeedback = Flipped(Vec(params.StaCnt, new MemRSFeedbackIO))
    val hyuFeedback = Flipped(Vec(params.HyuCnt, new MemRSFeedbackIO))
    val vstuFeedback = Flipped(Vec(params.VstuCnt, new MemRSFeedbackIO(isVector = true)))
    val vlduFeedback = Flipped(Vec(params.VlduCnt, new MemRSFeedbackIO(isVector = true)))
    val stIssuePtr = Input(new SqPtr())
    val lcommit = Input(UInt(log2Up(CommitWidth + 1).W))
    val scommit = Input(UInt(log2Ceil(EnsbufferWidth + 1).W)) // connected to `memBlock.io.sqDeq` instead of ROB
    val wakeup = Vec(params.LdExuCnt, Flipped(Valid(new DynInst)))
    val lqDeqPtr = Input(new LqPtr)
    val sqDeqPtr = Input(new SqPtr)
    // from lsq
    val lqCancelCnt = Input(UInt(log2Up(LoadQueueSize + 1).W))
    val sqCancelCnt = Input(UInt(log2Up(StoreQueueSize + 1).W))
    val memWaitUpdateReq = Flipped(new MemWaitUpdateReq)
  }) else None
  val toMem = if (params.isMemSchd) Some(new Bundle {
    val loadFastMatch = Output(Vec(params.LduCnt, new IssueQueueLoadBundle))
  }) else None
  val fromOg2Resp = if (params.needOg2Resp) Some(MixedVec(params.issueBlockParams.filter(_.needOg2Resp).map(x => Flipped(x.genOG2RespBundle)))) else None
}
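
// Base implementation shared by the arithmetic and memory schedulers: it owns Dispatch2Iq,
// the issue queues, the per-register-file busy tables and the RegCache tag table, and wires
// up the writeback-based and IQ-to-IQ wakeups together with the og0/og1/load cancel signals.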
abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends LazyModuleImp(wrapper)
  with HasXSParameter
{
  val io = IO(new SchedulerIO())

  // alias
  private val iqWakeUpInMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    io.fromSchedulers.wakeupVec.map(x => (x.bits.exuIdx, x)).toMap
  private val schdType = params.schdType

  // Modules
  val dispatch2Iq: Dispatch2IqImp = wrapper.dispatch2Iq.module
  val issueQueues: Seq[IssueQueueImp] = wrapper.issueQueue.map(_.module)
  io.intIQValidNumVec := 0.U.asTypeOf(io.intIQValidNumVec)
  io.fpIQValidNumVec := 0.U.asTypeOf(io.fpIQValidNumVec)
  if (params.isIntSchd) {
    dispatch2Iq.io.intIQValidNumVec.get := io.intIQValidNumVec
    io.intIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }
  else if (params.isFpSchd) {
    dispatch2Iq.io.fpIQValidNumVec.get := io.fpIQValidNumVec
    io.fpIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }

  // valid count
  dispatch2Iq.io.iqValidCnt := issueQueues.filter(_.params.StdCnt == 0).map(_.io.status.validCnt)

  // BusyTable Modules
  val intBusyTable = schdType match {
    case IntScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numIntStateRead, wrapper.numIntStateWrite, IntPhyRegs, IntWB())))
    case _ => None
  }
  val fpBusyTable = schdType match {
    case FpScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numFpStateRead, wrapper.numFpStateWrite, FpPhyRegs, FpWB())))
    case _ => None
  }
  val vfBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVfStateRead, wrapper.numVfStateWrite, VfPhyRegs, VfWB())))
    case _ => None
  }
  val v0BusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numV0StateRead, wrapper.numV0StateWrite, V0PhyRegs, V0WB())))
    case _ => None
  }
  val vlBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVlStateRead, wrapper.numVlStateWrite, VlPhyRegs, VlWB())))
    case _ => None
  }

  // RegCacheTagTable Module
  val rcTagTable = schdType match {
    case IntScheduler() | MemScheduler() => Some(Module(new RegCacheTagTable(dispatch2Iq.numRCTagTableStateRead)))
    case _ => None
  }

  dispatch2Iq.io match { case dp2iq =>
    dp2iq.redirect <> io.fromCtrlBlock.flush
    dp2iq.in <> io.fromDispatch.uops
    dp2iq.readIntState.foreach(_ <> intBusyTable.get.io.read)
    dp2iq.readFpState.foreach(_ <> fpBusyTable.get.io.read)
    dp2iq.readVfState.foreach(_ <> vfBusyTable.get.io.read)
    dp2iq.readV0State.foreach(_ <> v0BusyTable.get.io.read)
    dp2iq.readVlState.foreach(_ <> vlBusyTable.get.io.read)
    dp2iq.readRCTagTableState.foreach(_ <> rcTagTable.get.io.readPorts)
  }
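
  // Busy-table bookkeeping: allocPregs from dispatch mark a newly allocated preg as busy,
  // the writeback ports clear it, and IQ wakeups clear it speculatively ahead of writeback,
  // with og0Cancel/ldCancel rolling back clears that turn out to be mis-speculated.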
  intBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isInt
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.intWriteBack(i).wen && io.intWriteBack(i).intWen
        wb.bits := io.intWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  fpBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isFp
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.fpWriteBack(i).wen && io.fpWriteBack(i).fpWen
        wb.bits := io.fpWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vfBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVec
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vfWriteBack(i).wen && io.vfWriteBack(i).vecWen
        wb.bits := io.vfWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  v0BusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isV0
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.v0WriteBack(i).wen && io.v0WriteBack(i).v0Wen
        wb.bits := io.v0WriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vlBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVl
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vlWriteBack(i).wen && io.vlWriteBack(i).vlWen
        wb.bits := io.vlWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.og0Cancel := io.fromDataPath.og0Cancel
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  rcTagTable match {
    case Some(rct) =>
      rct.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isInt
        btAllocPregs.bits := dpAllocPregs.preg
      }
      rct.io.wakeupFromIQ := io.fromSchedulers.wakeupVec
      rct.io.og0Cancel := io.fromDataPath.og0Cancel
      rct.io.ldCancel := io.ldCancel
    case None =>
  }
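
  // Writeback-based wakeup: every register-file write port is mirrored into a ValidIO
  // wakeup bundle below, which the issue queues use to wake uops waiting on that pdest.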
  val wakeupFromIntWBVec = Wire(params.genIntWBWakeUpSinkValidBundle)
  val wakeupFromFpWBVec = Wire(params.genFpWBWakeUpSinkValidBundle)
  val wakeupFromVfWBVec = Wire(params.genVfWBWakeUpSinkValidBundle)
  val wakeupFromV0WBVec = Wire(params.genV0WBWakeUpSinkValidBundle)
  val wakeupFromVlWBVec = Wire(params.genVlWBWakeUpSinkValidBundle)

  wakeupFromIntWBVec.zip(io.intWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromFpWBVec.zip(io.fpWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVfWBVec.zip(io.vfWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromV0WBVec.zip(io.v0WriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVlWBVec.zip(io.vlWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  // Connect bundles having the same wakeup source
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.wakeupFromIQ.foreach { wakeUp =>
      val wakeUpIn = iqWakeUpInMap(wakeUp.bits.exuIdx)
      val exuIdx = wakeUp.bits.exuIdx
      println(s"[Backend] Connect wakeup exuIdx ${exuIdx}")
      connectSamePort(wakeUp, wakeUpIn)
      backendParams.connectWakeup(exuIdx)
      if (backendParams.isCopyPdest(exuIdx)) {
        println(s"[Backend] exuIdx ${exuIdx} use pdestCopy ${backendParams.getCopyPdestIndex(exuIdx)}")
        wakeUp.bits.pdest := wakeUpIn.bits.pdestCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.rfWenCopy.nonEmpty) wakeUp.bits.rfWen := wakeUpIn.bits.rfWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.fpWenCopy.nonEmpty) wakeUp.bits.fpWen := wakeUpIn.bits.fpWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vecWenCopy.nonEmpty) wakeUp.bits.vecWen := wakeUpIn.bits.vecWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.v0WenCopy.nonEmpty) wakeUp.bits.v0Wen := wakeUpIn.bits.v0WenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vlWenCopy.nonEmpty) wakeUp.bits.vlWen := wakeUpIn.bits.vlWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.loadDependencyCopy.nonEmpty) wakeUp.bits.loadDependency := wakeUpIn.bits.loadDependencyCopy.get(backendParams.getCopyPdestIndex(exuIdx))
      }
      if (iq.params.numIntSrc == 0) wakeUp.bits.rfWen := false.B
      if (iq.params.numFpSrc == 0) wakeUp.bits.fpWen := false.B
      if (iq.params.numVfSrc == 0) wakeUp.bits.vecWen := false.B
      if (iq.params.numV0Src == 0) wakeUp.bits.v0Wen := false.B
      if (iq.params.numVlSrc == 0) wakeUp.bits.vlWen := false.B
    }
    iq.io.og0Cancel := io.fromDataPath.og0Cancel
    iq.io.og1Cancel := io.fromDataPath.og1Cancel
    if (iq.params.needLoadDependency)
      iq.io.ldCancel := io.ldCancel
    else
      iq.io.ldCancel := 0.U.asTypeOf(io.ldCancel)
  }

  // connect the vl writeback information to the issue queues
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.vlFromIntIsVlmax := io.vlWriteBackInfo.vlFromIntIsVlmax
    iq.io.vlFromIntIsZero := io.vlWriteBackInfo.vlFromIntIsZero
    iq.io.vlFromVfIsVlmax := io.vlWriteBackInfo.vlFromVfIsVlmax
    iq.io.vlFromVfIsZero := io.vlWriteBackInfo.vlFromVfIsZero
  }
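
  // Collect the wakeup signals produced by the local issue queues, keyed by EXU index,
  // so they can be forwarded to the other scheduler blocks via io.toSchedulers.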
  private val iqWakeUpOutMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    issueQueues.flatMap(_.io.wakeupToIQ)
      .map(x => (x.bits.exuIdx, x))
      .toMap

  // Connect bundles having the same wakeup source
  io.toSchedulers.wakeupVec.foreach { wakeUp =>
    wakeUp := iqWakeUpOutMap(wakeUp.bits.exuIdx)
  }

  io.toDataPathAfterDelay.zipWithIndex.foreach { case (toDpDy, i) =>
    toDpDy <> issueQueues(i).io.deqDelay
  }

  // Response
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.og0Resp.zipWithIndex.foreach { case (og0Resp, j) =>
      og0Resp := io.fromDataPath(i)(j).og0resp
    }
    iq.io.og1Resp.zipWithIndex.foreach { case (og1Resp, j) =>
      og1Resp := io.fromDataPath(i)(j).og1resp
    }
    iq.io.finalIssueResp.foreach(_.zipWithIndex.foreach { case (finalIssueResp, j) =>
      if (io.loadFinalIssueResp(i).isDefinedAt(j) && iq.params.isLdAddrIQ) {
        finalIssueResp := io.loadFinalIssueResp(i)(j)
      } else if (io.vecLoadFinalIssueResp(i).isDefinedAt(j) && iq.params.isVecLduIQ) {
        finalIssueResp := io.vecLoadFinalIssueResp(i)(j)
      } else {
        finalIssueResp := 0.U.asTypeOf(finalIssueResp)
      }
    })
    iq.io.memAddrIssueResp.foreach(_.zipWithIndex.foreach { case (memAddrIssueResp, j) =>
      if (io.memAddrIssueResp(i).isDefinedAt(j)) {
        memAddrIssueResp := io.memAddrIssueResp(i)(j)
      } else {
        memAddrIssueResp := 0.U.asTypeOf(memAddrIssueResp)
      }
    })
    iq.io.vecLoadIssueResp.foreach(_.zipWithIndex.foreach { case (resp, deqIdx) =>
      resp := io.vecLoadIssueResp(i)(deqIdx)
    })
    iq.io.wbBusyTableRead := io.fromWbFuBusyTable.fuBusyTableRead(i)
    io.wbFuBusyTable(i) := iq.io.wbBusyTableWrite
    iq.io.replaceRCIdx.foreach(x => x := 0.U.asTypeOf(x))
  }
  if (params.needOg2Resp) {
    issueQueues.filter(_.params.needOg2Resp).zip(io.fromOg2Resp.get).foreach { case (iq, og2RespVec) =>
      iq.io.og2Resp.get.zip(og2RespVec).foreach { case (iqOg2Resp, og2Resp) =>
        iqOg2Resp := og2Resp
      }
    }
  }

  // Connect each replace RCIdx to IQ
  if (params.needWriteRegCache) {
    val iqReplaceRCIdxVec = issueQueues.filter(_.params.needWriteRegCache).flatMap { iq =>
      iq.params.allExuParams.zip(iq.io.replaceRCIdx.get).filter(_._1.needWriteRegCache).map(_._2)
    }
    iqReplaceRCIdxVec.zip(io.fromDataPath.replaceRCIdx.get).foreach { case (iq, in) =>
      iq := in
    }

    println(s"[Scheduler] numWriteRegCache: ${params.numWriteRegCache}")
    println(s"[Scheduler] iqReplaceRCIdxVec: ${iqReplaceRCIdxVec.size}")
  }

  // perfEvent
  val lastCycleDp2IqOutFireVec = RegNext(VecInit(dispatch2Iq.io.out.flatten.map(_.fire)))
  val lastCycleIqEnqFireVec = RegNext(VecInit(issueQueues.map(_.io.enq.map(_.fire)).flatten))
  val lastCycleIqFullVec = RegNext(VecInit(issueQueues.map(_.io.enq.head.ready)))

  val issueQueueFullVecPerf = issueQueues.zip(lastCycleIqFullVec).map { case (iq, full) => (iq.params.getIQName + s"_full", full) }
  val basePerfEvents = Seq(
    ("dispatch2Iq_out_fire_cnt", PopCount(lastCycleDp2IqOutFireVec)),
    ("issueQueue_enq_fire_cnt",  PopCount(lastCycleIqEnqFireVec))
  ) ++ issueQueueFullVecPerf

  println(s"[Scheduler] io.fromSchedulers.wakeupVec: ${io.fromSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
  println(s"[Scheduler] iqWakeUpInKeys: ${iqWakeUpInMap.keys}")

  println(s"[Scheduler] iqWakeUpOutKeys: ${iqWakeUpOutMap.keys}")
  println(s"[Scheduler] io.toSchedulers.wakeupVec: ${io.toSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
}
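
// Scheduler implementation for the int/fp/vf blocks: enqueue comes directly from Dispatch2Iq,
// and the writeback wakeup sources are selected according to the scheduler type.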
class SchedulerArithImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
//  dontTouch(io.vfWbFuBusyTable)
  println(s"[SchedulerArithImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    if (!iq.params.needLoadDependency) {
      iq.io.enq.map(x => x.bits.srcLoadDependency := 0.U.asTypeOf(x.bits.srcLoadDependency))
    }
    val intWBIQ = params.schdType match {
      case IntScheduler() => wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case FpScheduler() => wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case VfScheduler() => (wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1))
      case _ => null
    }
    iq.io.wakeupFromWB.zip(intWBIQ).foreach { case (sink, source) => sink := source }
  }

  val perfEvents = basePerfEvents
  generatePerfEvent()
}

// FIXME: Vector mem instructions may not be handled properly!
class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
  println(s"[SchedulerMemImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  val memAddrIQs = issueQueues.filter(_.params.isMemAddrIQ)
  val stAddrIQs = issueQueues.filter(iq => iq.params.StaCnt > 0) // included in memAddrIQs
  val ldAddrIQs = issueQueues.filter(iq => iq.params.LduCnt > 0)
  val stDataIQs = issueQueues.filter(iq => iq.params.StdCnt > 0)
  val vecMemIQs = issueQueues.filter(_.params.isVecMemIQ)
  val (hyuIQs, hyuIQIdxs) = issueQueues.zipWithIndex.filter(_._1.params.HyuCnt > 0).unzip

  println(s"[SchedulerMemImp] memAddrIQs.size: ${memAddrIQs.size}, enq.size: ${memAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stAddrIQs.size: ${stAddrIQs.size}, enq.size: ${stAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] ldAddrIQs.size: ${ldAddrIQs.size}, enq.size: ${ldAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stDataIQs.size: ${stDataIQs.size}, enq.size: ${stDataIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] hyuIQs.size: ${hyuIQs.size}, enq.size: ${hyuIQs.map(_.io.enq.size).sum}")
  require(memAddrIQs.nonEmpty && stDataIQs.nonEmpty)

  io.toMem.get.loadFastMatch := 0.U.asTypeOf(io.toMem.get.loadFastMatch) // TODO: is this still needed?
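
  // Load wakeups generated inside memBlock are forwarded to the IQs that contain load
  // (or hybrid) execution units, so that dependent uops can be woken up early.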
  private val loadWakeUp = issueQueues.filter(_.params.LdExuCnt > 0).map(_.asInstanceOf[IssueQueueMemAddrImp].io.memIO.get.loadWakeUp).flatten
  require(loadWakeUp.length == io.fromMem.get.wakeup.length)
  loadWakeUp.zip(io.fromMem.get.wakeup).foreach(x => x._1 := x._2)

  memAddrIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    if (!iq.params.needLoadDependency) {
      iq.io.enq.map(x => x.bits.srcLoadDependency := 0.U.asTypeOf(x.bits.srcLoadDependency))
    }
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1)
    ).foreach { case (sink, source) => sink := source }
  }

  ldAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO.head)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  stAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.staFeedback(i)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  hyuIQs.zip(hyuIQIdxs).foreach {
    case (imp: IssueQueueMemAddrImp, idx) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.hyuFeedback.head
      imp.io.memIO.get.feedbackIO(1) := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO(1))
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      // TODO: refactor dirty code
      imp.io.deqDelay(1).ready := false.B
      io.toDataPathAfterDelay(idx)(1).valid := false.B
      io.toDataPathAfterDelay(idx)(1).bits := 0.U.asTypeOf(io.toDataPathAfterDelay(idx)(1).bits)
    case _ =>
  }

  private val staIdxSeq = (stAddrIQs).map(iq => iq.params.idxInSchBlk)
  private val hyaIdxSeq = (hyuIQs).map(iq => iq.params.idxInSchBlk)

  println(s"[SchedulerMemImp] sta iq idx in memSchdBlock: $staIdxSeq")
  println(s"[SchedulerMemImp] hya iq idx in memSchdBlock: $hyaIdxSeq")

  private val staEnqs = stAddrIQs.map(_.io.enq).flatten
  private val stdEnqs = stDataIQs.map(_.io.enq).flatten.take(staEnqs.size)
  private val hyaEnqs = hyuIQs.map(_.io.enq).flatten
  private val hydEnqs = stDataIQs.map(_.io.enq).flatten.drop(staEnqs.size)

  require(staEnqs.size == stdEnqs.size, s"number of enq ports of store address IQs(${staEnqs.size}) " +
    s"should be equal to number of enq ports of store data IQs(${stdEnqs.size})")

  require(hyaEnqs.size == hydEnqs.size, s"number of enq ports of hybrid address IQs(${hyaEnqs.size}) " +
    s"should be equal to number of enq ports of hybrid data IQs(${hydEnqs.size})")
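
  // A store (or hybrid store) uop enqueues into its address IQ and data IQ in lock-step:
  // the dispatch port is ready only when both IQs are ready, and the data IQ only accepts
  // uops whose fuType is stu/mou.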
  val d2IqStaOut = dispatch2Iq.io.out.zipWithIndex.filter(staIdxSeq contains _._2).unzip._1.flatten
  d2IqStaOut.zip(staEnqs).zip(stdEnqs).foreach { case ((dp, staIQ), stdIQ) =>
    val isAllReady = staIQ.ready && stdIQ.ready
    dp.ready := isAllReady
    staIQ.valid := dp.valid && isAllReady
    stdIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  val d2IqHyaOut = dispatch2Iq.io.out.zipWithIndex.filter(hyaIdxSeq contains _._2).unzip._1.flatten
  d2IqHyaOut.zip(hyaEnqs).zip(hydEnqs).foreach { case ((dp, hyaIQ), hydIQ) =>
    val isAllReady = hyaIQ.ready && hydIQ.ready
    dp.ready := isAllReady
    hyaIQ.valid := dp.valid && isAllReady
    hydIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  stDataIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
    ).foreach { case (sink, source) => sink := source }
  }

  (stdEnqs ++ hydEnqs).zip(staEnqs ++ hyaEnqs).zipWithIndex.foreach { case ((stdIQEnq, staIQEnq), i) =>
    stdIQEnq.bits := staIQEnq.bits
    // Store data reuses store addr src(1) in dispatch2iq
    // [dispatch2iq] --src*------src*(0)--> [staIQ|hyaIQ]
    //                      \
    //                       ---src*(1)--> [stdIQ]
    // Since the src(1) of sta is easier to get, stdIQEnq.bits.src*(0) is taken from staIQEnq.bits.src*(1)
    // instead of from dispatch2Iq.io.out(x).bits.src*(1)
    val stdIdx = 1
    stdIQEnq.bits.srcState(0) := staIQEnq.bits.srcState(stdIdx)
    stdIQEnq.bits.srcLoadDependency(0) := staIQEnq.bits.srcLoadDependency(stdIdx)
    stdIQEnq.bits.srcType(0) := staIQEnq.bits.srcType(stdIdx)
    stdIQEnq.bits.psrc(0) := staIQEnq.bits.psrc(stdIdx)
    stdIQEnq.bits.sqIdx := staIQEnq.bits.sqIdx
    stdIQEnq.bits.useRegCache(0) := staIQEnq.bits.useRegCache(stdIdx)
    stdIQEnq.bits.regCacheIdx(0) := staIQEnq.bits.regCacheIdx(stdIdx)
  }
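
  // Vector memory IQs additionally receive the LSQ dequeue pointers and the store issue
  // pointer for their wait/ordering checks; their feedback ports are driven by vstuFeedback below.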
  vecMemIQs.foreach {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.sqDeqPtr.foreach(_ := io.fromMem.get.sqDeqPtr)
      imp.io.memIO.get.lqDeqPtr.foreach(_ := io.fromMem.get.lqDeqPtr)
      // not used
      //imp.io.memIO.get.feedbackIO.head := io.fromMem.get.vstuFeedback.head // only vector store replay
      // maybe not used
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      imp.io.wakeupFromWB.zip(
        wakeupFromIntWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromFpWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVfWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => imp.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
      ).foreach { case (sink, source) => sink := source }

    case _ =>
  }
  val vecMemFeedbackIO: Seq[MemRSFeedbackIO] = vecMemIQs.map {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.feedbackIO
  }.flatten
  assert(vecMemFeedbackIO.size == io.fromMem.get.vstuFeedback.size, "vecMemFeedback sizes don't match!")
  vecMemFeedbackIO.zip(io.fromMem.get.vstuFeedback).foreach {
    case (sink, source) =>
      sink := source
  }

  val lsqEnqCtrl = Module(new LsqEnqCtrl)

  lsqEnqCtrl.io.redirect <> io.fromCtrlBlock.flush
  lsqEnqCtrl.io.enq <> dispatch2Iq.io.enqLsqIO.get
  lsqEnqCtrl.io.lcommit := io.fromMem.get.lcommit
  lsqEnqCtrl.io.scommit := io.fromMem.get.scommit
  lsqEnqCtrl.io.lqCancelCnt := io.fromMem.get.lqCancelCnt
  lsqEnqCtrl.io.sqCancelCnt := io.fromMem.get.sqCancelCnt
  dispatch2Iq.io.lqFreeCount.get := lsqEnqCtrl.io.lqFreeCount
  dispatch2Iq.io.sqFreeCount.get := lsqEnqCtrl.io.sqFreeCount
  io.memIO.get.lsqEnqIO <> lsqEnqCtrl.io.enqLsq

  dontTouch(io.vecLoadIssueResp)

  val intBusyTablePerf = intBusyTable.get
  val fpBusyTablePerf = fpBusyTable.get
  val vecBusyTablePerf = vfBusyTable.get
  val v0BusyTablePerf = v0BusyTable.get
  val vlBusyTablePerf = vlBusyTable.get

  val perfEvents = basePerfEvents ++ Seq(intBusyTablePerf, fpBusyTablePerf, vecBusyTablePerf, v0BusyTablePerf, vlBusyTablePerf).flatten(_.getPerfEvents)
  generatePerfEvent()
}