package xiangshan.backend.issue

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils.{HasPerfEvents, OptionWrapper}
import xiangshan._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.fu.FuType
import xiangshan.backend.regfile.RfWritePortWithConfig
import xiangshan.backend.rename.BusyTable
import xiangshan.mem.{LsqEnqCtrl, LsqEnqIO, MemWaitUpdateReq, SqPtr, LqPtr}
import xiangshan.backend.datapath.WbConfig.V0WB
import xiangshan.backend.regfile.VlPregParams

sealed trait SchedulerType

case class IntScheduler() extends SchedulerType
case class FpScheduler() extends SchedulerType
case class MemScheduler() extends SchedulerType
case class VfScheduler() extends SchedulerType
case class NoScheduler() extends SchedulerType

class Scheduler(val params: SchdBlockParams)(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false

  val numIntStateWrite = backendParams.numPregWb(IntData())
  val numFpStateWrite = backendParams.numPregWb(FpData())
  val numVfStateWrite = backendParams.numPregWb(VecData())
  val numV0StateWrite = backendParams.numPregWb(V0Data())
  val numVlStateWrite = backendParams.numPregWb(VlData())

  val dispatch2Iq = LazyModule(new Dispatch2Iq(params))
  val issueQueue = params.issueBlockParams.map(x => LazyModule(new IssueQueue(x).suggestName(x.getIQName)))

  lazy val module: SchedulerImpBase = params.schdType match {
    case IntScheduler() => new SchedulerArithImp(this)(params, p)
    case FpScheduler() => new SchedulerArithImp(this)(params, p)
    case MemScheduler() => new SchedulerMemImp(this)(params, p)
    case VfScheduler() => new SchedulerArithImp(this)(params, p)
    case _ => null
  }
}

class SchedulerIO()(implicit params: SchdBlockParams, p: Parameters) extends XSBundle {
  // params alias
  private val LoadQueueSize = VirtualLoadQueueSize

  val fromTop = new Bundle {
    val hartId = Input(UInt(8.W))
  }
  val fromWbFuBusyTable = new Bundle {
    val fuBusyTableRead = MixedVec(params.issueBlockParams.map(x => Input(x.genWbFuBusyTableReadBundle)))
  }
  val wbFuBusyTable = MixedVec(params.issueBlockParams.map(x => Output(x.genWbFuBusyTableWriteBundle)))
  val intIQValidNumVec = Output(MixedVec(backendParams.genIntIQValidNumBundle))
  val fpIQValidNumVec = Output(MixedVec(backendParams.genFpIQValidNumBundle))

  val fromCtrlBlock = new Bundle {
    val flush = Flipped(ValidIO(new Redirect))
  }
  val fromDispatch = new Bundle {
    val allocPregs = Vec(RenameWidth, Input(new ResetPregStateReq))
    val uops = Vec(params.numUopIn, Flipped(DecoupledIO(new DynInst)))
  }
  val intWriteBack = MixedVec(Vec(backendParams.numPregWb(IntData()),
    new RfWritePortWithConfig(backendParams.intPregParams.dataCfg, backendParams.intPregParams.addrWidth)))
  val fpWriteBack = MixedVec(Vec(backendParams.numPregWb(FpData()),
    new RfWritePortWithConfig(backendParams.fpPregParams.dataCfg, backendParams.fpPregParams.addrWidth)))
  val vfWriteBack = MixedVec(Vec(backendParams.numPregWb(VecData()),
    new RfWritePortWithConfig(backendParams.vfPregParams.dataCfg, backendParams.vfPregParams.addrWidth)))
  val v0WriteBack = MixedVec(Vec(backendParams.numPregWb(V0Data()),
    new RfWritePortWithConfig(backendParams.v0PregParams.dataCfg, backendParams.v0PregParams.addrWidth)))
  val vlWriteBack = MixedVec(Vec(backendParams.numPregWb(VlData()),
    new RfWritePortWithConfig(backendParams.vlPregParams.dataCfg, backendParams.vlPregParams.addrWidth)))
  val toDataPathAfterDelay: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = MixedVec(params.issueBlockParams.map(_.genIssueDecoupledBundle))

  val vlWriteBackInfo = new Bundle {
    val vlIsZero = Input(Bool())
    val vlIsVlmax = Input(Bool())
  }

  val fromSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
  }

  val toSchedulers = new Bundle {
    val wakeupVec: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = params.genIQWakeUpOutValidBundle
  }

  val fromDataPath = new Bundle {
    val resp: MixedVec[MixedVec[OGRespBundle]] = MixedVec(params.issueBlockParams.map(x => Flipped(x.genOGRespBundle)))
    val og0Cancel = Input(ExuOH(backendParams.numExu))
    // Todo: remove this once og1 no longer produces a cancel signal
    val og1Cancel = Input(ExuOH(backendParams.numExu))
    val cancelToBusyTable = Vec(backendParams.numExu, Flipped(ValidIO(new CancelSignal)))
    // kept for compatibility with old code
    def apply(i: Int)(j: Int) = resp(i)(j)
  }

  val loadFinalIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val memAddrIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.LdExuCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))
  val vecLoadIssueResp = MixedVec(params.issueBlockParams.map(x => MixedVec(Vec(x.VlduCnt, Flipped(ValidIO(new IssueQueueDeqRespBundle()(p, x)))))))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val memIO = if (params.isMemSchd) Some(new Bundle {
    val lsqEnqIO = Flipped(new LsqEnqIO)
  }) else None
  val fromMem = if (params.isMemSchd) Some(new Bundle {
    val ldaFeedback = Flipped(Vec(params.LduCnt, new MemRSFeedbackIO))
    val staFeedback = Flipped(Vec(params.StaCnt, new MemRSFeedbackIO))
    val hyuFeedback = Flipped(Vec(params.HyuCnt, new MemRSFeedbackIO))
    val vstuFeedback = Flipped(Vec(params.VstuCnt, new MemRSFeedbackIO(isVector = true)))
    val vlduFeedback = Flipped(Vec(params.VlduCnt, new MemRSFeedbackIO(isVector = true)))
    val stIssuePtr = Input(new SqPtr())
    val lcommit = Input(UInt(log2Up(CommitWidth + 1).W))
    val scommit = Input(UInt(log2Ceil(EnsbufferWidth + 1).W)) // connected to `memBlock.io.sqDeq` instead of ROB
    val wakeup = Vec(params.LdExuCnt, Flipped(Valid(new DynInst)))
    val lqDeqPtr = Input(new LqPtr)
    val sqDeqPtr = Input(new SqPtr)
    // from lsq
    val lqCancelCnt = Input(UInt(log2Up(LoadQueueSize + 1).W))
    val sqCancelCnt = Input(UInt(log2Up(StoreQueueSize + 1).W))
    val memWaitUpdateReq = Flipped(new MemWaitUpdateReq)
  }) else None
  val toMem = if (params.isMemSchd) Some(new Bundle {
    val loadFastMatch = Output(Vec(params.LduCnt, new IssueQueueLoadBundle))
  }) else None
  val fromOg2 = if (params.isVfSchd) Some(MixedVec(params.issueBlockParams.map(x => Flipped(x.genOG2RespBundle)))) else None
}

abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends LazyModuleImp(wrapper)
  with HasXSParameter
{
  val io = IO(new SchedulerIO())

  // alias
  private val iqWakeUpInMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    io.fromSchedulers.wakeupVec.map(x => (x.bits.exuIdx, x)).toMap
  private val schdType = params.schdType

  // Modules
  val dispatch2Iq: Dispatch2IqImp = wrapper.dispatch2Iq.module
  val issueQueues: Seq[IssueQueueImp] = wrapper.issueQueue.map(_.module)
  io.intIQValidNumVec := 0.U.asTypeOf(io.intIQValidNumVec)
  io.fpIQValidNumVec := 0.U.asTypeOf(io.fpIQValidNumVec)
  if (params.isIntSchd) {
    dispatch2Iq.io.intIQValidNumVec.get := io.intIQValidNumVec
    io.intIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }
  else if (params.isFpSchd) {
    dispatch2Iq.io.fpIQValidNumVec.get := io.fpIQValidNumVec
    io.fpIQValidNumVec := MixedVecInit(issueQueues.map(_.io.validCntDeqVec))
  }

  // valid count
  dispatch2Iq.io.iqValidCnt := issueQueues.filter(_.params.StdCnt == 0).map(_.io.status.validCnt)

  // BusyTable Modules
  val intBusyTable = schdType match {
    case IntScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numIntStateRead, wrapper.numIntStateWrite, IntPhyRegs, IntWB())))
    case _ => None
  }
  val fpBusyTable = schdType match {
    case FpScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numFpStateRead, wrapper.numFpStateWrite, FpPhyRegs, FpWB())))
    case _ => None
  }
  val vfBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVfStateRead, wrapper.numVfStateWrite, VfPhyRegs, VfWB())))
    case _ => None
  }
  val v0BusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numV0StateRead, wrapper.numV0StateWrite, V0PhyRegs, V0WB())))
    case _ => None
  }
  val vlBusyTable = schdType match {
    case VfScheduler() | MemScheduler() => Some(Module(new BusyTable(dispatch2Iq.numVlStateRead, wrapper.numVlStateWrite, VlPhyRegs, VlWB())))
    case _ => None
  }

  dispatch2Iq.io match { case dp2iq =>
    dp2iq.redirect <> io.fromCtrlBlock.flush
    dp2iq.in <> io.fromDispatch.uops
    dp2iq.readIntState.foreach(_ <> intBusyTable.get.io.read)
    dp2iq.readFpState.foreach(_ <> fpBusyTable.get.io.read)
    dp2iq.readVfState.foreach(_ <> vfBusyTable.get.io.read)
    dp2iq.readV0State.foreach(_ <> v0BusyTable.get.io.read)
    dp2iq.readVlState.foreach(_ <> vlBusyTable.get.io.read)
  }

  intBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isInt
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.intWriteBack(i).wen && io.intWriteBack(i).intWen
        wb.bits := io.intWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.cancel := io.fromDataPath.cancelToBusyTable
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  fpBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isFp
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.fpWriteBack(i).wen && io.fpWriteBack(i).fpWen
        wb.bits := io.fpWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.cancel := io.fromDataPath.cancelToBusyTable
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vfBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVec
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vfWriteBack(i).wen && io.vfWriteBack(i).vecWen
        wb.bits := io.vfWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.cancel := io.fromDataPath.cancelToBusyTable
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  v0BusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isV0
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.v0WriteBack(i).wen && io.v0WriteBack(i).v0Wen
        wb.bits := io.v0WriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.cancel := io.fromDataPath.cancelToBusyTable
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  vlBusyTable match {
    case Some(bt) =>
      bt.io.allocPregs.zip(io.fromDispatch.allocPregs).foreach { case (btAllocPregs, dpAllocPregs) =>
        btAllocPregs.valid := dpAllocPregs.isVl
        btAllocPregs.bits := dpAllocPregs.preg
      }
      bt.io.wbPregs.zipWithIndex.foreach { case (wb, i) =>
        wb.valid := io.vlWriteBack(i).wen && io.vlWriteBack(i).vlWen
        wb.bits := io.vlWriteBack(i).addr
      }
      bt.io.wakeUp := io.fromSchedulers.wakeupVec
      bt.io.cancel := io.fromDataPath.cancelToBusyTable
      bt.io.ldCancel := io.ldCancel
    case None =>
  }

  val wakeupFromIntWBVec = Wire(params.genIntWBWakeUpSinkValidBundle)
  val wakeupFromFpWBVec = Wire(params.genFpWBWakeUpSinkValidBundle)
  val wakeupFromVfWBVec = Wire(params.genVfWBWakeUpSinkValidBundle)
  val wakeupFromV0WBVec = Wire(params.genV0WBWakeUpSinkValidBundle)
  val wakeupFromVlWBVec = Wire(params.genVlWBWakeUpSinkValidBundle)

  wakeupFromIntWBVec.zip(io.intWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromFpWBVec.zip(io.fpWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVfWBVec.zip(io.vfWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromV0WBVec.zip(io.v0WriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  wakeupFromVlWBVec.zip(io.vlWriteBack).foreach { case (sink, source) =>
    sink.valid := source.wen
    sink.bits.rfWen := source.intWen
    sink.bits.fpWen := source.fpWen
    sink.bits.vecWen := source.vecWen
    sink.bits.v0Wen := source.v0Wen
    sink.bits.vlWen := source.vlWen
    sink.bits.pdest := source.addr
  }

  // Connect bundles having the same wakeup source
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.wakeupFromIQ.foreach { wakeUp =>
      val wakeUpIn = iqWakeUpInMap(wakeUp.bits.exuIdx)
      val exuIdx = wakeUp.bits.exuIdx
      println(s"[Backend] Connect wakeup exuIdx ${exuIdx}")
      connectSamePort(wakeUp, wakeUpIn)
      backendParams.connectWakeup(exuIdx)
      if (backendParams.isCopyPdest(exuIdx)) {
        println(s"[Backend] exuIdx ${exuIdx} use pdestCopy ${backendParams.getCopyPdestIndex(exuIdx)}")
        wakeUp.bits.pdest := wakeUpIn.bits.pdestCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.rfWenCopy.nonEmpty) wakeUp.bits.rfWen := wakeUpIn.bits.rfWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.fpWenCopy.nonEmpty) wakeUp.bits.fpWen := wakeUpIn.bits.fpWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vecWenCopy.nonEmpty) wakeUp.bits.vecWen := wakeUpIn.bits.vecWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.v0WenCopy.nonEmpty) wakeUp.bits.v0Wen := wakeUpIn.bits.v0WenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.vlWenCopy.nonEmpty) wakeUp.bits.vlWen := wakeUpIn.bits.vlWenCopy.get(backendParams.getCopyPdestIndex(exuIdx))
        if (wakeUpIn.bits.loadDependencyCopy.nonEmpty) wakeUp.bits.loadDependency := wakeUpIn.bits.loadDependencyCopy.get(backendParams.getCopyPdestIndex(exuIdx))
      }
      if (iq.params.numIntSrc == 0) wakeUp.bits.rfWen := false.B
      if (iq.params.numFpSrc == 0) wakeUp.bits.fpWen := false.B
      if (iq.params.numVfSrc == 0) wakeUp.bits.vecWen := false.B
      if (iq.params.numV0Src == 0) wakeUp.bits.v0Wen := false.B
      if (iq.params.numVlSrc == 0) wakeUp.bits.vlWen := false.B
    }
    iq.io.og0Cancel := io.fromDataPath.og0Cancel
    iq.io.og1Cancel := io.fromDataPath.og1Cancel
    iq.io.ldCancel := io.ldCancel
  }

  // connect the vl writeback information to the issue queues
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.vlIsVlmax := io.vlWriteBackInfo.vlIsVlmax
    iq.io.vlIsZero := io.vlWriteBackInfo.vlIsZero
  }

  private val iqWakeUpOutMap: Map[Int, ValidIO[IssueQueueIQWakeUpBundle]] =
    issueQueues.flatMap(_.io.wakeupToIQ)
      .map(x => (x.bits.exuIdx, x))
      .toMap

  // Connect bundles having the same wakeup source
  io.toSchedulers.wakeupVec.foreach { wakeUp =>
    wakeUp := iqWakeUpOutMap(wakeUp.bits.exuIdx)
  }

  io.toDataPathAfterDelay.zipWithIndex.foreach { case (toDpDy, i) =>
    toDpDy <> issueQueues(i).io.deqDelay
  }

  // Response
  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.og0Resp.zipWithIndex.foreach { case (og0Resp, j) =>
      og0Resp := io.fromDataPath(i)(j).og0resp
    }
    iq.io.og1Resp.zipWithIndex.foreach { case (og1Resp, j) =>
      og1Resp := io.fromDataPath(i)(j).og1resp
    }
    iq.io.finalIssueResp.foreach(_.zipWithIndex.foreach { case (finalIssueResp, j) =>
      if (io.loadFinalIssueResp(i).isDefinedAt(j)) {
        finalIssueResp := io.loadFinalIssueResp(i)(j)
      } else {
        finalIssueResp := 0.U.asTypeOf(finalIssueResp)
      }
    })
    iq.io.memAddrIssueResp.foreach(_.zipWithIndex.foreach { case (memAddrIssueResp, j) =>
      if (io.memAddrIssueResp(i).isDefinedAt(j)) {
        memAddrIssueResp := io.memAddrIssueResp(i)(j)
      } else {
        memAddrIssueResp := 0.U.asTypeOf(memAddrIssueResp)
      }
    })
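    // Vector-load issue responses arrive per deq port (io.vecLoadIssueResp is sized by VlduCnt for each
    // issue block) and are forwarded to the matching IQ deq port below.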
    iq.io.vecLoadIssueResp.foreach(_.zipWithIndex.foreach { case (resp, deqIdx) =>
      resp := io.vecLoadIssueResp(i)(deqIdx)
    })
    if (params.isVfSchd) {
      iq.io.og2Resp.get.zipWithIndex.foreach { case (og2Resp, exuIdx) =>
        og2Resp := io.fromOg2.get(i)(exuIdx)
      }
    }
    iq.io.wbBusyTableRead := io.fromWbFuBusyTable.fuBusyTableRead(i)
    io.wbFuBusyTable(i) := iq.io.wbBusyTableWrite
  }

  // perfEvent
  val lastCycleDp2IqOutFireVec = RegNext(VecInit(dispatch2Iq.io.out.flatten.map(_.fire)))
  val lastCycleIqEnqFireVec = RegNext(VecInit(issueQueues.map(_.io.enq.map(_.fire)).flatten))
  val lastCycleIqFullVec = RegNext(VecInit(issueQueues.map(_.io.enq.head.ready)))

  val issueQueueFullVecPerf = issueQueues.zip(lastCycleIqFullVec).map { case (iq, full) => (iq.params.getIQName + s"_full", full) }
  val basePerfEvents = Seq(
    ("dispatch2Iq_out_fire_cnt", PopCount(lastCycleDp2IqOutFireVec) ),
    ("issueQueue_enq_fire_cnt", PopCount(lastCycleIqEnqFireVec) )
  ) ++ issueQueueFullVecPerf

  println(s"[Scheduler] io.fromSchedulers.wakeupVec: ${io.fromSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
  println(s"[Scheduler] iqWakeUpInKeys: ${iqWakeUpInMap.keys}")

  println(s"[Scheduler] iqWakeUpOutKeys: ${iqWakeUpOutMap.keys}")
  println(s"[Scheduler] io.toSchedulers.wakeupVec: ${io.toSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}")
}

class SchedulerArithImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
// dontTouch(io.vfWbFuBusyTable)
  println(s"[SchedulerArithImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  issueQueues.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    val intWBIQ = params.schdType match {
      case IntScheduler() => wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case FpScheduler() => wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1)
      case VfScheduler() => (wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1))
      case _ => null
    }
    iq.io.wakeupFromWB.zip(intWBIQ).foreach { case (sink, source) => sink := source }
  }

  val perfEvents = basePerfEvents
  generatePerfEvent()
}

// FIXME: Vector mem instructions may not be handled properly!
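// SchedulerMemImp wires the memory scheduler block: store-address and store-data IQs share dispatch
// ports (a store uop enqueues only when both IQs are ready), memory IQs receive feedback, store-wait
// and LSQ pointer state from memBlock, and LSQ entry allocation is driven through LsqEnqCtrl.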
class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters)
  extends SchedulerImpBase(wrapper)
  with HasXSParameter
  with HasPerfEvents
{
  println(s"[SchedulerMemImp] " +
    s"has intBusyTable: ${intBusyTable.nonEmpty}, " +
    s"has vfBusyTable: ${vfBusyTable.nonEmpty}")

  val memAddrIQs = issueQueues.filter(_.params.isMemAddrIQ)
  val stAddrIQs = issueQueues.filter(iq => iq.params.StaCnt > 0) // included in memAddrIQs
  val ldAddrIQs = issueQueues.filter(iq => iq.params.LduCnt > 0)
  val stDataIQs = issueQueues.filter(iq => iq.params.StdCnt > 0)
  val vecMemIQs = issueQueues.filter(_.params.isVecMemIQ)
  val (hyuIQs, hyuIQIdxs) = issueQueues.zipWithIndex.filter(_._1.params.HyuCnt > 0).unzip

  println(s"[SchedulerMemImp] memAddrIQs.size: ${memAddrIQs.size}, enq.size: ${memAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stAddrIQs.size: ${stAddrIQs.size}, enq.size: ${stAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] ldAddrIQs.size: ${ldAddrIQs.size}, enq.size: ${ldAddrIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] stDataIQs.size: ${stDataIQs.size}, enq.size: ${stDataIQs.map(_.io.enq.size).sum}")
  println(s"[SchedulerMemImp] hyuIQs.size: ${hyuIQs.size}, enq.size: ${hyuIQs.map(_.io.enq.size).sum}")
  require(memAddrIQs.nonEmpty && stDataIQs.nonEmpty)

  io.toMem.get.loadFastMatch := 0.U.asTypeOf(io.toMem.get.loadFastMatch) // TODO: is this still needed?

  private val loadWakeUp = issueQueues.filter(_.params.LdExuCnt > 0).map(_.asInstanceOf[IssueQueueMemAddrImp].io.memIO.get.loadWakeUp).flatten
  require(loadWakeUp.length == io.fromMem.get.wakeup.length)
  loadWakeUp.zip(io.fromMem.get.wakeup).foreach(x => x._1 := x._2)

  memAddrIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.enq <> dispatch2Iq.io.out(i)
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1) ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1)
    ).foreach { case (sink, source) => sink := source }
  }

  ldAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO.head)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  stAddrIQs.zipWithIndex.foreach {
    case (imp: IssueQueueMemAddrImp, i) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.staFeedback(i)
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
    case _ =>
  }

  hyuIQs.zip(hyuIQIdxs).foreach {
    case (imp: IssueQueueMemAddrImp, idx) =>
      imp.io.memIO.get.feedbackIO.head := io.fromMem.get.hyuFeedback.head
      imp.io.memIO.get.feedbackIO(1) := 0.U.asTypeOf(imp.io.memIO.get.feedbackIO(1))
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      // TODO: refactor dirty code
      imp.io.deqDelay(1).ready := false.B
      io.toDataPathAfterDelay(idx)(1).valid := false.B
      io.toDataPathAfterDelay(idx)(1).bits := 0.U.asTypeOf(io.toDataPathAfterDelay(idx)(1).bits)
    case _ =>
  }

  private val staIdxSeq = (stAddrIQs).map(iq => iq.params.idxInSchBlk)
  private val hyaIdxSeq = (hyuIQs).map(iq => iq.params.idxInSchBlk)

  println(s"[SchedulerMemImp] sta iq idx in memSchdBlock: $staIdxSeq")
  println(s"[SchedulerMemImp] hya iq idx in memSchdBlock: $hyaIdxSeq")

  private val staEnqs = stAddrIQs.map(_.io.enq).flatten
  private val stdEnqs = stDataIQs.map(_.io.enq).flatten.take(staEnqs.size)
  private val hyaEnqs = hyuIQs.map(_.io.enq).flatten
  private val hydEnqs = stDataIQs.map(_.io.enq).flatten.drop(staEnqs.size)

  require(staEnqs.size == stdEnqs.size, s"number of enq ports of store address IQs(${staEnqs.size}) " +
    s"should be equal to number of enq ports of store data IQs(${stdEnqs.size})")

  require(hyaEnqs.size == hydEnqs.size, s"number of enq ports of hybrid address IQs(${hyaEnqs.size}) " +
    s"should be equal to number of enq ports of hybrid data IQs(${hydEnqs.size})")

  val d2IqStaOut = dispatch2Iq.io.out.zipWithIndex.filter(staIdxSeq contains _._2).unzip._1.flatten
  d2IqStaOut.zip(staEnqs).zip(stdEnqs).foreach { case ((dp, staIQ), stdIQ) =>
    val isAllReady = staIQ.ready && stdIQ.ready
    dp.ready := isAllReady
    staIQ.valid := dp.valid && isAllReady
    stdIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  val d2IqHyaOut = dispatch2Iq.io.out.zipWithIndex.filter(hyaIdxSeq contains _._2).unzip._1.flatten
  d2IqHyaOut.zip(hyaEnqs).zip(hydEnqs).foreach { case ((dp, hyaIQ), hydIQ) =>
    val isAllReady = hyaIQ.ready && hydIQ.ready
    dp.ready := isAllReady
    hyaIQ.valid := dp.valid && isAllReady
    hydIQ.valid := dp.valid && isAllReady && FuType.FuTypeOrR(dp.bits.fuType, FuType.stu, FuType.mou)
  }

  stDataIQs.zipWithIndex.foreach { case (iq, i) =>
    iq.io.flush <> io.fromCtrlBlock.flush
    iq.io.wakeupFromWB.zip(
      wakeupFromIntWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromFpWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVfWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromV0WBVec.zipWithIndex.filter(x => iq.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
      wakeupFromVlWBVec.zipWithIndex.filter(x => iq.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
    ).foreach { case (sink, source) => sink := source }
  }

  (stdEnqs ++ hydEnqs).zip(staEnqs ++ hyaEnqs).zipWithIndex.foreach { case ((stdIQEnq, staIQEnq), i) =>
    stdIQEnq.bits := staIQEnq.bits
    // Store data reuses store addr src(1) in dispatch2iq
    // [dispatch2iq] --src*------src*(0)--> [staIQ|hyaIQ]
    //                       \
    //                        ---src*(1)--> [stdIQ]
    // Since the src(1) of sta is easier to get, stdIQEnq.bits.src*(0) is driven from staIQEnq.bits.src*(1)
    // instead of dispatch2Iq.io.out(x).bits.src*(1)
    val stdIdx = 1
    stdIQEnq.bits.srcState(0) := staIQEnq.bits.srcState(stdIdx)
    stdIQEnq.bits.srcLoadDependency(0) := staIQEnq.bits.srcLoadDependency(1)
    stdIQEnq.bits.srcType(0) := staIQEnq.bits.srcType(stdIdx)
    stdIQEnq.bits.psrc(0) := staIQEnq.bits.psrc(stdIdx)
    stdIQEnq.bits.sqIdx := staIQEnq.bits.sqIdx
  }

  vecMemIQs.foreach {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.sqDeqPtr.foreach(_ := io.fromMem.get.sqDeqPtr)
      imp.io.memIO.get.lqDeqPtr.foreach(_ := io.fromMem.get.lqDeqPtr)
      // not used
      //imp.io.memIO.get.feedbackIO.head := io.fromMem.get.vstuFeedback.head // only vector store replay
      // maybe not used
      imp.io.memIO.get.checkWait.stIssuePtr := io.fromMem.get.stIssuePtr
      imp.io.memIO.get.checkWait.memWaitUpdateReq := io.fromMem.get.memWaitUpdateReq
      imp.io.wakeupFromWB.zip(
        wakeupFromIntWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromIntWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromFpWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromFpWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVfWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVfWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromV0WBVec.zipWithIndex.filter(x => imp.params.needWakeupFromV0WBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq ++
        wakeupFromVlWBVec.zipWithIndex.filter(x => imp.params.needWakeupFromVlWBPort.keys.toSeq.contains(x._2)).map(_._1).toSeq
      ).foreach { case (sink, source) => sink := source }

    case _ =>
  }
  val vecMemFeedbackIO: Seq[MemRSFeedbackIO] = vecMemIQs.map {
    case imp: IssueQueueVecMemImp =>
      imp.io.memIO.get.feedbackIO
  }.flatten
  assert(vecMemFeedbackIO.size == io.fromMem.get.vstuFeedback.size, "vecMemFeedback size doesn't match!")
  vecMemFeedbackIO.zip(io.fromMem.get.vstuFeedback).foreach {
    case (sink, source) =>
      sink := source
  }

  val lsqEnqCtrl = Module(new LsqEnqCtrl)

  lsqEnqCtrl.io.redirect <> io.fromCtrlBlock.flush
  lsqEnqCtrl.io.enq <> dispatch2Iq.io.enqLsqIO.get
  lsqEnqCtrl.io.lcommit := io.fromMem.get.lcommit
  lsqEnqCtrl.io.scommit := io.fromMem.get.scommit
  lsqEnqCtrl.io.lqCancelCnt := io.fromMem.get.lqCancelCnt
  lsqEnqCtrl.io.sqCancelCnt := io.fromMem.get.sqCancelCnt
  dispatch2Iq.io.lqFreeCount.get := lsqEnqCtrl.io.lqFreeCount
  dispatch2Iq.io.sqFreeCount.get := lsqEnqCtrl.io.sqFreeCount
  io.memIO.get.lsqEnqIO <> lsqEnqCtrl.io.enqLsq

  dontTouch(io.vecLoadIssueResp)

  val intBusyTablePerf = intBusyTable.get
  val fpBusyTablePerf = fpBusyTable.get
  val vecBusyTablePerf = vfBusyTable.get
  val v0BusyTablePerf = v0BusyTable.get
  val vlBusyTablePerf = vlBusyTable.get

  val perfEvents = basePerfEvents ++ Seq(intBusyTablePerf, fpBusyTablePerf, vecBusyTablePerf, v0BusyTablePerf, vlBusyTablePerf).flatten(_.getPerfEvents)
  generatePerfEvent()
}