xref: /XiangShan/src/main/scala/xiangshan/backend/datapath/DataPath.scala (revision 4daa5bf3c3f27e7fd090866d52405b21e107eb8d)
package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import utils.{XSPerfAccumulate, XSPerfHistogram}
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler, FpScheduler}
import xiangshan.backend.issue.EntryBundles._
import xiangshan.backend.regfile._
import xiangshan.backend.PcToDataPathIO
import xiangshan.backend.fu.FuType.is0latency

class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath]   Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath]   Fp R(${params.getRfReadSize(FpData())}), W(${params.getRfWriteSize(FpData())}) ")
  println(s"[DataPath]   Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromFpIQ, toFpIQ, toFpExu) = (io.fromFpIQ, io.toFpIQ, io.toFpExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toVecExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), FpIQ(${fromFpIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), FpExu(${fromFpIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromFpIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toFpIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toFpExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

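  // Write-back collide checkers and register-file read arbiters, one set per
  // register file type (Int / Fp / Vf). Their per-request ready signals are used
  // below as the "not blocked" conditions when issuing uops from the IQs.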
  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val fpWbBusyArbiter = Module(new FpRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val fpRFReadArbiter = Module(new FpRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))

  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val fpRdArbWinner: Seq2[MixedVec[Bool]] = fpRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val fpWbNotBlock: Seq[MixedVec[Bool]] = fpWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val fpRdNotBlock: Seq2[Bool] = fpRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))

  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getIntRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val fpRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getFpRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getVfRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val allDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)
  private val allNumRegSrcs: Seq[Seq[Int]] = fromIQ.map(x => x.map(xx => xx.bits.exuParams.numRegSrc).toSeq)

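  // Wire the read requests into the read arbiters: for every IQ / EXU / source
  // operand, a request is raised only if that operand index can read this register
  // file (srcIndices) and the operand's data source is the register file; all
  // other arbiter inputs are tied off.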
  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
//          if (allNumRegSrcs(iqIdx)(exuIdx) == 2) {
//            val src0Req = inRFReadReqSeq(0).valid && allDataSources(iqIdx)(exuIdx)(0).readReg
//            val src1Req = inRFReadReqSeq(1).valid && allDataSources(iqIdx)(exuIdx)(1).readReg
//            if (srcIdx == 0) {
//              arbInSeq(srcIdx).valid := src0Req || src1Req
//              arbInSeq(srcIdx).bits.addr := Mux(src1Req && !src0Req, inRFReadReqSeq(1).bits.addr,inRFReadReqSeq(0).bits.addr)
//            } else {
//              arbInSeq(srcIdx).valid := src0Req && src1Req
//              arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
//            }
//          } else {
//            arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
//            arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
//          }
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }
  fpRFReadArbiter.io.in.zip(fpRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = FpRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VfRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

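  // Write-port arbitration requests: a uop competes for a write port of the
  // matching register file only when it is valid and the corresponding write
  // enable (rfWen / getFpWen / getVfWen) is set.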
  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val fpRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getFpWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getVfWen.getOrElse(false.B)).toSeq).toSeq

  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  fpWbBusyArbiter.io.in.zip(fpRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu
  private val numFpRfReadByExu = fpSchdParams.numFpRfReadByExu + memSchdParams.numFpRfReadByExu
  private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu
  // Todo: limit read port
  private val numIntR = numIntRfReadByExu
  private val numFpR = numFpRfReadByExu
  private val numVfR = numVfRfReadByExu
  println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Fp(${numFpRfReadByExu}), Vf(${numVfRfReadByExu})")
  println(s"[DataPath] RegFile read port: Int(${numIntR}), Fp(${numFpR}), Vf(${numVfR})")

  private val schdParams = params.allSchdParams

  private val pcReadValid = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathValid))
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqOffset))
  private val targetPCRdata = io.fromPcTargetMem.toDataPathTargetPC
  private val pcRdata = io.fromPcTargetMem.toDataPathPC
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  private val fpRfRaddr = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfRdata = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.rfDataWidth.W)))
  private val fpRfWen = Wire(Vec(io.fromFpWb.length, Bool()))
  private val fpRfWaddr = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfWdata = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.rfDataWidth.W)))

  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

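  // PC / jump-target reads: int IQs whose EXUs need the PC send their ftqIdx and
  // ftqOffset to the PC target memory; the PC and target come back through
  // pcRdata / targetPCRdata and are attached to the EXU inputs below.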
  val pcReadFtqPtrFormIQ = fromIntIQ.flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadValid.zip(pcReadFtqPtrFormIQ.map(_.valid)).map(x => x._1 := x._2)
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.fromPcTargetMem.fromDataPathValid := pcReadValid
  io.fromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.fromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset

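  // Debug read ports, only used by difftest / AlwaysBasicDiff: architectural
  // register values are read from the physical register files via the debug RAT
  // inputs (debugIntRat / debugFpRat / debugVecRat).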
  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val fpDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(fpSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 1, UInt(VLEN.W))))
    } else { None }

  private val fpDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vecDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
    } else { None }
  private val vconfigDebugReadData: Option[UInt] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(UInt(64.W)))
    } else { None }


  fpDebugReadData.foreach(_ := fpDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only uses bits [63, 0]
  vecDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(0, 32)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vconfigDebugReadData.foreach(_ := vfDebugRead
    .get._2(32)(63, 0)
  )

  io.debugVconfig.foreach(_ := vconfigDebugReadData.get)

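  // Register file instances: the int RF uses 4 banks, the fp RF a single bank,
  // and the vector RF is split into VLEN/XLEN slices (vfRfSplitNum) with
  // per-slice write enables.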
  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    bankNum = 4,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2))
  FpRegFile("FpRegFile", fpSchdParams.numPregs, fpRfRaddr, fpRfRdata, fpRfWen, fpRfWaddr, fpRfWdata,
    bankNum = 1,
    debugReadAddr = fpDebugRead.map(_._1),
    debugReadData = fpDebugRead.map(_._2))
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2))

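  // Connect the write-back ports to the register files and drive each read port
  // with the address granted by the corresponding read arbiter (unused ports read
  // address 0). Note that the vf write-back is registered for one cycle
  // (RegEnable / RegNext) before reaching the register file.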
  intRfWaddr := io.fromIntWb.map(_.addr).toSeq
  intRfWdata := io.fromIntWb.map(_.data).toSeq
  intRfWen := io.fromIntWb.map(_.wen).toSeq

  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  fpRfWaddr := io.fromFpWb.map(_.addr).toSeq
  fpRfWdata := io.fromFpWb.map(_.data).toSeq
  fpRfWen := io.fromFpWb.map(_.wen).toSeq

  for (portIdx <- fpRfRaddr.indices) {
    if (fpRFReadArbiter.io.out.isDefinedAt(portIdx))
      fpRfRaddr(portIdx) := fpRFReadArbiter.io.out(portIdx).bits.addr
    else
      fpRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  vfRfWdata := io.fromVfWb.map(x => RegEnable(x.data, x.wen)).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(x => RegNext(x.wen))).foreach { case (wenSink, wenSource) => wenSink := wenSource } )// Todo: support fp multi-write

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }


  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  fpDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugVecRat.get :+ io.debugVconfigRat.get
  }
  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has vfDebugRead: ${vfDebugRead.nonEmpty}")

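  // OG0 -> OG1 pipeline registers: issue valid, uop data, entry addrOH and
  // immediate info are latched here for one cycle before being sent to the EXUs.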
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_immInfo = Reg(MixedVec(toExu.map(x => MixedVec(x.map(x => new ImmInfo).toSeq)).toSeq))
  s1_immInfo.zip(fromIQ).map { case (s1Vec, s0Vec) =>
    s1Vec.zip(s0Vec).map { case (s1, s0) =>
      s1.imm := Mux(s0.valid, s0.bits.common.imm, s1.imm)
      s1.immType := Mux(s0.valid, s0.bits.immType, s1.immType)
    }
  }
  io.og1ImmInfo.zip(s1_immInfo.flatten).map{ case(out, reg) =>
    out := reg
  }
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_fpPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))

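  // Route register-file read data to every EXU source slot according to the
  // statically assigned read-port configuration (rfrPortConfigs); slots without a
  // matching port keep the default value of 0.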
  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => x.filter(_.isInstanceOf[IntRD])).flatten
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] }
          .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) }
      }
  }

  println(s"[DataPath] s1_fpPregRData.flatten.flatten.size: ${s1_fpPregRData.flatten.flatten.size}, fpRfRdata.size: ${fpRfRdata.size}")
  s1_fpPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_fpPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
    iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
      val realIuCfg = iuCfg.map(x => x.filter(_.isInstanceOf[FpRD])).flatten
      iuRdata.zip(realIuCfg)
        .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[FpRD] }
        .foreach { case (sink, cfg) => sink := fpRfRdata(cfg.port) }
    }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => x.filter(_.isInstanceOf[VfRD])).flatten
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] }
          .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) }
      }
  }

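  // Cancel handling for speculative (zero-latency) wakeups: remember, one cycle
  // delayed, which non-load producers failed at OG0, so that a consumer that
  // planned to forward from such a producer can be cancelled at s0. Cancels coming
  // from the vector scheduler are masked off for the memory scheduler.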
  val og0_cancel_no_load = VecInit(og0FailedVec2.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1).toSeq)
  val exuParamsNoLoad = fromIQ.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu)
  val is_0latency = Wire(Vec(og0_cancel_no_load.size, Bool()))
  is_0latency := exuParamsNoLoad.map(x => is0latency(x._1.bits.common.fuType))
  val og0_cancel_delay = RegNext(VecInit(og0_cancel_no_load.zip(is_0latency).map(x => x._1 && x._2)))
  val isVfScheduler = VecInit(exuParamsNoLoad.map(x => x._2.schdType.isInstanceOf[VfScheduler].B))
  val og0_cancel_delay_for_mem = VecInit(og0_cancel_delay.zip(isVfScheduler).map(x => x._1 && !x._2))
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0

      val srcNotBlock = Wire(Bool())
      srcNotBlock := s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip fpRdArbWinner(i)(j) zip vfRdArbWinner(i)(j)).map {
        case (source, ((win_int, win_fp),win_vf)) =>
        !source.readReg || win_int && win_fp && win_vf
      }.fold(true.B)(_ && _)
//      if (fromIQ(i)(j).bits.exuParams.schdType.isInstanceOf[IntScheduler] && (fromIQ(i)(j).bits.exuParams.numRegSrc == 2)) {
//        val src0VfBlock = s0.bits.common.dataSources(0).readReg && !vfRdArbWinner(i)(j)(0)
//        val src1VfBlock = s0.bits.common.dataSources(1).readReg && !vfRdArbWinner(i)(j)(1)
//        val src1IntBlock = s0.bits.common.dataSources(0).readReg && s0.bits.common.dataSources(1).readReg && !intRdArbWinner(i)(j)(1)
//        val src0IntBlock = (s0.bits.common.dataSources(0).readReg || s0.bits.common.dataSources(1).readReg) && !intRdArbWinner(i)(j)(0)
//        srcNotBlock := !src0VfBlock && !src1VfBlock && !src1IntBlock && !src0IntBlock
//      }
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && fpWbNotBlock(i)(j) && vfWbNotBlock(i)(j)
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s0_cancel = Wire(Bool())
      val og0_cancel_delay_need = if (s0.bits.exuParams.schdType.isInstanceOf[MemScheduler]) og0_cancel_delay_for_mem else og0_cancel_delay
      if (s0.bits.exuParams.isIQWakeUpSink) {
        val exuOHNoLoad = s0.bits.common.l1ExuOH.get.map(x => x.asTypeOf(Vec(x.getWidth, Bool())).zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1))
        s0_cancel := exuOHNoLoad.zip(s0.bits.common.dataSources).map{
          case (exuOH, dataSource) => (VecInit(exuOH).asUInt & og0_cancel_delay_need.asUInt).orR && dataSource.readForward
        }.reduce(_ || _) && s0.valid
      } else s0_cancel := false.B
      val s0_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && notBlock && !s1_cancel && !s0_ldCancel && !s0_cancel) {
        s1_valid := s0.valid
        s1_data.fromIssueBundle(s0.bits) // no src data here
//        if (fromIQ(i)(j).bits.exuParams.schdType.isInstanceOf[IntScheduler] && (fromIQ(i)(j).bits.exuParams.numRegSrc == 2)) {
//          s1_data.dataSources(1).value := Mux(!s0.bits.common.dataSources(0).readReg && s0.bits.common.dataSources(1).readReg, DataSource.anotherReg, s0.bits.common.dataSources(1).value)
//        }
        s1_addrOH := s0.bits.addrOH
      }.otherwise {
        s1_valid := false.B
      }
      s0.ready := (s1_ready || !s1_valid) && notBlock && !s1_cancel && !s0_ldCancel && !s0_cancel
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end
    }
  }

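  // OG0/OG1 responses back to the issue queues: OG0 reports a block when a valid
  // uop fails to enter the s1 registers; OG1 reports success, uncertain or block
  // (see the respType note below) depending on whether the uop reached its EXU.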
  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
  toIQs.zipWithIndex.foreach {
    case(toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx) := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx))
          og0resp.valid                 := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.robIdx           := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx.foreach(_ := fromIQ(iqIdx)(iuIdx).bits.common.vpu.get.vuopIdx)
          og0resp.bits.resp             := RespType.block
          og0resp.bits.fuType           := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx)   := s1_toExuValid(iqIdx)(iuIdx) && !toExuFire(iqIdx)(iuIdx)
          og1resp.valid                 := s1_toExuValid(iqIdx)(iuIdx)
          og1resp.bits.robIdx           := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx.foreach(_ := s1_toExuData(iqIdx)(iuIdx).vpu.get.vuopIdx)
          // respType:  fuIdle      ->IQ entry clear
          //            fuUncertain ->IQ entry no action
          //            fuBusy      ->IQ entry issued set false, then re-issue
          // Only hyu, lda and sta are fuUncertain at OG1 stage
          og1resp.bits.resp             := Mux(!og1FailedVec2(iqIdx)(iuIdx),
            if (toIU.issueQueueParams match { case x => x.isMemAddrIQ && !x.isVecMemIQ || x.inVfSchd}) RespType.uncertain else RespType.success,
            RespType.block
          )
          og1resp.bits.fuType           := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }

  io.og0CancelOH := VecInit(fromFlattenIQ.map(x => x.valid && !x.fire)).asUInt
  io.og1CancelOH := VecInit(toFlattenExu.map(x => x.valid && !x.fire)).asUInt

  io.cancelToBusyTable.zipWithIndex.foreach { case (cancel, i) =>
    cancel.valid := fromFlattenIQ(i).valid && !fromFlattenIQ(i).fire
    cancel.bits.rfWen := fromFlattenIQ(i).bits.common.rfWen.getOrElse(false.B)
    cancel.bits.fpWen := fromFlattenIQ(i).bits.common.fpWen.getOrElse(false.B)
    cancel.bits.vecWen := fromFlattenIQ(i).bits.common.vecWen.getOrElse(false.B)
    cancel.bits.pdest := fromFlattenIQ(i).bits.common.pdest
  }

  if (backendParams.debugEn){
    dontTouch(og0_cancel_no_load)
    dontTouch(is_0latency)
    dontTouch(og0_cancel_delay)
    dontTouch(isVfScheduler)
    dontTouch(og0_cancel_delay_for_mem)
  }
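  // OG1 -> EXU: drive the EXU inputs from the s1 registers; each source operand is
  // selected from the int / fp / vf read data by its source type (Mux1H), and the
  // PC / predicted target are filled in for EXUs that need them.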
  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)

        val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+
          (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty)
            Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k))
          else None) :+
          (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty)
            Some(SrcType.isVp(s1_srcType(i)(j)(k))-> s1_vfPregRData(i)(j)(k))
          else None) :+
          (if (s1_fpPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(FpRegSrcDataSet).nonEmpty)
            Some(SrcType.isFp(s1_srcType(i)(j)(k)) -> s1_fpPregRData(i)(j)(k))
          else None)
        ).filter(_.nonEmpty).map(_.get)
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }
      if (sinkData.params.hasJmpFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      }
      if (sinkData.params.needTarget) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.predictInfo.get.target := targetPCRdata(index)
      }
    }
  }

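  // Difftest: expose the architectural int / fp / vec register state read through
  // the debug ports above, delayed by two cycles.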
  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }

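  // The int/vf "regcache" structures below, together with the counters that
  // follow, only model the hit rate of a hypothetical small register cache for
  // performance statistics; they appear to drive no functional datapath logic.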
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }

  val vf_regcache_size = 48
  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
  for (i <- vfRfWen.indices) {
    when (vfRfWen.head(i)) {
      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
    }
  }

  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"FpRegFileRead_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"FpRegFileWrite_hist", PopCount(fpRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)

  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part24 = (1 until 24).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))

  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)

  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"FpRFReadBeforeArb", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"FpRFReadAfterArb", PopCount(fpRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))

  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"FpRFReadBeforeArb_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"FpRFReadAfterArb_hist", PopCount(fpRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
}

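// IO bundle of the DataPath: per-scheduler issue-queue inputs and OG responses,
// EXU input channels, register-file write-back ports, PC-read, cancel/flush and
// debug ports.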
class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromFpIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(fpSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toFpIQ = MixedVec(fpSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0CancelOH = Output(ExuOH(backendParams.numExu))

  val og1CancelOH = Output(ExuOH(backendParams.numExu))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val cancelToBusyTable = Vec(backendParams.numExu, ValidIO(new CancelSignal))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(fpSchdParams.genExuInputBundle)

  val toVecExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val og1ImmInfo: Vec[ImmInfo] = Output(Vec(params.allExuParams.size, new ImmInfo))

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromFpWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genFpWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val fromPcTargetMem = Flipped(new PcToDataPathIO(params))

  val debugIntRat     = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat      = if (params.debugEn) Some(Input(Vec(32, UInt(fpSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat     = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVconfigRat = if (params.debugEn) Some(Input(UInt(vfSchdParams.pregIdxWidth.W))) else None
  val debugVconfig    = if (params.debugEn) Some(Output(UInt(XLEN.W))) else None
}
