// xref: /XiangShan/src/main/scala/xiangshan/backend/datapath/DataPath.scala (revision 3088616cbf0793407bb68460b2db89b7de80c12a)
package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import utils._
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{FpScheduler, ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
import xiangshan.backend.issue.EntryBundles._
import xiangshan.backend.regfile._
import xiangshan.backend.regcache._
import xiangshan.backend.PcToDataPathIO
import xiangshan.backend.fu.FuType.is0latency
import xiangshan.mem.{SqPtr, LqPtr}

class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath]   Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath]   Fp R(${params.getRfReadSize(FpData())}), W(${params.getRfWriteSize(FpData())}) ")
  println(s"[DataPath]   Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
  println(s"[DataPath]   V0 R(${params.getRfReadSize(V0Data())}), W(${params.getRfWriteSize(V0Data())}) ")
  println(s"[DataPath]   Vl R(${params.getRfReadSize(VlData())}), W(${params.getRfWriteSize(VlData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromFpIQ,  toFpIQ,  toFpExu)  = (io.fromFpIQ,  io.toFpIQ,  io.toFpExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ,  toVfIQ,  toVfExu ) = (io.fromVfIQ,  io.toVfIQ,  io.toVecExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), FpIQ(${fromFpIQ.size}), VecIQ(${fromVfIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), FpExu(${fromFpIQ.map(_.size).sum}), VecExu(${fromVfIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromFpIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toFpIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toFpExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val fpWbBusyArbiter = Module(new FpRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val v0WbBusyArbiter = Module(new V0RFWBCollideChecker(backendParams))
  private val vlWbBusyArbiter = Module(new VlRFWBCollideChecker(backendParams))

  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val fpRFReadArbiter = Module(new FpRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))
  private val v0RFReadArbiter = Module(new V0RFReadArbiter(backendParams))
  private val vlRFReadArbiter = Module(new VlRFReadArbiter(backendParams))
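  // Each physical register file type (int/fp/vf/v0/vl) gets a read arbiter, which maps
  // the per-source read requests of all issue ports onto the limited RF read ports, and
  // a write-back collide checker, which resolves conflicts on the RF write ports.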

  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val fpRdArbWinner: Seq2[MixedVec[Bool]] = fpRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val v0RdArbWinner: Seq2[MixedVec[Bool]] = v0RFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vlRdArbWinner: Seq2[MixedVec[Bool]] = vlRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq

  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val fpWbNotBlock: Seq[MixedVec[Bool]] = fpWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val v0WbNotBlock: Seq[MixedVec[Bool]] = v0WbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vlWbNotBlock: Seq[MixedVec[Bool]] = vlWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val fpRdNotBlock: Seq2[Bool] = fpRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))
  private val v0RdNotBlock: Seq2[Bool] = v0RdArbWinner.map(_.map(_.asUInt.andR))
  private val vlRdNotBlock: Seq2[Bool] = vlRdArbWinner.map(_.map(_.asUInt.andR))

  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val fpRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val v0RFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val vlRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq

  private val allDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)
  private val allNumRegSrcs: Seq[Seq[Int]] = fromIQ.map(x => x.map(xx => xx.bits.exuParams.numRegSrc).toSeq)
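  // The five *RFReadReq views above are identical; the per-regfile filtering happens in
  // the wiring blocks below. For every (IQ, EXU, src) request: forward valid/addr to the
  // arbiter only if that source index reads this register-file type and actually reads
  // the RF this cycle (dataSources(srcIdx).readReg); otherwise tie the input off.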

  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }
  fpRFReadArbiter.io.in.zip(fpRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = FpRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VecRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  v0RFReadArbiter.io.in.zip(v0RFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = V0RegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  vlRFReadArbiter.io.in.zip(vlRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VlRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val fpRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.fpWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.vecWen.getOrElse(false.B)).toSeq).toSeq
  private val v0RFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.v0Wen.getOrElse(false.B)).toSeq).toSeq
  private val vlRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.vlWen.getOrElse(false.B)).toSeq).toSeq

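  // Write-back busy arbitration only consumes the request valids; the ready signals come
  // back through the *WbNotBlock views above and gate issue in the s0 handshake below.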
  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  fpWbBusyArbiter.io.in.zip(fpRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  v0WbBusyArbiter.io.in.zip(v0RFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vlWbBusyArbiter.io.in.zip(vlRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val schdParams = params.allSchdParams

  private val pcReadValid = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathValid))
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqOffset))
  private val targetPCRdata = io.fromPcTargetMem.toDataPathTargetPC
  private val pcRdata = io.fromPcTargetMem.toDataPathPC
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  private val fpRfRaddr = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfRdata = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.rfDataWidth.W)))
  private val fpRfWen = Wire(Vec(io.fromFpWb.length, Bool()))
  private val fpRfWaddr = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfWdata = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.rfDataWidth.W)))

  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

  private val v0RfSplitNum = VLEN / XLEN
  private val v0RfRaddr = Wire(Vec(params.numPregRd(V0Data()), UInt(log2Up(V0PhyRegs).W)))
  private val v0RfRdata = Wire(Vec(params.numPregRd(V0Data()), UInt(V0Data().dataWidth.W)))
  private val v0RfWen = Wire(Vec(v0RfSplitNum, Vec(io.fromV0Wb.length, Bool())))
  private val v0RfWaddr = Wire(Vec(io.fromV0Wb.length, UInt(log2Up(V0PhyRegs).W)))
  private val v0RfWdata = Wire(Vec(io.fromV0Wb.length, UInt(V0Data().dataWidth.W)))

  private val vlRfRaddr = Wire(Vec(params.numPregRd(VlData()), UInt(log2Up(VlPhyRegs).W)))
  private val vlRfRdata = Wire(Vec(params.numPregRd(VlData()), UInt(VlData().dataWidth.W)))
  private val vlRfWen = Wire(Vec(io.fromVlWb.length, Bool()))
  private val vlRfWaddr = Wire(Vec(io.fromVlWb.length, UInt(log2Up(VlPhyRegs).W)))
  private val vlRfWdata = Wire(Vec(io.fromVlWb.length, UInt(VlData().dataWidth.W)))

  val pcReadFtqPtrFormIQ = fromIntIQ.flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadValid.zip(pcReadFtqPtrFormIQ.map(_.valid)).map(x => x._1 := x._2)
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.fromPcTargetMem.fromDataPathValid := pcReadValid
  io.fromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.fromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset

  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, (Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W)))))
  private val fpDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, (Wire(Vec(32, UInt(fpSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W)))))
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, (Wire(Vec(31, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(31, UInt(VLEN.W)))))
  private val v0DebugRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, (Wire(Vec(1, UInt(log2Up(V0PhyRegs).W))), Wire(Vec(1, UInt(V0Data().dataWidth.W)))))
  private val vlDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, (Wire(Vec(1, UInt(log2Up(VlPhyRegs).W))), Wire(Vec(1, UInt(VlData().dataWidth.W)))))

  private val fpDebugReadData: Option[Vec[UInt]] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, Wire(Vec(32, UInt(XLEN.W))))
  private val vecDebugReadData: Option[Vec[UInt]] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
  private val vlDebugReadData: Option[UInt] =
    OptionWrapper(env.AlwaysBasicDiff || env.EnableDifftest, Wire(UInt(VlData().dataWidth.W)))


  fpDebugReadData.foreach(_ := fpDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp uses only bits [63:0]
  vecDebugReadData.foreach(_ :=
    v0DebugRead
    .get._2
    .slice(0, 1)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten ++
    vfDebugRead
    .get._2
    .slice(0, 31)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vlDebugReadData.foreach(_ := vlDebugRead
    .get._2(0)
  )

  io.debugVl.foreach(_ := vlDebugReadData.get)

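  // Register file instances. The vf and v0 register files are split into VLEN/XLEN
  // slices, each slice with its own write-enable vector; vl reuses the FpRegFile
  // implementation with isVlRegfile = true.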
  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    bankNum = 1,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2)
  )
  FpRegFile("FpRegFile", fpSchdParams.numPregs, fpRfRaddr, fpRfRdata, fpRfWen, fpRfWaddr, fpRfWdata,
    bankNum = 1,
    debugReadAddr = fpDebugRead.map(_._1),
    debugReadData = fpDebugRead.map(_._2)
  )
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2)
  )
  VfRegFile("V0RegFile", V0PhyRegs, v0RfSplitNum, v0RfRaddr, v0RfRdata, v0RfWen, v0RfWaddr, v0RfWdata,
    debugReadAddr = v0DebugRead.map(_._1),
    debugReadData = v0DebugRead.map(_._2)
  )
  FpRegFile("VlRegFile", VlPhyRegs, vlRfRaddr, vlRfRdata, vlRfWen, vlRfWaddr, vlRfWdata,
    bankNum = 1,
    isVlRegfile = true,
    debugReadAddr = vlDebugRead.map(_._1),
    debugReadData = vlDebugRead.map(_._2)
  )

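  // Write ports are pipelined by one cycle: addr/data are captured with RegEnable on wen,
  // and wen itself is delayed with RegNext, so the register files see registered writes.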
  intRfWaddr := io.fromIntWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  intRfWdata := io.fromIntWb.map(x => RegEnable(x.data, x.wen)).toSeq
  intRfWen := RegNext(VecInit(io.fromIntWb.map(_.wen).toSeq))

  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  fpRfWaddr := io.fromFpWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  fpRfWdata := io.fromFpWb.map(x => RegEnable(x.data, x.wen)).toSeq
  fpRfWen := RegNext(VecInit(io.fromFpWb.map(_.wen).toSeq))

  for (portIdx <- fpRfRaddr.indices) {
    if (fpRFReadArbiter.io.out.isDefinedAt(portIdx))
      fpRfRaddr(portIdx) := fpRFReadArbiter.io.out(portIdx).bits.addr
    else
      fpRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  vfRfWdata := io.fromVfWb.map(x => RegEnable(x.data, x.wen)).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(x => RegNext(x.wen))).foreach { case (wenSink, wenSource) => wenSink := wenSource } )

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }

  v0RfWaddr := io.fromV0Wb.map(x => RegEnable(x.addr, x.wen)).toSeq
  v0RfWdata := io.fromV0Wb.map(x => RegEnable(x.data, x.wen)).toSeq
  v0RfWen.foreach(_.zip(io.fromV0Wb.map(x => RegNext(x.wen))).foreach { case (wenSink, wenSource) => wenSink := wenSource } )

  for (portIdx <- v0RfRaddr.indices) {
    if (v0RFReadArbiter.io.out.isDefinedAt(portIdx))
      v0RfRaddr(portIdx) := v0RFReadArbiter.io.out(portIdx).bits.addr
    else
      v0RfRaddr(portIdx) := 0.U
  }

  vlRfWaddr := io.fromVlWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  vlRfWdata := io.fromVlWb.map(x => RegEnable(x.data, x.wen)).toSeq
  vlRfWen := io.fromVlWb.map(x => RegNext(x.wen)).toSeq

  for (portIdx <- vlRfRaddr.indices) {
    if (vlRFReadArbiter.io.out.isDefinedAt(portIdx))
      vlRfRaddr(portIdx) := vlRFReadArbiter.io.out(portIdx).bits.addr
    else
      vlRfRaddr(portIdx) := 0.U
  }


  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  fpDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugVecRat.get
  }
  v0DebugRead.foreach { case (addr, _) =>
    addr := io.debugV0Rat.get
  }
  vlDebugRead.foreach { case (addr, _) =>
    addr := io.debugVlRat.get
  }

  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has fpDebugRead: ${fpDebugRead.nonEmpty}, " +
    s"has vecDebugRead: ${vfDebugRead.nonEmpty}, " +
    s"has v0DebugRead: ${v0DebugRead.nonEmpty}, " +
    s"has vlDebugRead: ${vlDebugRead.nonEmpty}")

  // regcache
  private val regCache = Module(new RegCache())

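  // IssueBundle2RCReadPort turns an issue bundle's integer sources into RegCache read
  // ports: a port reads only when the uop is valid and that source is marked as coming
  // from the register cache (dataSources(idx).readRegCache).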
  def IssueBundle2RCReadPort(issue: DecoupledIO[IssueQueueIssueBundle]): Vec[RCReadPort] = {
    val readPorts = Wire(Vec(issue.bits.exuParams.numIntSrc, new RCReadPort(params.intSchdParams.get.rfDataWidth, RegCacheIdxWidth)))
    readPorts.zipWithIndex.foreach{ case (r, idx) =>
      r.ren  := issue.valid && issue.bits.common.dataSources(idx).readRegCache
      r.addr := issue.bits.rcIdx.get(idx)
      r.data := DontCare
    }
    readPorts
  }

  private val regCacheReadReq = fromIntIQ.flatten.filter(_.bits.exuParams.numIntSrc > 0).flatMap(IssueBundle2RCReadPort(_)) ++
                                fromMemIQ.flatten.filter(_.bits.exuParams.numIntSrc > 0).flatMap(IssueBundle2RCReadPort(_))
  private val regCacheReadData = regCache.io.readPorts.map(_.data)

  println(s"[DataPath] regCache readPorts size: ${regCache.io.readPorts.size}, regCacheReadReq size: ${regCacheReadReq.size}")
  require(regCache.io.readPorts.size == regCacheReadReq.size, "reg cache's readPorts size should be equal to regCacheReadReq")

  regCache.io.readPorts.zip(regCacheReadReq).foreach{ case (r, req) =>
    r.ren := req.ren
    r.addr := req.addr
  }

  val s1_RCReadData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  s1_RCReadData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten
    .zip(regCacheReadData.take(params.getIntExuRCReadSize)).foreach{ case (s1_data, rdata) =>
      s1_data := rdata
    }
  s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten
    .zip(regCacheReadData.takeRight(params.getMemExuRCReadSize)).foreach{ case (s1_data, rdata) =>
      s1_data := rdata
    }

  println(s"[DataPath] s1_RCReadData.int.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.int.size: ${params.getIntExuRCReadSize}")
  println(s"[DataPath] s1_RCReadData.mem.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.mem.size: ${params.getMemExuRCReadSize}")

  io.toWakeupQueueRCIdx := regCache.io.toWakeupQueueRCIdx
  io.toBypassNetworkRCData := s1_RCReadData
  regCache.io.writePorts := io.fromBypassNetwork

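  // s0 (issue/OG0) -> s1 (OG1) pipeline registers: valid, uop data, addrOH and immediate
  // info are captured here; source data is muxed in at s1 from the register-file read
  // results below.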
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_immInfo = Reg(MixedVec(toExu.map(x => MixedVec(x.map(x => new ImmInfo).toSeq)).toSeq))
  s1_immInfo.zip(fromIQ).map { case (s1Vec, s0Vec) =>
    s1Vec.zip(s0Vec).map { case (s1, s0) =>
      s1.imm := Mux(s0.valid, s0.bits.common.imm, s1.imm)
      s1.immType := Mux(s0.valid, s0.bits.immType, s1.immType)
    }
  }
  io.og1ImmInfo.zip(s1_immInfo.flatten).map{ case(out, reg) =>
    out := reg
  }
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_fpPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_v0PregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vlPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))

  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[IntRD]) > 0 }
          .foreach { case (sink, cfg) => sink := intRfRdata(cfg.find(_.isInstanceOf[IntRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_fpPregRData.flatten.flatten.size: ${s1_fpPregRData.flatten.flatten.size}, fpRfRdata.size: ${fpRfRdata.size}")
  s1_fpPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_fpPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[FpRD]) > 0 }
          .foreach { case (sink, cfg) => sink := fpRfRdata(cfg.find(_.isInstanceOf[FpRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[VfRD]) > 0 }
          .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.find(_.isInstanceOf[VfRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_v0PregRData.flatten.flatten.size: ${s1_v0PregRData.flatten.flatten.size}, v0RfRdata.size: ${v0RfRdata.size}")
  s1_v0PregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_v0PregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[V0RD]) > 0 }
          .foreach { case (sink, cfg) => sink := v0RfRdata(cfg.find(_.isInstanceOf[V0RD]).get.port) }
      }
  }

  println(s"[DataPath] s1_vlPregRData.flatten.flatten.size: ${s1_vlPregRData.flatten.flatten.size}, vlRfRdata.size: ${vlRfRdata.size}")
  s1_vlPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vlPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[VlRD]) > 0 }
          .foreach { case (sink, cfg) => sink := vlRfRdata(cfg.find(_.isInstanceOf[VlRD]).get.port) }
      }
  }

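  // OG0 cancel tracking for wakeup: take the OG0 failures of non-load EXUs, keep only
  // those issuing 0-latency ops, and delay them one cycle; the mem-side variant also
  // masks out cancels originating from the vf scheduler.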
  val og0_cancel_no_load = VecInit(og0FailedVec2.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1).toSeq)
  val exuParamsNoLoad = fromIQ.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu)
  val is_0latency = Wire(Vec(og0_cancel_no_load.size, Bool()))
  is_0latency := exuParamsNoLoad.map(x => is0latency(x._1.bits.common.fuType))
  val og0_cancel_delay = RegNext(VecInit(og0_cancel_no_load.zip(is_0latency).map(x => x._1 && x._2)))
  val isVfScheduler = VecInit(exuParamsNoLoad.map(x => x._2.schdType.isInstanceOf[VfScheduler].B))
  val og0_cancel_delay_for_mem = VecInit(og0_cancel_delay.zip(isVfScheduler).map(x => x._1 && !x._2))
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0

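      // A uop may leave s0 only if every source that reads a register won all of its
      // read-port arbitrations and no write-back port is blocked; it is additionally
      // cancelled if a 0-latency producer it forwards from was itself cancelled at OG0.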
      val srcNotBlock = Wire(Bool())
      srcNotBlock := s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip fpRdArbWinner(i)(j) zip vfRdArbWinner(i)(j) zip v0RdArbWinner(i)(j) zip vlRdArbWinner(i)(j)).map {
        case (source, ((((win_int, win_fp), win_vf), win_v0), win_vl)) =>
        !source.readReg || win_int && win_fp && win_vf && win_v0 && win_vl
      }.fold(true.B)(_ && _)
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && fpWbNotBlock(i)(j) && vfWbNotBlock(i)(j) && v0WbNotBlock(i)(j) && vlWbNotBlock(i)(j)
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s0_cancel = Wire(Bool())
      val og0_cancel_delay_need = if (s0.bits.exuParams.schdType.isInstanceOf[MemScheduler]) og0_cancel_delay_for_mem else og0_cancel_delay
      if (s0.bits.exuParams.isIQWakeUpSink) {
        val exuOHNoLoad = s0.bits.common.l1ExuOH.get.map(x => x.asTypeOf(Vec(x.getWidth, Bool())).zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1))
        s0_cancel := exuOHNoLoad.zip(s0.bits.common.dataSources).map{
          case (exuOH, dataSource) => (VecInit(exuOH).asUInt & og0_cancel_delay_need.asUInt).orR && dataSource.readForward
        }.reduce(_ || _) && s0.valid
      } else s0_cancel := false.B
      val s0_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && !s0_ldCancel) {
        s1_valid := true.B
      }.otherwise {
        s1_valid := false.B
      }
      when (s0.valid) {
        s1_data.fromIssueBundle(s0.bits) // no src data here
        s1_addrOH := s0.bits.addrOH
      }
      s0.ready := notBlock && !s0_cancel
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end
    }
  }

  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
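  // OG0/OG1 responses back to each issue queue: OG0 fails when a valid uop is not
  // accepted at s0 (lost arbitration or was cancelled); OG1 fails when the s1 uop is
  // not accepted by its EXU.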
  toIQs.zipWithIndex.foreach {
    case(toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx)   := fromIQ(iqIdx)(iuIdx).valid && !fromIQ(iqIdx)(iuIdx).ready
          og0resp.valid                 := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.robIdx           := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx.foreach(_ := fromIQ(iqIdx)(iuIdx).bits.common.vpu.get.vuopIdx)
          og0resp.bits.sqIdx.foreach(_ := 0.U.asTypeOf(new SqPtr))
          og0resp.bits.lqIdx.foreach(_ := 0.U.asTypeOf(new LqPtr))
          og0resp.bits.resp             := RespType.block
          og0resp.bits.fuType           := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx)   := s1_toExuValid(iqIdx)(iuIdx) && !s1_toExuReady(iqIdx)(iuIdx)
          og1resp.valid                 := s1_toExuValid(iqIdx)(iuIdx)
          og1resp.bits.robIdx           := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx.foreach(_ := s1_toExuData(iqIdx)(iuIdx).vpu.get.vuopIdx)
          og1resp.bits.sqIdx.foreach(_ :=  0.U.asTypeOf(new SqPtr))
          og1resp.bits.lqIdx.foreach(_ :=  0.U.asTypeOf(new LqPtr))
          // respType: success   -> IQ entry is cleared
          //           uncertain -> IQ entry takes no action
          //           block     -> IQ entry's issued flag is cleared, then it re-issues
          // hyu, lda and sta are uncertain at the OG1 stage,
          // and all vector arith EXUs should check success at the OG2 stage
          og1resp.bits.resp             := Mux(og1FailedVec2(iqIdx)(iuIdx),
            RespType.block,
            if (toIU.issueQueueParams match { case x => x.isLdAddrIQ || x.isStAddrIQ || x.isHyAddrIQ || x.isVecLduIQ || x.isVecStuIQ || x.inVfSchd})
              RespType.uncertain
            else
              RespType.success,
          )
          og1resp.bits.fuType           := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }

  io.og0Cancel := og0FailedVec2.flatten.zip(params.allExuParams).map{ case (cancel, params) =>
                    if (params.isIQWakeUpSource && params.latencyCertain && params.wakeUpFuLatancySet.contains(0)) cancel else false.B
                  }.toSeq
  io.og1Cancel := toFlattenExu.map(x => x.valid && !x.fire)


  if (backendParams.debugEn){
    dontTouch(og0_cancel_no_load)
    dontTouch(is_0latency)
    dontTouch(og0_cancel_delay)
    dontTouch(isVfScheduler)
    dontTouch(og0_cancel_delay_for_mem)
  }
  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
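        // Source slot 3 is reserved for v0 (mask) and slot 4 for vl, so those slots
        // select only from the v0/vl read data; the remaining slots choose among the
        // int/fp/vec read data by the registered srcType. Mux1H assumes at most one
        // select condition is true per slot.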
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)
        val readRfMap: Seq[(Bool, UInt)] = (
          if (k == 3) {(
            Seq(None)
            :+
            OptionWrapper(s1_v0PregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(V0RegSrcDataSet).nonEmpty,
              (SrcType.isV0(s1_srcType(i)(j)(k)) -> s1_v0PregRData(i)(j)(k)))
          )}
          else if (k == 4) {(
            Seq(None)
            :+
            OptionWrapper(s1_vlPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VlRegSrcDataSet).nonEmpty,
              (SrcType.isVp(s1_srcType(i)(j)(k)) -> s1_vlPregRData(i)(j)(k)))
          )}
          else {(
            Seq(None)
            :+
            OptionWrapper(s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty,
              (SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k)))
            :+
            OptionWrapper(s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VecRegSrcDataSet).nonEmpty,
              (SrcType.isVp(s1_srcType(i)(j)(k)) -> s1_vfPregRData(i)(j)(k)))
            :+
            OptionWrapper(s1_fpPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(FpRegSrcDataSet).nonEmpty,
              (SrcType.isFp(s1_srcType(i)(j)(k)) -> s1_fpPregRData(i)(j)(k)))
          )}
        ).filter(_.nonEmpty).map(_.get)

        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }
      if (sinkData.params.hasJmpFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      }
      if (sinkData.params.needTarget) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.predictInfo.get.target := targetPCRdata(index)
      }
    }
  }

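  // Difftest: expose the architectural int/fp/vec register state read via the debug
  // ports, delayed by delayedCnt cycles before being checked.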
  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }

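  // The int/vf "regcache" structures below are purely a performance model: circular
  // buffers of the most recent write-back tags, read only by the hit-rate counters that
  // follow; they do not feed the actual datapath.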
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }

  val vf_regcache_size = 48
  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
  for (i <- vfRfWen.indices) {
    when (vfRfWen.head(i)) {
      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
    }
  }

  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"FpRegFileRead_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"FpRegFileWrite_hist", PopCount(fpRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)

  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part24 = (1 until 25).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))

  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)

  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"FpRFReadBeforeArb", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"FpRFReadAfterArb", PopCount(fpRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))

  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"FpRFReadBeforeArb_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"FpRFReadAfterArb_hist", PopCount(fpRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)

  // datasource perf counter (after arbiter)
  fromIQ.foreach(iq => iq.foreach{exu =>
    val exuParams = exu.bits.exuParams
    if (exuParams.isIntExeUnit) {
      for (i <- 0 until 2) {
        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_forward",  exu.fire && exu.bits.common.dataSources(i).readForward)
        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_bypass",   exu.fire && exu.bits.common.dataSources(i).readBypass)
        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_regcache", exu.fire && exu.bits.common.dataSources(i).readRegCache)
        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_reg",      exu.fire && exu.bits.common.dataSources(i).readReg)
        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_zero",     exu.fire && exu.bits.common.dataSources(i).readZero)
      }
    }
    if (exuParams.isMemExeUnit && exuParams.readIntRf) {
      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_forward",  exu.fire && exu.bits.common.dataSources(0).readForward)
      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_bypass",   exu.fire && exu.bits.common.dataSources(0).readBypass)
      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_regcache", exu.fire && exu.bits.common.dataSources(0).readRegCache)
      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_reg",      exu.fire && exu.bits.common.dataSources(0).readReg)
      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_zero",     exu.fire && exu.bits.common.dataSources(0).readZero)
    }
  })
}

class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromFpIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(fpSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toFpIQ = MixedVec(fpSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0Cancel = Output(ExuVec())

  val og1Cancel = Output(ExuVec())

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(fpSchdParams.genExuInputBundle)

  val toVecExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val og1ImmInfo: Vec[ImmInfo] = Output(Vec(params.allExuParams.size, new ImmInfo))

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromFpWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genFpWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val fromV0Wb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genV0WriteBackBundle)

  val fromVlWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVlWriteBackBundle)

  val fromPcTargetMem = Flipped(new PcToDataPathIO(params))

  val fromBypassNetwork: Vec[RCWritePort] = Vec(params.getIntExuRCWriteSize + params.getMemExuRCWriteSize,
    new RCWritePort(params.intSchdParams.get.rfDataWidth, RegCacheIdxWidth, params.intSchdParams.get.pregIdxWidth, params.debugEn)
  )

  val toBypassNetworkRCData: MixedVec[MixedVec[Vec[UInt]]] = MixedVec(
    Seq(intSchdParams, fpSchdParams, vfSchdParams, memSchdParams).map(schd => schd.issueBlockParams.map(iq =>
      MixedVec(iq.exuBlockParams.map(exu => Output(Vec(exu.numRegSrc, UInt(exu.srcDataBitsMax.W)))))
    )).flatten
  )

  val toWakeupQueueRCIdx: Vec[UInt] = Vec(params.getIntExuRCWriteSize + params.getMemExuRCWriteSize,
    Output(UInt(RegCacheIdxWidth.W))
  )

  val debugIntRat  = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat   = if (params.debugEn) Some(Input(Vec(32, UInt(fpSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat  = if (params.debugEn) Some(Input(Vec(31, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugV0Rat   = if (params.debugEn) Some(Input(Vec(1, UInt(log2Up(V0PhyRegs).W)))) else None
  val debugVlRat   = if (params.debugEn) Some(Input(Vec(1, UInt(log2Up(VlPhyRegs).W)))) else None
  val debugVl      = if (params.debugEn) Some(Output(UInt(VlData().dataWidth.W))) else None
}