// xref: /XiangShan/src/main/scala/xiangshan/backend/datapath/DataPath.scala (revision 5f4ac341316820adf1c238acc9631c187f280ef7)
package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import utils.{XSPerfAccumulate, XSPerfHistogram}
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
import xiangshan.backend.regfile._
import xiangshan.backend.PcToDataPathIO

class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath]   Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath]   Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  private val VCONFIG_PORT = params.vconfigPort
  private val VLD_PORT = params.vldPort

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toFpExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

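  // Arbiters for the shared regfile resources: the read arbiters grant physical
  // register read ports and the WB collide checkers grant write-back slots.
  // An issue may only leave s0 once it has won every port it needs.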
  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))

  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win: an arbiter input's ready indicates that request has won its read port
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))

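  // Integer regfile read requests: one request per register source of every issue
  // port. A request is raised only when the operand really has to be read from the
  // regfile (dataSources.readReg); unused arbiter inputs are tied off.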
  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getIntRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val intDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)

  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && intDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

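  // Same hookup for the vector/floating-point regfile: only the source indices that
  // are configured to read a VF register raise a request towards the VF read arbiter.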
  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getVfRfReadValidBundle(xx.valid)).toSeq).toSeq

  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VfRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

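  // Write-back port contention: every issuing uop that will write the int or vf
  // regfile requests a write-back slot from the corresponding collide checker.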
  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getVfWen.getOrElse(false.B)).toSeq).toSeq

  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu
  private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu
  // Todo: limit read port
  private val numIntR = numIntRfReadByExu
  private val numVfR = numVfRfReadByExu
  println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Vf(${numVfRfReadByExu})")
  println(s"[DataPath] RegFile read port: Int(${numIntR}), Vf(${numVfR})")

  private val schdParams = params.allSchdParams

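  // Regfile access wires. The int and vf regfiles are addressed with the read
  // addresses granted by the arbiters; write ports come from the write-back bundles.
  // PCs are not stored here: PC-consuming IQs send FTQ pointers/offsets out and the
  // PC target memory returns the PC data (pcRdata) consumed in stage s1.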
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.pcFromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.pcFromPcTargetMem.fromDataPathFtqOffset))
  private val pcRdata = io.pcFromPcTargetMem.toDataPathPC
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

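  // Forward the FTQ pointer/offset of every PC-consuming issue port to the PC target
  // memory; the number of PC read ports must match the number of such issue ports.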
  val pcReadFtqPtrFormIQ = fromIntIQ.flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.pcFromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.pcFromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset
  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32 + 32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 32 + 1, UInt(VLEN.W))))
    } else { None }

  private val fpDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vecDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
    } else { None }
  private val vconfigDebugReadData: Option[UInt] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(UInt(64.W)))
    } else { None }

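  // Debug/difftest view of the VF regfile: entries 0-31 hold the fp registers (only
  // the low 64 bits are used), entries 32-63 hold the vector registers (each VLEN
  // entry split into two 64-bit halves), and entry 64 holds vconfig.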
  fpDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only used [63, 0]
  vecDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(32, 64)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vconfigDebugReadData.foreach(_ := vfDebugRead
    .get._2(64)(63, 0)
  )

  io.debugVconfig.foreach(_ := vconfigDebugReadData.get)

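  // Instantiate the physical register files. The VF regfile is banked into
  // VLEN / XLEN slices (vfRfSplitNum), each slice with its own write enables.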
  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2))
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2))

  intRfWaddr := io.fromIntWb.map(_.addr).toSeq
  intRfWdata := io.fromIntWb.map(_.data).toSeq
  intRfWen := io.fromIntWb.map(_.wen).toSeq

  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(_.addr).toSeq
  vfRfWdata := io.fromVfWb.map(_.data).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(_.wen)).foreach { case (wenSink, wenSource) => wenSink := wenSource }) // Todo: support fp multi-write

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }

  vfRfRaddr(VCONFIG_PORT) := io.vconfigReadPort.addr
  io.vconfigReadPort.data := vfRfRdata(VCONFIG_PORT)
  // vfRfRaddr(VLD_PORT) := io.vldReadPort.addr
  io.vldReadPort.data := DontCare

  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get ++ io.debugVecRat.get :+ io.debugVconfigRat.get
  }
  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has vfDebugRead: ${vfDebugRead.nonEmpty}")

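  // Stage-1 pipeline registers between issue (s0) and the EXUs: latched valids,
  // payloads, destination address one-hots and source types, plus wires that carry
  // the regfile read data selected for each operand in s1.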
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))

  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => if (x.size > 1) x.filter(_.isInstanceOf[IntRD]) else x).flatten
        assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] }
          .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) }
      }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => if (x.size > 1) x.filter(_.isInstanceOf[VfRD]) else x).flatten
        assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] }
          .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) }
      }
  }

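  // s0 -> s1 handshake. An issue is latched into the s1 registers only when it wins
  // every regfile read port it needs and a write-back slot (notBlock), and it is not
  // flushed, cancelled at OG1, or cancelled by a load. Immediates are extracted into
  // the source fields at the same time; register data is muxed in later, in s1.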
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0
      val srcNotBlock = s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip vfRdArbWinner(i)(j)).map { case (source, win) =>
        !source.readReg || win._1 && win._2
      }.fold(true.B)(_ && _)
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && vfWbNotBlock(i)(j)
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s1_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && notBlock && !s1_cancel && !s1_ldCancel) {
        s1_valid := s0.valid
        s1_data.fromIssueBundle(s0.bits) // no src data here
        s1_addrOH := s0.bits.addrOH
      }.otherwise {
        s1_valid := false.B
      }
      s0.ready := (s1_ready || !s1_valid) && notBlock
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end

      // IQ(s0) --[Data]--> s1Reg ---------- begin
      // imm extract
      when (s0.fire && !s1_flush && notBlock) {
        if (s1_data.params.immType.nonEmpty && s1_data.src.size > 1) {
          // rs1 is always int reg, rs2 may be imm
          when(SrcType.isImm(s0.bits.srcType(1))) {
            s1_data.src(1) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        }
        if (s1_data.params.hasVecFu) {
          // RISC-V vector immediates are encoded in the rs1 field, so they land in src(0) rather than src(1)
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        } else if (s1_data.params.hasLoadFu || s1_data.params.hasHyldaFu) {
          // dirty code for fused_lui_load
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := SignExt(ImmUnion.U.toImm32(s0.bits.common.imm(s0.bits.common.imm.getWidth - 1, ImmUnion.I.len)), XLEN)
          }
        }
      }
      // IQ(s0) --[Data]--> s1Reg ---------- end
    }
  }

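  // OG0/OG1 responses back to the issue queues. OG0 fails when an issue is valid but
  // loses arbitration (does not fire); OG1 fails when the s1 register holds a valid
  // uop that the EXU does not accept. Only the hybrid/load/store address IQs receive
  // fuUncertain at OG1 (see the respType mapping below).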
  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
  toIQs.zipWithIndex.foreach {
    case (toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx) := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx))
          og0resp.valid := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.respType := RSFeedbackType.rfArbitFail
          og0resp.bits.dataInvalidSqIdx := DontCare
          og0resp.bits.robIdx := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx := fromIQ(iqIdx)(iuIdx).bits.common.vpu.getOrElse(0.U.asTypeOf(new VPUCtrlSignals)).vuopIdx
          og0resp.bits.rfWen := fromIQ(iqIdx)(iuIdx).bits.common.rfWen.getOrElse(false.B)
          og0resp.bits.fuType := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx) := s1_toExuValid(iqIdx)(iuIdx) && !toExuFire(iqIdx)(iuIdx)
          og1resp.valid := s1_toExuValid(iqIdx)(iuIdx)
          // respType:  fuIdle      -> clear the IQ entry
          //            fuUncertain -> IQ entry takes no action
          //            fuBusy      -> clear the entry's issued flag, then re-issue
          // Only hyu, lda and sta are fuUncertain at the OG1 stage
          og1resp.bits.respType := Mux(
            !og1FailedVec2(iqIdx)(iuIdx),
            if (toIU.issueQueueParams match { case x => x.isHyAddrIQ || x.isLdAddrIQ || x.isStAddrIQ }) RSFeedbackType.fuUncertain else RSFeedbackType.fuIdle,
            RSFeedbackType.fuBusy
          )
          og1resp.bits.dataInvalidSqIdx := DontCare
          og1resp.bits.robIdx := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx := s1_toExuData(iqIdx)(iuIdx).vpu.getOrElse(0.U.asTypeOf(new VPUCtrlSignals)).vuopIdx
          og1resp.bits.rfWen := s1_toExuData(iqIdx)(iuIdx).rfWen.getOrElse(false.B)
          og1resp.bits.fuType := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }

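  // Per-EXU one-hot cancel signals: OG0 cancel when an issue loses arbitration,
  // OG1 cancel when s1 cannot hand off to the EXU. cancelToBusyTable notifies the
  // busy table of the cancelled uop's destination register (int writes to x0 are
  // filtered out).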
  io.og0CancelOH := VecInit(fromFlattenIQ.map(x => x.valid && !x.fire)).asUInt
  io.og1CancelOH := VecInit(toFlattenExu.map(x => x.valid && !x.fire)).asUInt

  io.cancelToBusyTable.zipWithIndex.foreach { case (cancel, i) =>
    cancel.valid := fromFlattenIQ(i).valid && !fromFlattenIQ(i).fire && {
      if (fromFlattenIQ(i).bits.common.rfWen.isDefined)
        fromFlattenIQ(i).bits.common.rfWen.get && fromFlattenIQ(i).bits.common.pdest =/= 0.U
      else
        true.B
    }
    cancel.bits.rfWen := fromFlattenIQ(i).bits.common.rfWen.getOrElse(false.B)
    cancel.bits.fpWen := fromFlattenIQ(i).bits.common.fpWen.getOrElse(false.B)
    cancel.bits.vecWen := fromFlattenIQ(i).bits.common.vecWen.getOrElse(false.B)
    cancel.bits.pdest := fromFlattenIQ(i).bits.common.pdest
  }

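  // s1 -> EXU. Each operand is selected from the int or vf regfile read data
  // according to its latched source type; immediates and the PC captured in s1 take
  // precedence over regfile data where the EXU supports them.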
  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)

        val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+
          (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty)
            Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k))
          else None) :+
          (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty)
            Some(SrcType.isVfp(s1_srcType(i)(j)(k)) -> s1_vfPregRData(i)(j)(k))
          else None)
        ).filter(_.nonEmpty).map(_.get)
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }

      // data source2: extracted imm and pc saved in s1Reg
      if (sinkData.params.immType.nonEmpty && sinkData.src.size > 1) {
        when(SrcType.isImm(s1_srcType(i)(j)(1))) {
          sinkData.src(1) := s1_toExuData(i)(j).src(1)
        }
      }
      if (sinkData.params.hasJmpFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      } else if (sinkData.params.hasVecFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      } else if (sinkData.params.hasLoadFu || sinkData.params.hasHyldaFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      }
      // s1Reg --[Data]--> exu(s1) ---------- end
    }
  }

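  // Difftest: sample the architectural int/fp/vec register state through the debug
  // read ports (delayed by two cycles).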
  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }

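  // Performance modelling only: track the tags of the most recently written int/vf
  // physical registers and count how many regfile reads would have hit register
  // caches of various sizes. This has no effect on the functional datapath.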
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }

  val vf_regcache_size = 48
  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
  for (i <- vfRfWen.indices) {
    when (vfRfWen.head(i)) {
      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
    }
  }

  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)

  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part24 = (1 until 25).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))

  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)

  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))

  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
}

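// IO bundle of the DataPath: issue-queue inputs/outputs, EXU outputs, write-back
// inputs, the PC read interface, cancel signals, and debug/difftest hooks.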
class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  // Todo: check if this can be removed
  val vconfigReadPort = new RfReadPort(XLEN, PhyRegIdxWidth)

  val vldReadPort = new RfReadPort(VLEN, PhyRegIdxWidth)

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0CancelOH = Output(ExuOH(backendParams.numExu))

  val og1CancelOH = Output(ExuOH(backendParams.numExu))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val cancelToBusyTable = Vec(backendParams.numExu, ValidIO(new CancelSignal))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val pcFromPcTargetMem = Flipped(new PcToDataPathIO(params))

  val debugIntRat     = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat      = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat     = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVconfigRat = if (params.debugEn) Some(Input(UInt(vfSchdParams.pregIdxWidth.W))) else None
  val debugVconfig    = if (params.debugEn) Some(Output(UInt(XLEN.W))) else None
}