package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import utils.{XSPerfAccumulate, XSPerfHistogram}
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
import xiangshan.backend.issue.EntryBundles._
import xiangshan.backend.regfile._
import xiangshan.backend.PcToDataPathIO

/**
  * Diplomacy (LazyModule) wrapper for the backend data path.
  *
  * Holds the backend parameters and instantiates [[DataPathImp]]; also prints the
  * int/vf register-file read/write port counts at elaboration time.
  */
class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath] Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath] Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
}

/**
  * Data path implementation.
  *
  * Responsibilities visible in this module:
  *  - arbitrate register-file read ports (int and vf) and write-back "busy" slots
  *    among all issue queues (int / vf / mem schedulers);
  *  - instantiate the int and vf physical register files (plus debug/difftest read ports);
  *  - pipeline issued uops one stage (s0 = from IQ, s1 = to EXU), handling flush,
  *    og0/og1 cancel, and load-cancel conditions;
  *  - mux register-file read data (and PC/target read data) into the EXU input bundles;
  *  - report og0/og1 responses back to the issue queues and cancel signals to the busy table.
  */
class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toFpExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience: flattened views over all schedulers, ordered int ++ vf ++ mem
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

  // Write-back collide checkers and read-port arbiters for both register files.
  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))

  // Per-IQ, per-issue-unit failure flags: og0 = issue did not fire at s0, og1 = s1 did not fire to EXU.
  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win (arbiter `ready` means this requester won the read port / wb slot)
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  // An issue unit is not blocked only when it won ALL of its requested read ports.
  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))

  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getIntRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val intDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)
  private val intNumRegSrcs: Seq[Seq[Int]] = fromIQ.map(x => x.map(xx => xx.bits.exuParams.numRegSrc).toSeq)

  // Wire int RF read requests into the read arbiter.
  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          if (intNumRegSrcs(iqIdx)(exuIdx) == 2) {
            // 2-src int EXU: port 0 serves whichever single source actually reads the RF
            // (if only src1 reads, its addr is steered onto port 0); port 1 is requested
            // only when BOTH sources read the RF.
            val src0Req = inRFReadReqSeq(0).valid && intDataSources(iqIdx)(exuIdx)(0).readReg
            val src1Req = inRFReadReqSeq(1).valid && intDataSources(iqIdx)(exuIdx)(1).readReg
            if (srcIdx == 0) {
              arbInSeq(srcIdx).valid := src0Req || src1Req
              arbInSeq(srcIdx).bits.addr := Mux(src1Req && !src0Req, inRFReadReqSeq(1).bits.addr,inRFReadReqSeq(0).bits.addr)
            } else {
              arbInSeq(srcIdx).valid := src0Req && src1Req
              arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
            }
          } else {
            arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && intDataSources(iqIdx)(exuIdx)(srcIdx).readReg
            arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
          }
        } else {
          // This src index never reads the int RF: tie off the arbiter input.
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getVfRfReadValidBundle(xx.valid)).toSeq).toSeq

  // Wire vf RF read requests into the read arbiter (no port-sharing special case here).
  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VfRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  // Write-back "busy" requests: a uop issuing this cycle will later write the int/vf RF.
  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getVfWen.getOrElse(false.B)).toSeq).toSeq

  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu
  private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu
  // Todo: limit read port
  private val numIntR = numIntRfReadByExu
  private val numVfR = numVfRfReadByExu
  println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Vf(${numVfRfReadByExu})")
  println(s"[DataPath] RegFile read port: Int(${numIntR}), Vf(${numVfR})")

  private val schdParams = params.allSchdParams

  // PC / target read ports from PcTargetMem, plus register-file read/write wires.
  private val pcReadValid = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathValid))
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqOffset))
  private val targetPCRdata = io.fromPcTargetMem.toDataPathTargetPC
  private val pcRdata = io.fromPcTargetMem.toDataPathPC
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  // The vf RF is written in XLEN-wide slices (VLEN / XLEN banks share one write address).
  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

  // Only EXUs that need the PC read from PcTargetMem; order must match the PC read ports.
  val pcReadFtqPtrFormIQ = fromIntIQ.flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadValid.zip(pcReadFtqPtrFormIQ.map(_.valid)).map(x => x._1 := x._2)
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.fromPcTargetMem.fromDataPathValid := pcReadValid
  io.fromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.fromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset

  // Debug/difftest read ports: 32 int aregs; 32 fp + 32 vec + 1 vconfig for the vf RF.
  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32 + 32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 32 + 1, UInt(VLEN.W))))
    } else { None }

  private val fpDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vecDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
    } else { None }
  private val vconfigDebugReadData: Option[UInt] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(UInt(64.W)))
    } else { None }


  fpDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only used [63, 0]
  // Each VLEN-wide vec entry is split into two 64-bit halves (low half first).
  vecDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(32, 64)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vconfigDebugReadData.foreach(_ := vfDebugRead
    .get._2(64)(63, 0)
  )

  io.debugVconfig.foreach(_ := vconfigDebugReadData.get)

  // Physical register files.
  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    bankNum = 1,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2))
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2))

  intRfWaddr := io.fromIntWb.map(_.addr).toSeq
  intRfWdata := io.fromIntWb.map(_.data).toSeq
  intRfWen := io.fromIntWb.map(_.wen).toSeq

  // Drive read addresses from the arbiter winners; unused ports read addr 0.
  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(_.addr).toSeq
  vfRfWdata := io.fromVfWb.map(_.data).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(_.wen)).foreach { case (wenSink, wenSource) => wenSink := wenSource } )// Todo: support fp multi-write

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }


  // Debug read addresses come from the rename tables (RAT) exposed for debug.
  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get ++ io.debugVecRat.get :+ io.debugVconfigRat.get
  }
  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has vfDebugRead: ${vfDebugRead.nonEmpty}")

  // s1 pipeline registers between IQ issue (s0) and EXU input (s1).
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_immInfo = Reg(MixedVec(toExu.map(x => MixedVec(x.map(x => new ImmInfo).toSeq)).toSeq))
  // Immediate info is captured whenever s0 is valid (held otherwise).
  s1_immInfo.zip(fromIQ).map { case (s1Vec, s0Vec) =>
    s1Vec.zip(s0Vec).map { case (s1, s0) =>
      s1.imm := Mux(s0.valid, s0.bits.common.imm, s1.imm)
      s1.immType := Mux(s0.valid, s0.bits.immType, s1.immType)
    }
  }
  io.og1ImmInfo.zip(s1_immInfo.flatten).map{ case(out, reg) =>
    out := reg
  }
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))

  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  // Route int RF read data to the per-EXU src slots according to each port's RD config.
  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
    iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
      val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[IntRD]) else x).flatten
      assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
      iuRdata.zip(realIuCfg)
        .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] }
        .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) }
    }
  }

  // Same routing for vf RF read data.
  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
    iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
      val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[VfRD]) else x).flatten
      assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
      iuRdata.zip(realIuCfg)
        .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] }
        .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) }
    }
  }

  // og0-cancel vector excluding load units, delayed one cycle for wake-up cancellation.
  val og0_cancel_no_load = og0FailedVec2.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1)
  val og0_cancel_delay = RegNext(VecInit(og0_cancel_no_load.toSeq))
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0

      // Default blocking rule: every src that reads a RF must have won both the
      // int and vf read ports it requested.
      val srcNotBlock = Wire(Bool())
      srcNotBlock := s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip vfRdArbWinner(i)(j)).map { case (source, win) =>
        !source.readReg || win._1 && win._2
      }.fold(true.B)(_ && _)
      // Special case for 2-src int-scheduler EXUs, matching the port-sharing above:
      // a lone reading source is steered to int port 0, so only require port 1 when both read.
      if (fromIQ(i)(j).bits.exuParams.schdType.isInstanceOf[IntScheduler] && (fromIQ(i)(j).bits.exuParams.numRegSrc == 2)) {
        val src0VfBlock = s0.bits.common.dataSources(0).readReg && !vfRdArbWinner(i)(j)(0)
        val src1VfBlock = s0.bits.common.dataSources(1).readReg && !vfRdArbWinner(i)(j)(1)
        val src1IntBlock = s0.bits.common.dataSources(0).readReg && s0.bits.common.dataSources(1).readReg && !intRdArbWinner(i)(j)(1)
        val src0IntBlock = (s0.bits.common.dataSources(0).readReg || s0.bits.common.dataSources(1).readReg) && !intRdArbWinner(i)(j)(0)
        srcNotBlock := !src0VfBlock && !src1VfBlock && !src1IntBlock && !src0IntBlock
      }
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && vfWbNotBlock(i)(j)
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      // s0_cancel: this uop forwarded data from a producer that was og0-cancelled last cycle.
      val s0_cancel = Wire(Bool())
      if (s0.bits.exuParams.isIQWakeUpSink) {
        val exuOHNoLoad = s0.bits.common.l1ExuOH.get.map(x => x.asTypeOf(Vec(x.getWidth, Bool())).zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1))
        s0_cancel := exuOHNoLoad.zip(s0.bits.common.dataSources).map{
          case (exuOH, dataSource) => (VecInit(exuOH).asUInt & og0_cancel_delay.asUInt).orR && dataSource.readForward
        }.reduce(_ || _) && s0.valid
      } else s0_cancel := false.B
      val s0_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && notBlock && !s1_cancel && !s0_ldCancel && !s0_cancel) {
        s1_valid := s0.valid
        s1_data.fromIssueBundle(s0.bits) // no src data here
        if (fromIQ(i)(j).bits.exuParams.schdType.isInstanceOf[IntScheduler] && (fromIQ(i)(j).bits.exuParams.numRegSrc == 2)) {
          // When only src1 read the RF its data arrives on the OTHER (shared) read port.
          s1_data.dataSources(1).value := Mux(!s0.bits.common.dataSources(0).readReg && s0.bits.common.dataSources(1).readReg, DataSource.anotherReg, s0.bits.common.dataSources(1).value)
        }
        s1_addrOH := s0.bits.addrOH
      }.otherwise {
        s1_valid := false.B
      }
      s0.ready := (s1_ready || !s1_valid) && notBlock && !s1_cancel && !s0_ldCancel && !s0_cancel
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end
    }
  }

  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
  // Report og0/og1 responses back to each issue queue.
  toIQs.zipWithIndex.foreach {
    case(toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx) := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx))
          og0resp.valid := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.robIdx := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx.foreach(_ := fromIQ(iqIdx)(iuIdx).bits.common.vpu.get.vuopIdx)
          og0resp.bits.resp := RespType.block
          og0resp.bits.fuType := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx) := s1_toExuValid(iqIdx)(iuIdx) && !toExuFire(iqIdx)(iuIdx)
          og1resp.valid := s1_toExuValid(iqIdx)(iuIdx)
          og1resp.bits.robIdx := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx.foreach(_ := s1_toExuData(iqIdx)(iuIdx).vpu.get.vuopIdx)
          // respType: fuIdle ->IQ entry clear
          //           fuUncertain ->IQ entry no action
          //           fuBusy ->IQ entry issued set false, then re-issue
          // Only hyu, lda and sta are fuUncertain at OG1 stage
          og1resp.bits.resp := Mux(!og1FailedVec2(iqIdx)(iuIdx),
            if (toIU.issueQueueParams match { case x => x.isMemAddrIQ && !x.isVecMemIQ }) RespType.uncertain else RespType.success,
            RespType.block
          )
          og1resp.bits.fuType := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }

  // One-hot cancel vectors: og0 = valid-but-not-fired at s0; og1 = valid-but-not-fired at s1.
  io.og0CancelOH := VecInit(fromFlattenIQ.map(x => x.valid && !x.fire)).asUInt
  io.og1CancelOH := VecInit(toFlattenExu.map(x => x.valid && !x.fire)).asUInt

  io.cancelToBusyTable.zipWithIndex.foreach { case (cancel, i) =>
    cancel.valid := fromFlattenIQ(i).valid && !fromFlattenIQ(i).fire
    cancel.bits.rfWen := fromFlattenIQ(i).bits.common.rfWen.getOrElse(false.B)
    cancel.bits.fpWen := fromFlattenIQ(i).bits.common.fpWen.getOrElse(false.B)
    cancel.bits.vecWen := fromFlattenIQ(i).bits.common.vecWen.getOrElse(false.B)
    cancel.bits.pdest := fromFlattenIQ(i).bits.common.pdest
  }

  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)

        // One-hot select between int and vf read data based on the registered src type.
        val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+
          (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty)
            Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k))
          else None) :+
          (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty)
            Some(SrcType.isVfp(s1_srcType(i)(j)(k))-> s1_vfPregRData(i)(j)(k))
          else None)
        ).filter(_.nonEmpty).map(_.get)
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }
      // PC / branch-target data for jump units, indexed by this EXU's PC read port.
      if (sinkData.params.hasJmpFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      }
      if (sinkData.params.needTarget) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.predictInfo.get.target := targetPCRdata(index)
      }
    }
  }

  // Difftest: expose architectural int/fp/vec register state (delayed to align with commit).
  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }

  // NOTE(review): the "regcache" structures below only feed the perf counters further
  // down (hit-rate modelling of a hypothetical register cache); they drive no datapath logic.
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }

  val vf_regcache_size = 48
  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
  for (i <- vfRfWen.indices) {
    when (vfRfWen.head(i)) {
      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
    }
  }

  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)

  // Sliding windows over the most recent int writes, for regcache hit-rate estimation.
  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part24 = (1 until 24).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))

  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)

  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))

  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
}

/**
  * I/O bundle of the data path: issue bundles in from each scheduler, EXU inputs out,
  * og0/og1 responses back to the IQs, write-back ports into both register files,
  * PC-target-mem read interface, and optional debug RAT/register-state ports.
  */
class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0CancelOH = Output(ExuOH(backendParams.numExu))

  val og1CancelOH = Output(ExuOH(backendParams.numExu))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val cancelToBusyTable = Vec(backendParams.numExu, ValidIO(new CancelSignal))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val og1ImmInfo: Vec[ImmInfo] = Output(Vec(params.allExuParams.size, new ImmInfo))

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val fromPcTargetMem = Flipped(new PcToDataPathIO(params))

  // Debug-only rename-table snapshots used to drive the difftest debug read ports.
  val debugIntRat = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVconfigRat = if (params.debugEn) Some(Input(UInt(vfSchdParams.pregIdxWidth.W))) else None
  val debugVconfig = if (params.debugEn) Some(Output(UInt(XLEN.W))) else None
}