1package xiangshan.backend.datapath 2 3import chipsalliance.rocketchip.config.Parameters 4import chisel3.{Data, _} 5import chisel3.util._ 6import difftest.{DifftestArchFpRegState, DifftestArchIntRegState, DifftestArchVecRegState} 7import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp} 8import utility._ 9import xiangshan._ 10import xiangshan.backend.BackendParams 11import xiangshan.backend.datapath.DataConfig._ 12import xiangshan.backend.datapath.RdConfig._ 13import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler} 14import xiangshan.backend.Bundles._ 15import xiangshan.backend.regfile._ 16import xiangshan.backend.datapath.WbConfig.{IntWB, PregWB, VfWB} 17 18class WbBusyArbiterIO(inPortSize: Int, outPortSize: Int)(implicit p: Parameters) extends XSBundle { 19 val in = Vec(inPortSize, Flipped(DecoupledIO(new Bundle{}))) // TODO: remote the bool 20 val flush = Flipped(ValidIO(new Redirect)) 21} 22 23class WbBusyArbiter(isInt: Boolean)(implicit p: Parameters) extends XSModule { 24 val allExuParams = backendParams.allExuParams 25 26 val portConfigs = allExuParams.flatMap(_.wbPortConfigs).filter{ 27 wbPortConfig => 28 if(isInt){ 29 wbPortConfig.isInstanceOf[IntWB] 30 } 31 else{ 32 wbPortConfig.isInstanceOf[VfWB] 33 } 34 } 35 36 val numRfWrite = if (isInt) backendParams.numIntWb else backendParams.numVfWb 37 38 val io = IO(new WbBusyArbiterIO(portConfigs.size, numRfWrite)) 39 // inGroup[port -> Bundle] 40 val inGroup = io.in.zip(portConfigs).groupBy{ case(port, config) => config.port} 41 // sort by priority 42 val inGroupSorted = inGroup.map{ 43 case(key, value) => (key -> value.sortBy{ case(port, config) => config.asInstanceOf[PregWB].priority}) 44 } 45 46 private val arbiters = Seq.tabulate(numRfWrite) { x => { 47 if (inGroupSorted.contains(x)) { 48 Some(Module(new Arbiter( new Bundle{} ,n = inGroupSorted(x).length))) 49 } else { 50 None 51 } 52 }} 53 54 arbiters.zipWithIndex.foreach { case (arb, i) => 55 if (arb.nonEmpty) { 56 arb.get.io.in.zip(inGroupSorted(i).map(_._1)).foreach { case (arbIn, addrIn) => 57 arbIn <> addrIn 58 } 59 } 60 } 61 62 arbiters.foreach(_.foreach(_.io.out.ready := true.B)) 63} 64 65class RFArbiterBundle(addrWidth: Int)(implicit p: Parameters) extends XSBundle { 66 val addr = UInt(addrWidth.W) 67} 68 69class RFReadArbiterIO(inPortSize: Int, outPortSize: Int, pregWidth: Int)(implicit p: Parameters) extends XSBundle { 70 val in = Vec(inPortSize, Flipped(DecoupledIO(new RFArbiterBundle(pregWidth)))) 71 val out = Vec(outPortSize, Valid(new RFArbiterBundle(pregWidth))) 72 val flush = Flipped(ValidIO(new Redirect)) 73} 74 75class RFReadArbiter(isInt: Boolean)(implicit p: Parameters) extends XSModule { 76 val allExuParams = backendParams.allExuParams 77 78 val portConfigs: Seq[RdConfig] = allExuParams.map(_.rfrPortConfigs.flatten).flatten.filter{ 79 rfrPortConfigs => 80 if(isInt){ 81 rfrPortConfigs.isInstanceOf[IntRD] 82 } 83 else{ 84 rfrPortConfigs.isInstanceOf[VfRD] 85 } 86 } 87 88 private val moduleName = this.getClass.getName + (if (isInt) "Int" else "Vf") 89 90 println(s"[$moduleName] ports(${portConfigs.size})") 91 for (portCfg <- portConfigs) { 92 println(s"[$moduleName] port: ${portCfg.port}, priority: ${portCfg.priority}") 93 } 94 95 val pregParams = if(isInt) backendParams.intPregParams else backendParams.vfPregParams 96 97 val io = IO(new RFReadArbiterIO(portConfigs.size, backendParams.numRfRead, pregParams.addrWidth)) 98 // inGroup[port -> Bundle] 99 val inGroup: Map[Int, IndexedSeq[(DecoupledIO[RFArbiterBundle], RdConfig)]] = io.in.zip(portConfigs).groupBy{ case(port, config) => config.port} 100 // sort by priority 101 val inGroupSorted: Map[Int, IndexedSeq[(DecoupledIO[RFArbiterBundle], RdConfig)]] = inGroup.map{ 102 case(key, value) => (key -> value.sortBy{ case(port, config) => config.priority}) 103 } 104 105 private val arbiters: Seq[Option[Arbiter[RFArbiterBundle]]] = Seq.tabulate(backendParams.numRfRead) { x => { 106 if (inGroupSorted.contains(x)) { 107 Some(Module(new Arbiter(new RFArbiterBundle(pregParams.addrWidth), inGroupSorted(x).length))) 108 } else { 109 None 110 } 111 }} 112 113 arbiters.zipWithIndex.foreach { case (arb, i) => 114 if (arb.nonEmpty) { 115 arb.get.io.in.zip(inGroupSorted(i).map(_._1)).foreach { case (arbIn, addrIn) => 116 arbIn <> addrIn 117 } 118 } 119 } 120 121 io.out.zip(arbiters).foreach { case (addrOut, arb) => 122 if (arb.nonEmpty) { 123 val arbOut = arb.get.io.out 124 arbOut.ready := true.B 125 addrOut.valid := arbOut.valid 126 addrOut.bits := arbOut.bits 127 } else { 128 addrOut := 0.U.asTypeOf(addrOut) 129 } 130 } 131} 132 133class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule { 134 private implicit val dpParams: BackendParams = params 135 lazy val module = new DataPathImp(this) 136} 137 138class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams) 139 extends LazyModuleImp(wrapper) with HasXSParameter { 140 141 private val VCONFIG_PORT = params.vconfigPort 142 143 val io = IO(new DataPathIO()) 144 145 private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu) 146 private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu) 147 private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toFpExu) 148 149 println(s"[DataPath] IntIQ(${fromIntIQ.size}), MemIQ(${fromMemIQ.size})") 150 println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})") 151 152 // just refences for convience 153 private val fromIQ = fromIntIQ ++ fromVfIQ ++ fromMemIQ 154 155 private val toIQs = toIntIQ ++ toVfIQ ++ toMemIQ 156 157 private val toExu = toIntExu ++ toVfExu ++ toMemExu 158 159 private val intWbBusyArbiter = Module(new WbBusyArbiter(true)) 160 private val vfWbBusyArbiter = Module(new WbBusyArbiter(false)) 161 private val intRFReadArbiter = Module(new RFReadArbiter(true)) 162 private val vfRFReadArbiter = Module(new RFReadArbiter(false)) 163 164 private val issuePortsIn = fromIQ.flatten 165 private val intNotBlocksW = fromIQ.map { case iq => Wire(Vec(iq.size, Bool())) } 166 private val intNotBlocksSeqW = intNotBlocksW.flatten 167 private val vfNotBlocksW = fromIQ.map { case iq => Wire(Vec(iq.size, Bool())) } 168 private val vfNotBlocksSeqW = vfNotBlocksW.flatten 169 private val intBlocks = fromIQ.map{ case iq => Wire(Vec(iq.size, Bool())) } 170 private val intBlocksSeq = intBlocks.flatten 171 private val vfBlocks = fromIQ.map { case iq => Wire(Vec(iq.size, Bool())) } 172 private val vfBlocksSeq = vfBlocks.flatten 173 private val intWbConflictReads = io.wbConfictRead.flatten.flatten.map(_.intConflict) 174 private val vfWbConflictReads = io.wbConfictRead.flatten.flatten.map(_.vfConflict) 175 176 val intWbBusyInSize = issuePortsIn.map(issuePortIn => issuePortIn.bits.getIntWbBusyBundle.size).scan(0)(_ + _) 177 val intReadPortInSize: IndexedSeq[Int] = issuePortsIn.map(issuePortIn => issuePortIn.bits.getIntRfReadBundle.size).scan(0)(_ + _) 178 issuePortsIn.zipWithIndex.foreach{ 179 case (issuePortIn, idx) => 180 val wbBusyIn = issuePortIn.bits.getIntWbBusyBundle 181 val lw = intWbBusyInSize(idx) 182 val rw = intWbBusyInSize(idx + 1) 183 val arbiterInW = intWbBusyArbiter.io.in.slice(lw, rw) 184 arbiterInW.zip(wbBusyIn).foreach { 185 case (sink, source) => 186 sink.bits := DontCare 187 sink.valid := issuePortIn.valid && source 188 } 189 val notBlockFlag = if (rw > lw) { 190 val arbiterRes = arbiterInW.zip(wbBusyIn).map { 191 case (sink, source) => sink.ready 192 }.reduce(_ & _) 193 if (intWbConflictReads(idx).isDefined) { 194 Mux(intWbConflictReads(idx).get, arbiterRes, true.B) 195 } else arbiterRes 196 } else true.B 197 intNotBlocksSeqW(idx) := notBlockFlag 198 val readPortIn = issuePortIn.bits.getIntRfReadBundle 199 val l = intReadPortInSize(idx) 200 val r = intReadPortInSize(idx + 1) 201 val arbiterIn = intRFReadArbiter.io.in.slice(l, r) 202 arbiterIn.zip(readPortIn).foreach{ 203 case(sink, source) => 204 sink.bits.addr := source.addr 205 sink.valid := issuePortIn.valid && SrcType.isXp(source.srcType) 206 } 207 if(r > l){ 208 intBlocksSeq(idx) := !arbiterIn.zip(readPortIn).map { 209 case (sink, source) => Mux(SrcType.isXp(source.srcType), sink.ready, true.B) 210 }.reduce(_ & _) 211 } 212 else{ 213 intBlocksSeq(idx) := false.B 214 } 215 } 216 intWbBusyArbiter.io.flush := io.flush 217 intRFReadArbiter.io.flush := io.flush 218 219 val vfWbBusyInSize = issuePortsIn.map(issuePortIn => issuePortIn.bits.getVfWbBusyBundle.size).scan(0)(_ + _) 220 val vfReadPortInSize: IndexedSeq[Int] = issuePortsIn.map(issuePortIn => issuePortIn.bits.getVfRfReadBundle.size).scan(0)(_ + _) 221 println(s"vfReadPortInSize: $vfReadPortInSize") 222 223 issuePortsIn.zipWithIndex.foreach { 224 case (issuePortIn, idx) => 225 val wbBusyIn = issuePortIn.bits.getVfWbBusyBundle 226 val lw = vfWbBusyInSize(idx) 227 val rw = vfWbBusyInSize(idx + 1) 228 val arbiterInW = vfWbBusyArbiter.io.in.slice(lw, rw) 229 arbiterInW.zip(wbBusyIn).foreach { 230 case (sink, source) => 231 sink.bits := DontCare 232 sink.valid := issuePortIn.valid && source 233 } 234 val notBlockFlag = if (rw > lw){ 235 val arbiterRes = arbiterInW.zip(wbBusyIn).map { 236 case (sink, source) => sink.ready 237 }.reduce(_ & _) 238 if(vfWbConflictReads(idx).isDefined) { 239 Mux(vfWbConflictReads(idx).get, arbiterRes, true.B) 240 }else arbiterRes 241 }else true.B 242 vfNotBlocksSeqW(idx) := notBlockFlag 243 244 val readPortIn = issuePortIn.bits.getVfRfReadBundle 245 val l = vfReadPortInSize(idx) 246 val r = vfReadPortInSize(idx + 1) 247 val arbiterIn = vfRFReadArbiter.io.in.slice(l, r) 248 arbiterIn.zip(readPortIn).foreach { 249 case (sink, source) => 250 sink.bits.addr := source.addr 251 sink.valid := issuePortIn.valid && SrcType.isVfp(source.srcType) 252 } 253 if (r > l) { 254 vfBlocksSeq(idx) := !arbiterIn.zip(readPortIn).map { 255 case (sink, source) => Mux(SrcType.isVfp(source.srcType), sink.ready, true.B) 256 }.reduce(_ & _) 257 } 258 else { 259 vfBlocksSeq(idx) := false.B 260 } 261 } 262 vfWbBusyArbiter.io.flush := io.flush 263 vfRFReadArbiter.io.flush := io.flush 264 265 private val intSchdParams = params.schdParams(IntScheduler()) 266 private val vfSchdParams = params.schdParams(VfScheduler()) 267 private val memSchdParams = params.schdParams(MemScheduler()) 268 269 private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu 270 private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu 271 // Todo: limit read port 272 private val numIntR = numIntRfReadByExu 273 private val numVfR = numVfRfReadByExu 274 println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Vf(${numVfRfReadByExu})") 275 println(s"[DataPath] RegFile read port: Int(${numIntR}), Vf(${numVfR})") 276 277 private val schdParams = params.allSchdParams 278 279 private val intRfRaddr = Wire(Vec(params.numRfRead, UInt(intSchdParams.pregIdxWidth.W))) 280 private val intRfRdata = Wire(Vec(params.numRfRead, UInt(intSchdParams.rfDataWidth.W))) 281 private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool())) 282 private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W))) 283 private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W))) 284 285 private val vfRfSplitNum = VLEN / XLEN 286 private val vfRfRaddr = Wire(Vec(params.numRfRead, UInt(vfSchdParams.pregIdxWidth.W))) 287 private val vfRfRdata = Wire(Vec(params.numRfRead, UInt(vfSchdParams.rfDataWidth.W))) 288 private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool()))) 289 private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W))) 290 private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W))) 291 292 private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] = 293 if (env.AlwaysBasicDiff || env.EnableDifftest) { 294 Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W)))) 295 } else { None } 296 private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] = 297 if (env.AlwaysBasicDiff || env.EnableDifftest) { 298 Some(Wire(Vec(32 + 32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 32 + 1, UInt(VLEN.W)))) 299 } else { None } 300 301 private val fpDebugReadData: Option[Vec[UInt]] = 302 if (env.AlwaysBasicDiff || env.EnableDifftest) { 303 Some(Wire(Vec(32, UInt(XLEN.W)))) 304 } else { None } 305 private val vecDebugReadData: Option[Vec[UInt]] = 306 if (env.AlwaysBasicDiff || env.EnableDifftest) { 307 Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0)) 308 } else { None } 309 private val vconfigDebugReadData: Option[UInt] = 310 if (env.AlwaysBasicDiff || env.EnableDifftest) { 311 Some(Wire(UInt(64.W))) 312 } else { None } 313 314 315 fpDebugReadData.foreach(_ := vfDebugRead 316 .get._2 317 .slice(0, 32) 318 .map(_(63, 0)) 319 ) // fp only used [63, 0] 320 vecDebugReadData.foreach(_ := vfDebugRead 321 .get._2 322 .slice(32, 64) 323 .map(x => Seq(x(63, 0), x(127, 64))).flatten 324 ) 325 vconfigDebugReadData.foreach(_ := vfDebugRead 326 .get._2(64)(63, 0) 327 ) 328 329 io.debugVconfig := vconfigDebugReadData.get 330 331 IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata, 332 debugReadAddr = intDebugRead.map(_._1), 333 debugReadData = intDebugRead.map(_._2)) 334 VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata, 335 debugReadAddr = vfDebugRead.map(_._1), 336 debugReadData = vfDebugRead.map(_._2)) 337 338 intRfWaddr := io.fromIntWb.map(_.addr) 339 intRfWdata := io.fromIntWb.map(_.data) 340 intRfWen := io.fromIntWb.map(_.wen) 341 342 intRFReadArbiter.io.out.map(_.bits.addr).zip(intRfRaddr).foreach{ case(source, sink) => sink := source } 343 344 vfRfWaddr := io.fromVfWb.map(_.addr) 345 vfRfWdata := io.fromVfWb.map(_.data) 346 vfRfWen.foreach(_.zip(io.fromVfWb.map(_.wen)).foreach { case (wenSink, wenSource) => wenSink := wenSource } )// Todo: support fp multi-write 347 348 vfRFReadArbiter.io.out.map(_.bits.addr).zip(vfRfRaddr).foreach{ case(source, sink) => sink := source } 349 vfRfRaddr(VCONFIG_PORT) := io.vconfigReadPort.addr 350 io.vconfigReadPort.data := vfRfRdata(VCONFIG_PORT) 351 352 intDebugRead.foreach { case (addr, _) => 353 addr := io.debugIntRat 354 } 355 356 vfDebugRead.foreach { case (addr, _) => 357 addr := io.debugFpRat ++ io.debugVecRat :+ io.debugVconfigRat 358 } 359 println(s"[DataPath] " + 360 s"has intDebugRead: ${intDebugRead.nonEmpty}, " + 361 s"has vfDebugRead: ${vfDebugRead.nonEmpty}") 362 363 val s1_addrOHs = Reg(MixedVec( 364 fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType))) 365 )) 366 val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec( 367 toExu.map(x => MixedVec(x.map(_.valid.cloneType))) 368 )) 369 val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType))))) 370 val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType))))) // Todo 371 val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire))))) 372 373 val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType))))) 374 val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType))))) 375 376 val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs)) 377 378 println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}") 379 s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U))) 380 s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) => 381 iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) => 382 val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[IntRD]) else x).flatten 383 assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size") 384 iuRdata.zip(realIuCfg) 385 .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] } 386 .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) } 387 } 388 } 389 390 println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}") 391 s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U))) 392 s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) => 393 iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) => 394 val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[VfRD]) else x).flatten 395 assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size") 396 iuRdata.zip(realIuCfg) 397 .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] } 398 .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) } 399 } 400 } 401 402 for (i <- fromIQ.indices) { 403 for (j <- fromIQ(i).indices) { 404 // IQ(s0) --[Ctrl]--> s1Reg ---------- begin 405 // refs 406 val s1_valid = s1_toExuValid(i)(j) 407 val s1_ready = s1_toExuReady(i)(j) 408 val s1_data = s1_toExuData(i)(j) 409 val s1_addrOH = s1_addrOHs(i)(j) 410 val s0 = fromIQ(i)(j) // s0 411 val block = (intBlocks(i)(j) || !intNotBlocksW(i)(j)) || (vfBlocks(i)(j) || !vfNotBlocksW(i)(j)) 412 val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush))) 413 when (s0.fire && !s1_flush && !block) { 414 s1_valid := s0.valid 415 s1_data.fromIssueBundle(s0.bits) // no src data here 416 s1_addrOH := s0.bits.addrOH 417 }.otherwise { 418 s1_valid := false.B 419 } 420 dontTouch(block) 421 s0.ready := (s1_ready || !s1_valid) && !block 422 // IQ(s0) --[Ctrl]--> s1Reg ---------- end 423 424 // IQ(s0) --[Data]--> s1Reg ---------- begin 425 // imm extract 426 when (s0.fire && !s1_flush && !block) { 427 if (s1_data.params.immType.nonEmpty && s1_data.src.size > 1) { 428 // rs1 is always int reg, rs2 may be imm 429 when(SrcType.isImm(s0.bits.srcType(1))) { 430 s1_data.src(1) := ImmExtractor( 431 s0.bits.common.imm, 432 s0.bits.immType, 433 s1_data.params.dataBitsMax, 434 s1_data.params.immType.map(_.litValue) 435 ) 436 } 437 } 438 if (s1_data.params.hasJmpFu) { 439 when(SrcType.isPc(s0.bits.srcType(0))) { 440 s1_data.src(0) := SignExt(s0.bits.jmp.get.pc, XLEN) 441 } 442 } else if (s1_data.params.hasVecFu) { 443 // Fuck off riscv vector imm!!! Why not src1??? 444 when(SrcType.isImm(s0.bits.srcType(0))) { 445 s1_data.src(0) := ImmExtractor( 446 s0.bits.common.imm, 447 s0.bits.immType, 448 s1_data.params.dataBitsMax, 449 s1_data.params.immType.map(_.litValue) 450 ) 451 } 452 } 453 } 454 // IQ(s0) --[Data]--> s1Reg ---------- end 455 } 456 } 457 458 private val fromIQFire = fromIQ.map(_.map(_.fire)) 459 private val toExuFire = toExu.map(_.map(_.fire)) 460 toIQs.zipWithIndex.foreach { 461 case(toIQ, iqIdx) => 462 toIQ.zipWithIndex.foreach { 463 case (toIU, iuIdx) => 464 // IU: issue unit 465 val og0resp = toIU.og0resp 466 og0resp.valid := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx)) 467 og0resp.bits.respType := RSFeedbackType.rfArbitFail 468 og0resp.bits.success := false.B 469 og0resp.bits.addrOH := fromIQ(iqIdx)(iuIdx).bits.addrOH 470 og0resp.bits.rfWen := fromIQ(iqIdx)(iuIdx).bits.common.rfWen.getOrElse(false.B) 471 og0resp.bits.fuType := fromIQ(iqIdx)(iuIdx).bits.common.fuType 472 473 val og1resp = toIU.og1resp 474 og1resp.valid := s1_toExuValid(iqIdx)(iuIdx) 475 og1resp.bits.respType := Mux(toExuFire(iqIdx)(iuIdx), RSFeedbackType.fuIdle, RSFeedbackType.fuBusy) 476 og1resp.bits.success := false.B 477 og1resp.bits.addrOH := s1_addrOHs(iqIdx)(iuIdx) 478 og1resp.bits.rfWen := s1_toExuData(iqIdx)(iuIdx).rfWen.getOrElse(false.B) 479 og1resp.bits.fuType := s1_toExuData(iqIdx)(iuIdx).fuType 480 } 481 } 482 483 for (i <- toExu.indices) { 484 for (j <- toExu(i).indices) { 485 // s1Reg --[Ctrl]--> exu(s1) ---------- begin 486 // refs 487 val sinkData = toExu(i)(j).bits 488 // assign 489 toExu(i)(j).valid := s1_toExuValid(i)(j) 490 s1_toExuReady(i)(j) := toExu(i)(j).ready 491 sinkData := s1_toExuData(i)(j) 492 // s1Reg --[Ctrl]--> exu(s1) ---------- end 493 494 // s1Reg --[Data]--> exu(s1) ---------- begin 495 // data source1: preg read data 496 for (k <- sinkData.src.indices) { 497 val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k) 498 499 val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+ 500 (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty) 501 Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k)) 502 else None) :+ 503 (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty) 504 Some(SrcType.isVfp(s1_srcType(i)(j)(k))-> s1_vfPregRData(i)(j)(k)) 505 else None) 506 ).filter(_.nonEmpty).map(_.get) 507 if (readRfMap.nonEmpty) 508 sinkData.src(k) := Mux1H(readRfMap) 509 } 510 511 // data source2: extracted imm and pc saved in s1Reg 512 if (sinkData.params.immType.nonEmpty && sinkData.src.size > 1) { 513 when(SrcType.isImm(s1_srcType(i)(j)(1))) { 514 sinkData.src(1) := s1_toExuData(i)(j).src(1) 515 } 516 } 517 if (sinkData.params.hasJmpFu) { 518 when(SrcType.isPc(s1_srcType(i)(j)(0))) { 519 sinkData.src(0) := s1_toExuData(i)(j).src(0) 520 } 521 } else if (sinkData.params.hasVecFu) { 522 when(SrcType.isImm(s1_srcType(i)(j)(0))) { 523 sinkData.src(0) := s1_toExuData(i)(j).src(0) 524 } 525 } 526 // s1Reg --[Data]--> exu(s1) ---------- end 527 } 528 } 529 530 if (env.AlwaysBasicDiff || env.EnableDifftest) { 531 val delayedCnt = 2 532 val difftestArchIntRegState = Module(new DifftestArchIntRegState) 533 difftestArchIntRegState.io.clock := clock 534 difftestArchIntRegState.io.coreid := io.hartId 535 difftestArchIntRegState.io.gpr := DelayN(intDebugRead.get._2, delayedCnt) 536 537 val difftestArchFpRegState = Module(new DifftestArchFpRegState) 538 difftestArchFpRegState.io.clock := clock 539 difftestArchFpRegState.io.coreid := io.hartId 540 difftestArchFpRegState.io.fpr := DelayN(fpDebugReadData.get, delayedCnt) 541 542 val difftestArchVecRegState = Module(new DifftestArchVecRegState) 543 difftestArchVecRegState.io.clock := clock 544 difftestArchVecRegState.io.coreid := io.hartId 545 difftestArchVecRegState.io.vpr := DelayN(vecDebugReadData.get, delayedCnt) 546 } 547} 548 549class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle { 550 // params 551 private val intSchdParams = params.schdParams(IntScheduler()) 552 private val vfSchdParams = params.schdParams(VfScheduler()) 553 private val memSchdParams = params.schdParams(MemScheduler()) 554 // bundles 555 val hartId = Input(UInt(8.W)) 556 557 val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect)) 558 559 // Todo: check if this can be removed 560 val vconfigReadPort = new RfReadPort(XLEN, PhyRegIdxWidth) 561 562 val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle()))))) 563 564 val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = 565 Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle))) 566 567 val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = 568 Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle))) 569 570 val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle))) 571 572 val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle)) 573 574 val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle)) 575 576 val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle)) 577 578 val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle 579 580 val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle) 581 582 val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle 583 584 val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle) 585 586 val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle) 587 588 val debugIntRat = Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W))) 589 val debugFpRat = Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W))) 590 val debugVecRat = Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W))) 591 val debugVconfigRat = Input(UInt(vfSchdParams.pregIdxWidth.W)) 592 val debugVconfig = Output(UInt(XLEN.W)) 593 594} 595