/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.ctrlblock.{MemCtrl, RedirectGenerator}
import xiangshan.backend.decode.{DecodeStage, FusionDecoder}
import xiangshan.backend.dispatch.{Dispatch, DispatchQueue}
import xiangshan.backend.fu.PFEvent
import xiangshan.backend.rename.{Rename, RenameTableWrapper}
import xiangshan.backend.rob.{Rob, RobCSRIO, RobLsqIO}
import xiangshan.frontend.Ftq_RF_Components
import xiangshan.v2backend.Bundles.{DecodedInst, DynInst, ExceptionInfo}
import xiangshan.v2backend.{BackendParams, VAddrData}

/** Control block -> FTQ interface: per-cycle ROB commit information plus a
  * single redirect channel driven by this control block.
  */
class CtrlToFtqIO(implicit p: Parameters) extends XSBundle {
  def numRedirect = backendParams.numRedirect
  val rob_commits = Vec(CommitWidth, Valid(new RobCommitInfo))
  val redirect = Valid(new Redirect)
}

/** Diplomacy (LazyModule) wrapper of the control block.
  *
  * It owns the ROB as a child LazyModule; the actual hardware is elaborated
  * in [[CtrlBlockImp]].
  */
class CtrlBlock(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  val rob = LazyModule(new Rob(params))

  // Historical writeback-sink plumbing, kept for reference:
  //  override def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]]): HasWritebackSink = {
  //    rob.addWritebackSink(Seq(this), Some(Seq(writebackSinks.length)))
  //    super.addWritebackSink(source, index)
  //  }

  lazy val module = new CtrlBlockImp(this)(p, params)

  //  override lazy val writebackSourceParams: Seq[WritebackSourceParams] = {
  //    writebackSinksParams
  //  }
  //  override lazy val writebackSourceImp: HasWritebackSourceImp = module
  //
  //  override def generateWritebackIO(
  //    thisMod: Option[HasWritebackSource] = None,
  //    thisModImp: Option[HasWritebackSourceImp] = None
  //  ): Unit = {
  //    module.io.writeback.zip(writebackSinksImp(thisMod, thisModImp)).foreach(x => x._1 := x._2)
  //  }
}

/** Hardware implementation of the control block.
  *
  * Contains the in-order front half of the backend pipeline
  * (decode -> fusion decode -> rename -> dispatch -> dispatch queues),
  * the redirect generator, the backend PC/target memories, and the ROB
  * instantiated by the wrapper.
  *
  * Naming convention for redirect-related signals: an `sN_` prefix marks the
  * pipeline cycle (relative to the event source) at which the signal is valid,
  * e.g. `s0_robFlushRedirect` (same cycle as rob.io.flushOut) vs.
  * `s1_robFlushRedirect` (one register stage later).
  */
class CtrlBlockImp(
  override val wrapper: CtrlBlock
)(implicit
  p: Parameters,
  params: BackendParams
) extends LazyModuleImp(wrapper)
  with HasXSParameter
  with HasCircularQueuePtrHelper
  with HasPerfEvents
{
  // pcMem read ports: bjIssueQueue.enq(4) + redirects (1) + loadPredUpdate (1) + robFlush (1)
  private val numPcMemReadForExu = params.numPcReadPort
  private val numPcMemRead = params.numPcReadPort + 1 + 1 + 1
  private val numTargetMemRead = numPcMemReadForExu
  // Fixed read-port indices: the EXU ports occupy [0, numPcMemReadForExu),
  // followed by one port each for redirect, memory-dependence prediction and ROB flush.
  private val pcMemReadIdxForRedirect = numPcMemReadForExu
  private val pcMemReadIdxForMemPred = numPcMemReadForExu + 1
  private val pcMemReadIdxForRobFlush = numPcMemReadForExu + 2

  println(s"pcMem read num: $numPcMemRead")
  println(s"pcMem read num for exu: $numPcMemReadForExu")
  println(s"targetMem read num: $numTargetMemRead")

  val io = IO(new CtrlBlockIO())

  // Historical writeback-source plumbing, kept for reference:
  //  override def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = {
  //    Some(io.writeback.map(writeback => {
  //      val exuOutput = WireInit(writeback)
  //      val timer = GTimer()
  //      for ((wb_next, wb) <- exuOutput.zip(writeback)) {
  //        wb_next.valid := RegNext(wb.valid && !wb.bits.uop.robIdx.needFlush(Seq(stage2Redirect, redirectForExu)))
  //        wb_next.bits := RegNext(wb.bits)
  //        wb_next.bits.uop.debugInfo.writebackTime := timer
  //      }
  //      exuOutput
  //    }))
  //  }

  // Pipeline sub-modules (in program order: decode -> rename -> dispatch -> queues).
  val decode = Module(new DecodeStage)
  val fusionDecoder = Module(new FusionDecoder)
  val rat = Module(new RenameTableWrapper)
  val rename = Module(new Rename)
  val dispatch = Module(new Dispatch)
  val intDq = Module(new DispatchQueue(dpParams.IntDqSize, RenameWidth, dpParams.IntDqDeqWidth))
  val fpDq = Module(new DispatchQueue(dpParams.FpDqSize, RenameWidth, dpParams.FpDqDeqWidth))
  val lsDq = Module(new DispatchQueue(dpParams.LsDqSize, RenameWidth, dpParams.LsDqDeqWidth))
  val redirectGen = Module(new RedirectGenerator)
  // pcMem holds FTQ PC entries written by the frontend; targetMem holds branch targets.
  private val pcMem = Module(new SyncDataModuleTemplate(new Ftq_RF_Components, FtqSize, numPcMemRead, 1, "BackendPC"))
  private val targetMem = Module(new SyncDataModuleTemplate(UInt(VAddrData().dataWidth.W), FtqSize, numTargetMemRead, 1))
  private val rob = wrapper.rob.module
  private val memCtrl = Module(new MemCtrl(params))

  // Macro fusion is disabled under single-step debugging or when turned off by CSR.
  private val disableFusion = decode.io.csrCtrl.singlestep || !decode.io.csrCtrl.fusion_enable

  // ROB flush redirect: s0 is the raw flushOut, s1 is registered one cycle later.
  private val s0_robFlushRedirect = rob.io.flushOut
  private val s1_robFlushRedirect = Wire(Valid(new Redirect))
  s1_robFlushRedirect.valid := RegNext(s0_robFlushRedirect.valid)
  s1_robFlushRedirect.bits := RegEnable(s0_robFlushRedirect.bits, s0_robFlushRedirect.valid)

  // Read the flushed instruction's PC from pcMem (synchronous read: address at
  // s0, data at s1 — hence the RegNext on the offset).
  pcMem.io.raddr(pcMemReadIdxForRobFlush) := s0_robFlushRedirect.bits.ftqIdx.value
  private val s1_robFlushPc = pcMem.io.rdata(pcMemReadIdxForRobFlush).getPc(RegNext(s0_robFlushRedirect.bits.ftqOffset))
  private val s3_redirectGen = redirectGen.io.stage2Redirect
  // The ROB flush (older by construction) takes priority over the generated redirect.
  private val s1_s3_redirect = Mux(s1_robFlushRedirect.valid, s1_robFlushRedirect, s3_redirectGen)
  // Sticky flag: a redirect has been issued but the frontend has not yet been
  // notified; cleared one cycle after the redirect reaches the FTQ.
  private val s2_s4_pendingRedirectValid = RegInit(false.B)
  when (s1_s3_redirect.valid) {
    s2_s4_pendingRedirectValid := true.B
  }.elsewhen (RegNext(io.frontend.toFtq.redirect.valid)) {
    s2_s4_pendingRedirectValid := false.B
  }

  // Redirect will be RegNext-ed again at ExuBlocks.
  val s2_s4_redirect = RegNextWithEnable(s1_s3_redirect)

  // Predecode info of redirect-capable writebacks; guarded by exuRedirect.valid.
  private val exuPredecode = RegNext(VecInit(
    io.fromWB.wbData.filter(_.bits.redirect.nonEmpty).map(x => x.bits.predecodeInfo.get)
  ))

  // One delayed redirect per redirect-capable writeback port; a redirect is
  // dropped if an older in-flight redirect already flushes its robIdx.
  private val exuRedirects: IndexedSeq[ValidIO[Redirect]] = io.fromWB.wbData.filter(_.bits.redirect.nonEmpty).map(x => {
    val valid = x.valid && x.bits.redirect.get.valid
    val killedByOlder = x.bits.robIdx.needFlush(Seq(s1_s3_redirect, s2_s4_redirect))
    val delayed = Wire(Valid(new Redirect()))
    delayed.valid := RegNext(valid && !killedByOlder, init = false.B)
    delayed.bits := RegEnable(x.bits.redirect.get.bits, x.valid)
    delayed
  })

  // Memory-order-violation replay redirect from the memory block, same
  // kill-by-older filtering as the EXU redirects above.
  private val memViolation = io.fromMem.violation
  val loadReplay = Wire(ValidIO(new Redirect))
  loadReplay.valid := RegNext(memViolation.valid &&
    !memViolation.bits.robIdx.needFlush(Seq(s1_s3_redirect, s2_s4_redirect))
  )
  loadReplay.bits := RegEnable(memViolation.bits, memViolation.valid)

  // Vconfig (vset*) commit tracking, currently disabled; pdestReverse is kept
  // live because the commented selectors below consume it.
  //  val isCommitWriteVconfigVec = rob.io.commits.commitValid.zip(rob.io.commits.info).map{case (valid, info) => valid && info.ldest === 32.U}.reverse
  //  val isWalkWriteVconfigVec = rob.io.commits.walkValid.zip(rob.io.commits.info).map{case (valid, info) => valid && info.ldest === 32.U}.reverse
  val pdestReverse = rob.io.commits.info.map(info => info.pdest).reverse
  //  val commitSel = PriorityMux(isCommitWriteVconfigVec, pdestReverse)
  //  val walkSel = PriorityMux(isWalkWriteVconfigVec, pdestReverse)
  //  val vconfigAddr = Mux(rob.io.commits.isCommit, commitSel, walkSel)
  //  decode.io.vconfig := io.vconfigReadPort.data
  //  decode.io.isVsetFlushPipe := rob.io.isVsetFlushPipe

  // PC reads for the redirect generator (synchronous read, offset delayed to match).
  pcMem.io.raddr(pcMemReadIdxForRedirect) := redirectGen.io.redirectPcRead.ptr.value
  redirectGen.io.redirectPcRead.data := pcMem.io.rdata(pcMemReadIdxForRedirect).getPc(RegNext(redirectGen.io.redirectPcRead.offset))
  pcMem.io.raddr(pcMemReadIdxForMemPred) := redirectGen.io.memPredPcRead.ptr.value
  redirectGen.io.memPredPcRead.data := pcMem.io.rdata(pcMemReadIdxForMemPred).getPc(RegNext(redirectGen.io.memPredPcRead.offset))
  redirectGen.io.hartId := io.fromTop.hartId
  redirectGen.io.exuRedirect := exuRedirects
  redirectGen.io.exuOutPredecode := exuPredecode // guarded by exuRedirect.valid
  redirectGen.io.loadReplay <> loadReplay

  redirectGen.io.robFlush := s1_robFlushRedirect.valid

  // Frontend flush is delayed 5 cycles after the s1 ROB flush (see the T0..T6
  // timeline below) so the CSR unit has produced the trap target by then.
  val s6_frontendFlushValid = DelayN(s1_robFlushRedirect.valid, 5)
  // NOTE(review): bits are captured at s1 and held until s6 consumes them;
  // this assumes no second ROB flush can overwrite them in between — confirm.
  val frontendFlushBits = RegEnable(s1_robFlushRedirect.bits, s1_robFlushRedirect.valid)
  // When ROB commits an instruction with a flush, we notify the frontend of the flush without the commit.
  // Flushes to frontend may be delayed by some cycles and commit before flush causes errors.
  // Thus, we make all flush reasons to behave the same as exceptions for frontend.
  for (i <- 0 until CommitWidth) {
    // why flushOut: instructions with flushPipe are not committed to frontend.
    // If we commit them to frontend, it will cause flush after commit, which is not acceptable by frontend.
    // uopIdx.andR: only report the commit once the last micro-op of the instruction commits.
    val s1_isCommit = rob.io.commits.commitValid(i) && rob.io.commits.isCommit && rob.io.commits.info(i).uopIdx.andR && !s0_robFlushRedirect.valid
    io.frontend.toFtq.rob_commits(i).valid := RegNext(s1_isCommit)
    io.frontend.toFtq.rob_commits(i).bits := RegEnable(rob.io.commits.info(i), s1_isCommit)
  }
  io.frontend.toFtq.redirect.valid := s6_frontendFlushValid || s3_redirectGen.valid
  io.frontend.toFtq.redirect.bits := Mux(s6_frontendFlushValid, frontendFlushBits, s3_redirectGen.bits)
  // Be careful here:
  // T0: rob.io.flushOut, s0_robFlushRedirect
  // T1: s1_robFlushRedirect, rob.io.exception.valid
  // T2: csr.redirect.valid
  // T3: csr.exception.valid
  // T4: csr.trapTarget
  // T5: ctrlBlock.trapTarget
  // T6: io.frontend.toFtq.stage2Redirect.valid
  val s2_robFlushPc = RegEnable(Mux(s1_robFlushRedirect.bits.flushItself(),
    s1_robFlushPc, // replay inst
    s1_robFlushPc + 4.U // flush pipe
  ), s1_robFlushRedirect.valid)
  private val s2_csrIsXRet = io.robio.csr.isXRet
  private val s5_csrIsTrap = DelayN(rob.io.exception.valid, 4)
  private val s2_s5_trapTargetFromCsr = io.robio.csr.trapTarget

  // xRET / trap go to the CSR-provided target, otherwise resume at the flushed PC.
  val flushTarget = Mux(s2_csrIsXRet || s5_csrIsTrap, s2_s5_trapTargetFromCsr, s2_robFlushPc)
  when (s6_frontendFlushValid) {
    io.frontend.toFtq.redirect.bits.level := RedirectLevel.flush
    io.frontend.toFtq.redirect.bits.cfiUpdate.target := RegNext(flushTarget)
  }

  // Top-down performance-analysis counters: classify frontend-bound bubbles by
  // their cause (branch mispredict / ROB flush / load replay).
  if (env.EnableTopDown) {
    val stage2Redirect_valid_when_pending = s2_s4_pendingRedirectValid && s1_s3_redirect.valid

    val stage2_redirect_cycles = RegInit(false.B)                                                  // frontend_bound->fetch_latency->stage2_redirect
    val MissPredPending = RegInit(false.B); val branch_resteers_cycles = RegInit(false.B)          // frontend_bound->fetch_latency->stage2_redirect->branch_resteers
    val RobFlushPending = RegInit(false.B); val robFlush_bubble_cycles = RegInit(false.B)          // frontend_bound->fetch_latency->stage2_redirect->robflush_bubble
    val LdReplayPending = RegInit(false.B); val ldReplay_bubble_cycles = RegInit(false.B)          // frontend_bound->fetch_latency->stage2_redirect->ldReplay_bubble

    when(redirectGen.io.isMisspreRedirect) { MissPredPending := true.B }
    when(s1_robFlushRedirect.valid) { RobFlushPending := true.B }
    when(redirectGen.io.loadReplay.valid) { LdReplayPending := true.B }

    when (RegNext(io.frontend.toFtq.redirect.valid)) {
      when(s2_s4_pendingRedirectValid) { stage2_redirect_cycles := true.B }
      when(MissPredPending) { MissPredPending := false.B; branch_resteers_cycles := true.B }
      when(RobFlushPending) { RobFlushPending := false.B; robFlush_bubble_cycles := true.B }
      when(LdReplayPending) { LdReplayPending := false.B; ldReplay_bubble_cycles := true.B }
    }

    // The bubble ends as soon as decode produces valid output again.
    when(VecInit(decode.io.out.map(x => x.valid)).asUInt.orR){
      when(stage2_redirect_cycles) { stage2_redirect_cycles := false.B }
      when(branch_resteers_cycles) { branch_resteers_cycles := false.B }
      when(robFlush_bubble_cycles) { robFlush_bubble_cycles := false.B }
      when(ldReplay_bubble_cycles) { ldReplay_bubble_cycles := false.B }
    }

    XSPerfAccumulate("stage2_redirect_cycles", stage2_redirect_cycles)
    XSPerfAccumulate("branch_resteers_cycles", branch_resteers_cycles)
    XSPerfAccumulate("robFlush_bubble_cycles", robFlush_bubble_cycles)
    XSPerfAccumulate("ldReplay_bubble_cycles", ldReplay_bubble_cycles)
    XSPerfAccumulate("s2Redirect_pend_cycles", stage2Redirect_valid_when_pending)
  }

  // Frontend -> decode handshake.
  decode.io.in.zip(io.frontend.cfVec).foreach { case (decodeIn, frontendCf) =>
    decodeIn.valid := frontendCf.valid
    frontendCf.ready := decodeIn.ready
    decodeIn.bits.connectCtrlFlow(frontendCf.bits)
  }
  decode.io.csrCtrl := RegNext(io.csrCtrl)
  decode.io.intRat <> rat.io.intReadPorts
  decode.io.fpRat <> rat.io.fpReadPorts
  decode.io.vecRat <> rat.io.vecReadPorts
  decode.io.fusion := 0.U.asTypeOf(decode.io.fusion) // Todo
  //  decode.io.isRedirect <> stage2Redirect.valid
  //  decode.io.robCommits <> rob.io.commits

  // Fusion decoder: instructions with instruction-fetch faults never fuse.
  val decodeHasException = decode.io.out.map(x => x.bits.exceptionVec(instrPageFault) || x.bits.exceptionVec(instrAccessFault))
  for (i <- 0 until DecodeWidth) {
    fusionDecoder.io.in(i).valid := decode.io.out(i).valid && !(decodeHasException(i) || disableFusion)
    fusionDecoder.io.in(i).bits := decode.io.out(i).bits.instr
    if (i > 0) {
      fusionDecoder.io.inReady(i - 1) := decode.io.out(i).ready
    }
  }

  // Pipeline register stage between decode and rename, flushed on redirect or
  // while a redirect is still pending delivery to the frontend.
  private val decodePipeRename = Wire(Vec(RenameWidth, DecoupledIO(new DecodedInst)))

  for (i <- 0 until RenameWidth) {
    PipelineConnect(decode.io.out(i), decodePipeRename(i), rename.io.in(i).ready,
      s1_s3_redirect.valid || s2_s4_pendingRedirectValid, moduleName = Some("decodePipeRenameModule"))

    decodePipeRename(i).ready := rename.io.in(i).ready
    // fusionDecoder.io.clear(i): the second instruction of a fused pair is dropped.
    rename.io.in(i).valid := decodePipeRename(i).valid && !fusionDecoder.io.clear(i)
    rename.io.in(i).bits := decodePipeRename(i).bits
  }

  for (i <- 0 until RenameWidth - 1) {
    fusionDecoder.io.dec(i) := decodePipeRename(i).bits
    rename.io.fusionInfo(i) := fusionDecoder.io.info(i)

    // update the first RenameWidth - 1 instructions
    decode.io.fusion(i) := fusionDecoder.io.out(i).valid && rename.io.out(i).fire
    when (fusionDecoder.io.out(i).valid) {
      fusionDecoder.io.out(i).bits.update(rename.io.in(i).bits)
      // TODO: remove this dirty code for ftq update
      // Encode the fused pair's FTQ relationship into commitType so commit can
      // advance the FTQ pointers correctly (see the four cond cases below).
      val sameFtqPtr = rename.io.in(i).bits.ftqPtr.value === rename.io.in(i + 1).bits.ftqPtr.value
      val ftqOffset0 = rename.io.in(i).bits.ftqOffset
      val ftqOffset1 = rename.io.in(i + 1).bits.ftqOffset
      val ftqOffsetDiff = ftqOffset1 - ftqOffset0
      val cond1 = sameFtqPtr && ftqOffsetDiff === 1.U
      val cond2 = sameFtqPtr && ftqOffsetDiff === 2.U
      val cond3 = !sameFtqPtr && ftqOffset1 === 0.U
      val cond4 = !sameFtqPtr && ftqOffset1 === 1.U
      rename.io.in(i).bits.commitType := Mux(cond1, 4.U, Mux(cond2, 5.U, Mux(cond3, 6.U, 7.U)))
      XSError(!cond1 && !cond2 && !cond3 && !cond4, p"new condition $sameFtqPtr $ftqOffset0 $ftqOffset1\n")
    }

  }

  // memory dependency predict
  // when decode, send fold pc to mdp
  private val mdpFlodPcVec = Wire(Vec(DecodeWidth, UInt(MemPredPCWidth.W)))
  for (i <- 0 until DecodeWidth) {
    mdpFlodPcVec(i) := Mux(
      decode.io.out(i).fire,
      decode.io.in(i).bits.foldpc,
      rename.io.in(i).bits.foldpc
    )
  }

  // currently, we only update mdp info when isReplay
  memCtrl.io.redirect <> s1_s3_redirect
  memCtrl.io.csrCtrl := io.csrCtrl // RegNext in memCtrl
  memCtrl.io.stIn := io.fromMem.stIn // RegNext in memCtrl
  memCtrl.io.memPredUpdate := redirectGen.io.memPredUpdate // RegNext in memCtrl
  memCtrl.io.mdpFlodPcVec := mdpFlodPcVec
  memCtrl.io.dispatchLFSTio <> dispatch.io.lfst

  // Rename table: walked/restored on redirect, updated by rename, freed by commit.
  rat.io.redirect := s1_s3_redirect.valid
  rat.io.robCommits := rob.io.commits
  rat.io.intRenamePorts := rename.io.intRenamePorts
  rat.io.fpRenamePorts := rename.io.fpRenamePorts
  rat.io.vecRenamePorts := rename.io.vecRenamePorts

  rename.io.redirect := s1_s3_redirect
  rename.io.robCommits <> rob.io.commits
  // Wait-table entries are captured when the corresponding decode slot fires.
  rename.io.waittable := (memCtrl.io.waitTable2Rename zip decode.io.out).map{ case(waittable2rename, decodeOut) =>
    RegEnable(waittable2rename, decodeOut.fire)
  }
  rename.io.ssit := memCtrl.io.ssit2Rename
  rename.io.intReadPorts := VecInit(rat.io.intReadPorts.map(x => VecInit(x.map(_.data))))
  rename.io.fpReadPorts := VecInit(rat.io.fpReadPorts.map(x => VecInit(x.map(_.data))))
  rename.io.vecReadPorts := VecInit(rat.io.vecReadPorts.map(x => VecInit(x.map(_.data))))
  rename.io.debug_int_rat := rat.io.debug_int_rat
  rename.io.debug_fp_rat := rat.io.debug_fp_rat
  rename.io.debug_vconfig_rat := rat.io.debug_vconfig_rat
  rename.io.debug_vec_rat := rat.io.debug_vec_rat

  // pipeline between rename and dispatch
  for (i <- 0 until RenameWidth) {
    PipelineConnect(rename.io.out(i), dispatch.io.fromRename(i), dispatch.io.recv(i), s1_s3_redirect.valid)
  }

  dispatch.io.hartId := io.fromTop.hartId
  dispatch.io.redirect <> s1_s3_redirect
  dispatch.io.enqRob <> rob.io.enq
  dispatch.io.singleStep := RegNext(io.csrCtrl.singlestep)

  // Dispatch queues (int / fp / load-store); flushed by the delayed redirect.
  intDq.io.enq <> dispatch.io.toIntDq
  intDq.io.redirect <> s2_s4_redirect

  fpDq.io.enq <> dispatch.io.toFpDq
  fpDq.io.redirect <> s2_s4_redirect

  lsDq.io.enq <> dispatch.io.toLsDq
  lsDq.io.redirect <> s2_s4_redirect

  io.toIssueBlock.intUops <> intDq.io.deq
  io.toIssueBlock.vfUops <> fpDq.io.deq
  io.toIssueBlock.memUops <> lsDq.io.deq
  io.toIssueBlock.allocPregs <> dispatch.io.allocPregs

  // pcMem/targetMem are written in lockstep from the frontend FTQ write stream.
  pcMem.io.wen.head := RegNext(io.frontend.fromFtq.pc_mem_wen)
  pcMem.io.waddr.head := RegNext(io.frontend.fromFtq.pc_mem_waddr)
  pcMem.io.wdata.head := RegNext(io.frontend.fromFtq.pc_mem_wdata)
  targetMem.io.wen.head := RegNext(io.frontend.fromFtq.pc_mem_wen)
  targetMem.io.waddr.head := RegNext(io.frontend.fromFtq.pc_mem_waddr)
  targetMem.io.wdata.head := RegNext(io.frontend.fromFtq.pc_mem_wdata.startAddr)

  // PC and jump-target values delivered to the issue block alongside dequeued uops.
  private val jumpPcVec         : Vec[UInt] = Wire(Vec(params.numPcReadPort, UInt(VAddrData().dataWidth.W)))
  private val jumpTargetReadVec : Vec[UInt] = Wire(Vec(params.numPcReadPort, UInt(VAddrData().dataWidth.W)))
  private val jumpTargetVec     : Vec[UInt] = Wire(Vec(params.numPcReadPort, UInt(VAddrData().dataWidth.W)))
  io.toIssueBlock.pcVec := jumpPcVec
  io.toIssueBlock.targetVec := jumpTargetVec

  for (i <- 0 until params.numPcReadPort) {
    pcMem.io.raddr(i) := intDq.io.deqNext(i).ftqPtr.value
    jumpPcVec(i) := pcMem.io.rdata(i).getPc(RegNext(intDq.io.deqNext(i).ftqOffset))
  }

  private val newestTarget: UInt = io.frontend.fromFtq.newest_entry_target
  for (i <- 0 until numTargetMemRead) {
    val targetPtr = intDq.io.deqNext(i).ftqPtr
    // target pc stored in next entry
    targetMem.io.raddr(i) := (targetPtr + 1.U).value
    jumpTargetReadVec(i) := targetMem.io.rdata(i)
    // If the entry read is the newest FTQ entry, its successor has not been
    // written yet — bypass the frontend's newest_entry_target instead.
    val needNewestTarget = RegNext(targetPtr === io.frontend.fromFtq.newest_entry_ptr)
    jumpTargetVec(i) := Mux(
      needNewestTarget,
      RegNext(newestTarget),
      jumpTargetReadVec(i)
    )
  }

  rob.io.hartId := io.fromTop.hartId
  rob.io.redirect <> s1_s3_redirect
  rob.io.writeback := io.fromWB.wbData // Todo

  io.redirect <> s1_s3_redirect

  // rob to int block
  io.robio.csr <> rob.io.csr
  // When wfi is disabled, it will not block ROB commit.
  rob.io.csr.wfiEvent := io.robio.csr.wfiEvent
  rob.io.wfi_enable := decode.io.csrCtrl.wfi_enable

  io.toTop.cpuHalt := DelayN(rob.io.cpu_halt, 5)

  io.robio.csr.perfinfo.retiredInstr <> RegNext(rob.io.csr.perfinfo.retiredInstr)
  io.robio.exception := rob.io.exception
  // Override the exception PC with the one just read from pcMem for the flush.
  io.robio.exception.bits.pc := s1_robFlushPc

  //  io.robio.csr.vcsrFlag := RegNext(rob.io.commits.isCommit && Cat(isCommitWriteVconfigVec).orR)

  // rob to mem block
  io.robio.lsq <> rob.io.lsq

  // Debug views of the rename tables for difftest / debugging.
  io.debug_int_rat := rat.io.debug_int_rat
  io.debug_fp_rat := rat.io.debug_fp_rat
  io.debug_vec_rat := rat.io.debug_vec_rat
  io.debug_vconfig_rat := rat.io.debug_vconfig_rat

  io.perfInfo.ctrlInfo.robFull := RegNext(rob.io.robFull)
  io.perfInfo.ctrlInfo.intdqFull := RegNext(intDq.io.dqFull)
  io.perfInfo.ctrlInfo.fpdqFull := RegNext(fpDq.io.dqFull)
  io.perfInfo.ctrlInfo.lsdqFull := RegNext(lsDq.io.dqFull)

  // Hardware performance-monitor event selection (events 8..15 belong to this block).
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := RegNext(io.csrCtrl.distribute_csr)
  val csrevents = pfevent.io.hpmevent.slice(8,16)

  // Extra perf-event inputs from reservation stations and execution units.
  val perfinfo = IO(new Bundle(){
    val perfEventsRs = Input(Vec(params.IqCnt, new PerfEvent))
    val perfEventsEu0 = Input(Vec(6, new PerfEvent))
    val perfEventsEu1 = Input(Vec(6, new PerfEvent))
  })

  val allPerfEvents = Seq(decode, rename, dispatch, intDq, fpDq, lsDq, rob).flatMap(_.getPerf)
  val hpmEvents = allPerfEvents ++ perfinfo.perfEventsEu0 ++ perfinfo.perfEventsEu1 ++ perfinfo.perfEventsRs
  val perfEvents = HPerfMonitor(csrevents, hpmEvents).getPerfEvents
  generatePerfEvent()
}

/** Top-level IO bundle of the control block: frontend interface, issue-block
  * outputs, writeback inputs, memory-block interface, CSR control, ROB-side
  * channels, and debug/perf outputs.
  */
class CtrlBlockIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  val fromTop = new Bundle {
    val hartId = Input(UInt(8.W))
  }
  val toTop = new Bundle {
    val cpuHalt = Output(Bool())
  }
  val frontend = Flipped(new FrontendToCtrlIO())
  val toIssueBlock = new Bundle {
    val allocPregs = Vec(RenameWidth, Output(new ResetPregStateReq))
    val intUops = Vec(dpParams.IntDqDeqWidth, DecoupledIO(new DynInst))
    val vfUops = Vec(dpParams.FpDqDeqWidth, DecoupledIO(new DynInst))
    val memUops = Vec(dpParams.LsDqDeqWidth, DecoupledIO(new DynInst))
    val pcVec = Output(Vec(params.numPcReadPort, UInt(VAddrData().dataWidth.W)))
    val targetVec = Output(Vec(params.numPcReadPort, UInt(VAddrData().dataWidth.W)))
  }
  val fromWB = new Bundle {
    val wbData = Flipped(MixedVec(params.genWrite2CtrlBundles))
  }
  val redirect = ValidIO(new Redirect)
  val fromMem = new Bundle {
    val stIn = Vec(params.StuCnt, Flipped(ValidIO(new DynInst))) // use storeSetHit, ssid, robIdx
    val violation = Flipped(ValidIO(new Redirect))
  }
  val csrCtrl = Input(new CustomCSRCtrlIO)
  val robio = new Bundle {
    val csr = new RobCSRIO
    val exception = ValidIO(new ExceptionInfo)
    val lsq = new RobLsqIO
  }

  val perfInfo = Output(new Bundle{
    val ctrlInfo = new Bundle {
      val robFull = Bool()
      val intdqFull = Bool()
      val fpdqFull = Bool()
      val lsdqFull = Bool()
    }
  })
  val debug_int_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
  val debug_fp_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
  val debug_vec_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W))) // TODO: use me
  val debug_vconfig_rat = Output(UInt(PhyRegIdxWidth.W)) // TODO: use me

}