/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.{DecodeStage, ImmUnion}
import xiangshan.backend.rename.{BusyTable, Rename}
import xiangshan.backend.dispatch.Dispatch
import xiangshan.backend.exu._
import xiangshan.frontend.{FtqRead, FtqToCtrlIO, FtqPtr, CfiInfoToCtrl}
import xiangshan.backend.roq.{Roq, RoqCSRIO, RoqLsqIO, RoqPtr}
import xiangshan.mem.LsqEnqIO

/** Control block -> integer block: issue-queue enqueue, regfile read addresses,
  * jump PC / JALR target, redirect/flush, and the int rename table for debug. */
class CtrlToIntBlockIO(implicit p: Parameters) extends XSBundle {
  val enqIqCtrl = Vec(exuParameters.IntExuCnt, DecoupledIO(new MicroOp))
  val readRf = Vec(NRIntReadPorts, Output(UInt(PhyRegIdxWidth.W)))
  val jumpPc = Output(UInt(VAddrBits.W))
  val jalr_target = Output(UInt(VAddrBits.W))
  // int block only uses port 0~7
  val readPortIndex = Vec(exuParameters.IntExuCnt, Output(UInt(log2Ceil(8 / 2).W))) // TODO parameterize 8 here
  val redirect = ValidIO(new Redirect)
  val flush = Output(Bool())
  val debug_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
}

/** Control block -> floating-point block: same role as [[CtrlToIntBlockIO]] but for fp exus. */
class CtrlToFpBlockIO(implicit p: Parameters) extends XSBundle {
  val enqIqCtrl = Vec(exuParameters.FpExuCnt, DecoupledIO(new MicroOp))
  val readRf = Vec(NRFpReadPorts, Output(UInt(PhyRegIdxWidth.W)))
  // fp block uses port 0~11
  val readPortIndex = Vec(exuParameters.FpExuCnt, Output(UInt(log2Ceil((NRFpReadPorts - exuParameters.StuCnt) / 3).W)))
  val redirect = ValidIO(new Redirect)
  val flush = Output(Bool())
  val debug_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
}

/** Control block -> load/store block: issue-queue and lsq enqueue, memory-dependence
  * predictor updates, and redirect/flush. */
class CtrlToLsBlockIO(implicit p: Parameters) extends XSBundle {
  val enqIqCtrl = Vec(exuParameters.LsExuCnt, DecoupledIO(new MicroOp))
  val enqLsq = Flipped(new LsqEnqIO)
  val memPredUpdate = Vec(StorePipelineWidth, Input(new MemPredUpdateReq))
  val redirect = ValidIO(new Redirect)
  val flush = Output(Bool())
}

/** Control block -> FTQ: commit info, redirects of both pipeline stages,
  * roq-triggered flushes, exu writebacks and load replays. */
class CtrlToFtqIO(implicit p: Parameters) extends XSBundle {
  val roq_commits = Vec(CommitWidth, Valid(new RoqCommitInfo))
  val stage2Redirect = Valid(new Redirect)
  val roqFlush = Valid(new Bundle {
    val ftqIdx = Output(new FtqPtr)
    val ftqOffset = Output(UInt(log2Up(PredictWidth).W))
  })
  val exuWriteback = Vec(exuParameters.JmpCnt + exuParameters.AluCnt, Valid(new ExuOutput))
  val loadReplay = Valid(new Redirect)
  val stage3Redirect = ValidIO(new Redirect)
}

/** Collects all redirect sources (jump/alu mispredicts and load replay), selects the
  * oldest valid one, and pipelines it through stage2 (flush backend) and stage3
  * (redirect sent to the frontend, with the cfiUpdate fields filled in).
  */
class RedirectGenerator(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  // number of exu-originated redirect sources; load replay is appended after them
  val numRedirect = exuParameters.JmpCnt + exuParameters.AluCnt
  val io = IO(new Bundle() {
    val exuMispredict = Vec(numRedirect, Flipped(ValidIO(new ExuOutput)))
    val loadReplay = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val stage1PcRead = Vec(numRedirect+1, new FtqRead(UInt(VAddrBits.W)))
    val stage1CfiRead = Vec(numRedirect+1, new FtqRead(new CfiInfoToCtrl))
    val stage2Redirect = ValidIO(new Redirect)
    val stage3Redirect = ValidIO(new Redirect)
    val memPredUpdate = Output(new MemPredUpdateReq)
    val memPredPcRead = new FtqRead(UInt(VAddrBits.W)) // read req send form stage 2
  })
  /*
        LoadQueue  Jump  ALU0  ALU1  ALU2  ALU3   exception    Stage1
          |          |     |     |     |     |        |
          |========== reg & compare ========|         |       ========
                           |                          |
                           |                          |
                           |                          |        Stage2
                           |                          |
                   redirect (flush backend)           |
                   |                                  |
              === reg ===                             |       ========
                   |                                  |
                   |----- mux (exception first) ------|        Stage3
                           |
               redirect (send to frontend)
   */

  /** Returns a one-hot vector marking the oldest (by roqIdx) valid entry of `xs`.
    * Entry i wins iff it is valid and every other valid entry j is not older
    * (ties resolved towards the lower index by the asymmetric compare).
    */
  def selectOldestRedirect(xs: Seq[Valid[Redirect]]): Vec[Bool] = {
    // compareVec(i)(j), j < i: redirect j is after (younger than) redirect i
    val compareVec = (0 until xs.length).map(i => (0 until i).map(j => isAfter(xs(j).bits.roqIdx, xs(i).bits.roqIdx)))
    val resultOnehot = VecInit((0 until xs.length).map(i => Cat((0 until xs.length).map(j =>
      (if (j < i) !xs(j).valid || compareVec(i)(j)
      else if (j == i) xs(i).valid
      else !xs(j).valid || !compareVec(j)(i))
    )).andR))
    resultOnehot
  }

  val redirects = io.exuMispredict.map(_.bits.redirect) :+ io.loadReplay.bits
  // kick off the FTQ pc / cfi reads in stage1; the data becomes valid one cycle later
  val stage1FtqReadPcs =
    (io.stage1PcRead zip redirects).map{ case (r: FtqRead[UInt], redirect: Redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }
  val stage1FtqReadCfis =
    (io.stage1CfiRead zip redirects).map{ case (r: FtqRead[CfiInfoToCtrl], redirect: Redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }

  // an exu output becomes a redirect request only when it flags a misprediction
  def getRedirect(exuOut: Valid[ExuOutput]): ValidIO[Redirect] = {
    val redirect = Wire(Valid(new Redirect))
    redirect.valid := exuOut.valid && exuOut.bits.redirect.cfiUpdate.isMisPred
    redirect.bits := exuOut.bits.redirect
    redirect
  }

  val jumpOut = io.exuMispredict.head
  // exu mispredicts first, load replay appended last (index numRedirect)
  val allRedirect = VecInit(io.exuMispredict.map(x => getRedirect(x)) :+ io.loadReplay)
  val oldestOneHot = selectOldestRedirect(allRedirect)
  val needFlushVec = VecInit(allRedirect.map(_.bits.roqIdx.needFlush(io.stage2Redirect, io.flush)))
  val oldestValid = VecInit(oldestOneHot.zip(needFlushVec).map{ case (v, f) => v && !f }).asUInt.orR
  // only the exu entries carry an ExuOutput; the load-replay bit of the one-hot is excluded
  val oldestExuOutput = Mux1H((0 until numRedirect).map(oldestOneHot), io.exuMispredict)
  val oldestRedirect = Mux1H(oldestOneHot, allRedirect)

  val s1_jumpTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
  val s1_imm12_reg = RegNext(oldestExuOutput.bits.uop.ctrl.imm(11, 0))
  val s1_pd = RegNext(oldestExuOutput.bits.uop.cf.pd)
  val s1_redirect_bits_reg = RegNext(oldestRedirect.bits)
  val s1_redirect_valid_reg = RegNext(oldestValid)
  val s1_redirect_onehot = RegNext(oldestOneHot)

  // stage1 -> stage2
  io.stage2Redirect.valid := s1_redirect_valid_reg && !io.flush
  io.stage2Redirect.bits := s1_redirect_bits_reg
  io.stage2Redirect.bits.cfiUpdate := DontCare

  // load replay occupies the last one-hot position; the jump unit is the first
  val s1_isReplay = s1_redirect_onehot(numRedirect)
  val s1_isJump = s1_redirect_onehot(0)
  val cfiRead = Mux1H(s1_redirect_onehot, stage1FtqReadCfis)
  val real_pc = Mux1H(s1_redirect_onehot, stage1FtqReadPcs)
  val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
  val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
  val target = Mux(s1_isReplay,
    real_pc, // repaly from itself
    Mux(s1_redirect_bits_reg.cfiUpdate.taken,
      Mux(s1_isJump, s1_jumpTarget, brTarget),
      snpc
    )
  )

  // get pc from ftq
  // valid only if redirect is caused by load violation
  // store_pc is used to update store set
  val store_pc = io.memPredPcRead(s1_redirect_bits_reg.stFtqIdx, s1_redirect_bits_reg.stFtqOffset)

  // update load violation predictor if load violation redirect triggered
  io.memPredUpdate.valid := RegNext(s1_isReplay && s1_redirect_valid_reg, init = false.B)
  // update wait table
  io.memPredUpdate.waddr := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  io.memPredUpdate.wdata := true.B
  // update store set
  io.memPredUpdate.ldpc := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  // store pc is ready 1 cycle after s1_isReplay is judged
  io.memPredUpdate.stpc := XORFold(store_pc(VAddrBits-1, 1), MemPredPCWidth)

  // stage2 registers: capture the FTQ read data and stage1 results when the redirect is valid
  val s2_br_mask = RegEnable(cfiRead.br_mask, enable = s1_redirect_valid_reg)
  // whether any not-taken branch precedes the redirecting instruction in its fetch block
  val s2_sawNotTakenBranch = RegEnable(VecInit((0 until PredictWidth).map{ i =>
    if(i == 0) false.B else Cat(cfiRead.br_mask.take(i)).orR()
  })(s1_redirect_bits_reg.ftqOffset), enable = s1_redirect_valid_reg)
  val s2_hist = RegEnable(cfiRead.hist, enable = s1_redirect_valid_reg)
  val s2_target = RegEnable(target, enable = s1_redirect_valid_reg)
  val s2_pd = RegEnable(s1_pd, enable = s1_redirect_valid_reg)
  val s2_pc = RegEnable(real_pc, enable = s1_redirect_valid_reg)
  val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, enable = s1_redirect_valid_reg)
  val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg && !io.flush, init = false.B)

  // stage3: redirect sent to the frontend, with cfiUpdate fully populated
  io.stage3Redirect.valid := s2_redirect_valid_reg
  io.stage3Redirect.bits := s2_redirect_bits_reg
  val stage3CfiUpdate = io.stage3Redirect.bits.cfiUpdate
  stage3CfiUpdate.pc := s2_pc
  stage3CfiUpdate.pd := s2_pd
  // stage3CfiUpdate.rasSp := s2_ftqRead.rasSp
  // stage3CfiUpdate.rasEntry := s2_ftqRead.rasTop
  // stage3CfiUpdate.predHist := s2_ftqRead.predHist
  // stage3CfiUpdate.specCnt := s2_ftqRead.specCnt
  stage3CfiUpdate.hist := s2_hist
  stage3CfiUpdate.predTaken := s2_redirect_bits_reg.cfiUpdate.predTaken
  stage3CfiUpdate.sawNotTakenBranch := s2_sawNotTakenBranch
  stage3CfiUpdate.target := s2_target
  stage3CfiUpdate.taken := s2_redirect_bits_reg.cfiUpdate.taken
  stage3CfiUpdate.isMisPred := s2_redirect_bits_reg.cfiUpdate.isMisPred
}

/** Backend control block: decode -> rename -> dispatch pipeline, the reorder buffer (roq),
  * busy tables, and redirect generation. Connects the frontend/FTQ with the int/fp/ls
  * execution blocks.
  */
class CtrlBlock(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val io = IO(new Bundle {
    val frontend = Flipped(new FrontendToCtrlIO)
    val fromIntBlock = Flipped(new IntBlockToCtrlIO)
    val fromFpBlock = Flipped(new FpBlockToCtrlIO)
    val fromLsBlock = Flipped(new LsBlockToCtrlIO)
    val toIntBlock = new CtrlToIntBlockIO
    val toFpBlock = new CtrlToFpBlockIO
    val toLsBlock = new CtrlToLsBlockIO
    val roqio = new Bundle {
      // to int block
      val toCSR = new RoqCSRIO
      val exception = ValidIO(new ExceptionInfo)
      // to mem block
      val lsq = new RoqLsqIO
    }
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val perfInfo = Output(new Bundle{
      val ctrlInfo = new Bundle {
        val roqFull = Input(Bool())
        val intdqFull = Input(Bool())
        val fpdqFull = Input(Bool())
        val lsdqFull = Input(Bool())
      }
    })
  })

  val decode = Module(new DecodeStage)
  val rename = Module(new Rename)
  val dispatch = Module(new Dispatch)
  val intBusyTable = Module(new BusyTable(NRIntReadPorts, NRIntWritePorts))
  val fpBusyTable = Module(new BusyTable(NRFpReadPorts, NRFpWritePorts))
  val redirectGen = Module(new RedirectGenerator)

  val roqWbSize = NRIntWritePorts + NRFpWritePorts + exuParameters.StuCnt
  val roq = Module(new Roq(roqWbSize))

  val stage2Redirect = redirectGen.io.stage2Redirect
  val stage3Redirect = redirectGen.io.stage3Redirect
  // roq-triggered flush (exception / interrupt / flush-pipe); delayed one cycle
  val flush = roq.io.flushOut.valid
  val flushReg = RegNext(flush)

  // register exu redirects one cycle; drop those already killed by an older redirect/flush
  val exuRedirect = io.fromIntBlock.exuRedirect.map(x => {
    val valid = x.valid && x.bits.redirectValid
    val killedByOlder = x.bits.uop.roqIdx.needFlush(stage2Redirect, flushReg)
    val delayed = Wire(Valid(new ExuOutput))
    delayed.valid := RegNext(valid && !killedByOlder, init = false.B)
    delayed.bits := RegEnable(x.bits, x.valid)
    delayed
  })
  val loadReplay = Wire(Valid(new Redirect))
  loadReplay.valid := RegNext(io.fromLsBlock.replay.valid &&
    !io.fromLsBlock.replay.bits.roqIdx.needFlush(stage2Redirect, flushReg),
    init = false.B
  )
  loadReplay.bits := RegEnable(io.fromLsBlock.replay.bits, io.fromLsBlock.replay.valid)
  io.frontend.fromFtq.getRedirectPcRead <> redirectGen.io.stage1PcRead
  io.frontend.fromFtq.getMemPredPcRead <> redirectGen.io.memPredPcRead
  io.frontend.fromFtq.cfi_reads <> redirectGen.io.stage1CfiRead
  redirectGen.io.exuMispredict <> exuRedirect
  redirectGen.io.loadReplay <> loadReplay
  redirectGen.io.flush := flushReg

  // commits are reported to the FTQ only when not walking (rename state recovery)
  for(i <- 0 until CommitWidth){
    io.frontend.toFtq.roq_commits(i).valid := roq.io.commits.valid(i) && !roq.io.commits.isWalk
    io.frontend.toFtq.roq_commits(i).bits := roq.io.commits.info(i)
  }
  io.frontend.toFtq.stage2Redirect <> stage2Redirect
  io.frontend.toFtq.roqFlush <> RegNext(roq.io.flushOut)
  io.frontend.toFtq.stage3Redirect <> stage3Redirect
  io.frontend.toFtq.exuWriteback <> exuRedirect
  io.frontend.toFtq.loadReplay <> loadReplay

  val roqPcRead = io.frontend.fromFtq.getRoqFlushPcRead
  val flushPC = roqPcRead(roq.io.flushOut.bits.ftqIdx, roq.io.flushOut.bits.ftqOffset)

  // build the frontend redirect for a roq flush: trap target on xret/exception,
  // otherwise re-fetch the instruction after the flushed one (flush pipe)
  val flushRedirect = Wire(Valid(new Redirect))
  flushRedirect.valid := flushReg
  flushRedirect.bits := DontCare
  flushRedirect.bits.ftqIdx := RegEnable(roq.io.flushOut.bits.ftqIdx, flush)
  flushRedirect.bits.interrupt := true.B
  flushRedirect.bits.cfiUpdate.target := Mux(io.roqio.toCSR.isXRet || roq.io.exception.valid,
    io.roqio.toCSR.trapTarget,
    flushPC + 4.U // flush pipe
  )
  val flushRedirectReg = Wire(Valid(new Redirect))
  flushRedirectReg.valid := RegNext(flushRedirect.valid, init = false.B)
  flushRedirectReg.bits := RegEnable(flushRedirect.bits, enable = flushRedirect.valid)

  // roq flush has priority over the stage3 mispredict redirect
  io.frontend.redirect_cfiUpdate := Mux(flushRedirectReg.valid, flushRedirectReg, stage3Redirect)

  decode.io.in <> io.frontend.cfVec
  // currently, we only update wait table when isReplay
  decode.io.memPredUpdate(0) <> RegNext(redirectGen.io.memPredUpdate)
  decode.io.memPredUpdate(1) := DontCare
  decode.io.memPredUpdate(1).valid := false.B
  // decode.io.memPredUpdate <> io.toLsBlock.memPredUpdate
  decode.io.csrCtrl := RegNext(io.csrCtrl)

  // jump pc / jalr target are read from the FTQ using the jump uop at dispatch port 0
  val jumpInst = dispatch.io.enqIQCtrl(0).bits
  val jumpPcRead = io.frontend.fromFtq.getJumpPcRead
  io.toIntBlock.jumpPc := jumpPcRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)
  val jumpTargetRead = io.frontend.fromFtq.target_read
  io.toIntBlock.jalr_target := jumpTargetRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)

  // pipeline between decode and dispatch
  for (i <- 0 until RenameWidth) {
    PipelineConnect(decode.io.out(i), rename.io.in(i), rename.io.in(i).ready,
      flushReg || io.frontend.redirect_cfiUpdate.valid)
  }

  rename.io.redirect <> stage2Redirect
  rename.io.flush := flushReg
  rename.io.roqCommits <> roq.io.commits
  rename.io.out <> dispatch.io.fromRename
  rename.io.renameBypass <> dispatch.io.renameBypass
  rename.io.dispatchInfo <> dispatch.io.preDpInfo
  rename.io.csrCtrl <> RegNext(io.csrCtrl)

  dispatch.io.redirect <> stage2Redirect
  dispatch.io.flush := flushReg
  dispatch.io.enqRoq <> roq.io.enq
  dispatch.io.enqLsq <> io.toLsBlock.enqLsq
  dispatch.io.readIntRf <> io.toIntBlock.readRf
  dispatch.io.readFpRf <> io.toFpBlock.readRf
  // newly allocated pregs are marked busy in the corresponding busy table
  dispatch.io.allocPregs.zipWithIndex.foreach { case (preg, i) =>
    intBusyTable.io.allocPregs(i).valid := preg.isInt
    fpBusyTable.io.allocPregs(i).valid := preg.isFp
    intBusyTable.io.allocPregs(i).bits := preg.preg
    fpBusyTable.io.allocPregs(i).bits := preg.preg
  }
  dispatch.io.numExist <> io.fromIntBlock.numExist ++ io.fromFpBlock.numExist ++ io.fromLsBlock.numExist
  dispatch.io.enqIQCtrl <> io.toIntBlock.enqIqCtrl ++ io.toFpBlock.enqIqCtrl ++ io.toLsBlock.enqIqCtrl
//  dispatch.io.enqIQData <> io.toIntBlock.enqIqData ++ io.toFpBlock.enqIqData ++ io.toLsBlock.enqIqData
  dispatch.io.csrCtrl <> io.csrCtrl
  dispatch.io.storeIssue <> io.fromLsBlock.stIn

  fpBusyTable.io.flush := flushReg
  intBusyTable.io.flush := flushReg
  // writebacks clear the busy bit of the destination preg
  for((wb, setPhyRegRdy) <- io.fromIntBlock.wbRegs.zip(intBusyTable.io.wbPregs)){
    setPhyRegRdy.valid := wb.valid && wb.bits.uop.ctrl.rfWen
    setPhyRegRdy.bits := wb.bits.uop.pdest
  }
  for((wb, setPhyRegRdy) <- io.fromFpBlock.wbRegs.zip(fpBusyTable.io.wbPregs)){
    setPhyRegRdy.valid := wb.valid && wb.bits.uop.ctrl.fpWen
    setPhyRegRdy.bits := wb.bits.uop.pdest
  }
  intBusyTable.io.read <> dispatch.io.readIntState
  fpBusyTable.io.read <> dispatch.io.readFpState

  roq.io.redirect <> stage2Redirect
  // writeback results are registered one cycle before entering the roq;
  // entries killed by an older redirect/flush are dropped
  val exeWbResults = VecInit(io.fromIntBlock.wbRegs ++ io.fromFpBlock.wbRegs ++ io.fromLsBlock.stOut)
  for((roq_wb, wb) <- roq.io.exeWbResults.zip(exeWbResults)) {
    roq_wb.valid := RegNext(wb.valid && !wb.bits.uop.roqIdx.needFlush(stage2Redirect, flushReg))
    roq_wb.bits := RegNext(wb.bits)
  }

  // TODO: is 'stage2Redirect' necesscary?
  io.toIntBlock.redirect <> stage2Redirect
  io.toIntBlock.flush <> flushReg
  io.toIntBlock.debug_rat <> rename.io.debug_int_rat
  io.toFpBlock.redirect <> stage2Redirect
  io.toFpBlock.flush <> flushReg
  io.toFpBlock.debug_rat <> rename.io.debug_fp_rat
  io.toLsBlock.redirect <> stage2Redirect
  io.toLsBlock.flush <> flushReg

  dispatch.io.readPortIndex.intIndex <> io.toIntBlock.readPortIndex
  dispatch.io.readPortIndex.fpIndex <> io.toFpBlock.readPortIndex

  // roq to int block
  io.roqio.toCSR <> roq.io.csr
  io.roqio.toCSR.perfinfo.retiredInstr <> RegNext(roq.io.csr.perfinfo.retiredInstr)
  io.roqio.exception := roq.io.exception
  io.roqio.exception.bits.uop.cf.pc := flushPC
  // roq to mem block
  io.roqio.lsq <> roq.io.lsq

  io.perfInfo.ctrlInfo.roqFull := RegNext(roq.io.roqFull)
  io.perfInfo.ctrlInfo.intdqFull := RegNext(dispatch.io.ctrlInfo.intdqFull)
  io.perfInfo.ctrlInfo.fpdqFull := RegNext(dispatch.io.ctrlInfo.fpdqFull)
  io.perfInfo.ctrlInfo.lsdqFull := RegNext(dispatch.io.ctrlInfo.lsdqFull)
}