/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.{DecodeStage, ImmUnion}
import xiangshan.backend.rename.{BusyTable, Rename}
import xiangshan.backend.dispatch.Dispatch
import xiangshan.backend.exu._
import xiangshan.frontend.{FtqRead, FtqToCtrlIO, FtqPtr, CfiInfoToCtrl}
import xiangshan.backend.roq.{Roq, RoqCSRIO, RoqLsqIO, RoqPtr}
import xiangshan.mem.LsqEnqIO

/** Control block -> FTQ (Fetch Target Queue) interface.
  *
  * Carries commit information, the two redirect stages, the ROQ flush
  * request, and ALU/JMP writebacks that the frontend uses to update
  * branch prediction state.
  */
class CtrlToFtqIO(implicit p: Parameters) extends XSBundle {
  val roq_commits = Vec(CommitWidth, Valid(new RoqCommitInfo))
  val stage2Redirect = Valid(new Redirect)
  // ROQ-initiated flush: identifies the flushed instruction by its FTQ position
  val roqFlush = Valid(new Bundle {
    val ftqIdx = Output(new FtqPtr)
    val ftqOffset = Output(UInt(log2Up(PredictWidth).W))
  })

  val exuWriteback = Vec(exuParameters.JmpCnt + exuParameters.AluCnt, Valid(new ExuOutput))
  val loadReplay = Valid(new Redirect)
  val stage3Redirect = ValidIO(new Redirect)
}

/** Three-stage redirect generator.
  *
  * Collects mispredict redirects from the JMP/ALU units and the load
  * replay redirect from the load queue, selects the oldest surviving
  * one (by ROQ index), and produces:
  *   - stage2Redirect: early redirect used to flush the backend,
  *   - stage3Redirect: final redirect (with CfiUpdate info) sent to the frontend,
  *   - memPredUpdate: memory dependence predictor update on load replays.
  */
class RedirectGenerator(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val numRedirect = exuParameters.JmpCnt + exuParameters.AluCnt
  val io = IO(new Bundle() {
    val exuMispredict = Vec(numRedirect, Flipped(ValidIO(new ExuOutput)))
    val loadReplay = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    // one PC/CFI read port per redirect source (numRedirect exu + 1 load replay)
    val stage1PcRead = Vec(numRedirect+1, new FtqRead(UInt(VAddrBits.W)))
    val stage1CfiRead = Vec(numRedirect+1, new FtqRead(new CfiInfoToCtrl))
    val stage2Redirect = ValidIO(new Redirect)
    val stage3Redirect = ValidIO(new Redirect)
    val memPredUpdate = Output(new MemPredUpdateReq)
    val memPredPcRead = new FtqRead(UInt(VAddrBits.W)) // read request sent from stage 2
  })
  /*
        LoadQueue  Jump  ALU0  ALU1  ALU2  ALU3   exception    Stage1
          |         |      |    |     |     |         |
          |============= reg & compare =====|         |       ========
                            |                         |
                            |                         |
                            |                         |        Stage2
                            |                         |
                    redirect (flush backend)          |
                    |                                 |
               === reg ===                            |       ========
                    |                                 |
                    |----- mux (exception first) -----|        Stage3
                            |
                redirect (send to frontend)
   */
  private class Wrapper(val n: Int) extends Bundle {
    val redirect = new Redirect
    val valid = Bool()
    val idx = UInt(log2Up(n).W)
  }
  /** One-hot select of the oldest valid redirect (smallest roqIdx).
    *
    * Bit i is set iff xs(i) is valid and every other valid entry is
    * younger than (after) it in program order.
    */
  def selectOldestRedirect(xs: Seq[Valid[Redirect]]): Vec[Bool] = {
    // compareVec(i)(j), j < i: true iff xs(j) is after (younger than) xs(i)
    val compareVec = (0 until xs.length).map(i => (0 until i).map(j => isAfter(xs(j).bits.roqIdx, xs(i).bits.roqIdx)))
    val resultOnehot = VecInit((0 until xs.length).map(i => Cat((0 until xs.length).map(j =>
      (if (j < i) !xs(j).valid || compareVec(i)(j)
      else if (j == i) xs(i).valid
      else !xs(j).valid || !compareVec(j)(i))
    )).andR))
    resultOnehot
  }

  val redirects = io.exuMispredict.map(_.bits.redirect) :+ io.loadReplay.bits
  // issue the FTQ PC / CFI reads for every redirect source in stage 1;
  // results are selected by the registered one-hot in the next cycle
  val stage1FtqReadPcs =
    (io.stage1PcRead zip redirects).map{ case (r, redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }
  val stage1FtqReadCfis =
    (io.stage1CfiRead zip redirects).map{ case (r, redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }

  /** Wrap an EXU writeback as a redirect: valid only on a real misprediction. */
  def getRedirect(exuOut: Valid[ExuOutput]): ValidIO[Redirect] = {
    val redirect = Wire(Valid(new Redirect))
    redirect.valid := exuOut.valid && exuOut.bits.redirect.cfiUpdate.isMisPred
    redirect.bits := exuOut.bits.redirect
    redirect
  }

  // head of exuMispredict is the jump unit (see the diagram above)
  val jumpOut = io.exuMispredict.head
  val allRedirect = VecInit(io.exuMispredict.map(x => getRedirect(x)) :+ io.loadReplay)
  val oldestOneHot = selectOldestRedirect(allRedirect)
  // a candidate is dropped if an older in-flight redirect/flush already kills it
  val needFlushVec = VecInit(allRedirect.map(_.bits.roqIdx.needFlush(io.stage2Redirect, io.flush)))
  val oldestValid = VecInit(oldestOneHot.zip(needFlushVec).map{ case (v, f) => v && !f }).asUInt.orR
  val oldestExuOutput = Mux1H(io.exuMispredict.indices.map(oldestOneHot), io.exuMispredict)
  val oldestRedirect = Mux1H(oldestOneHot, allRedirect)

  // stage 1 registers: latch the selected redirect and the data needed
  // to recompute the target in stage 2
  val s1_jumpTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
  val s1_imm12_reg = RegNext(oldestExuOutput.bits.uop.ctrl.imm(11, 0))
  val s1_pd = RegNext(oldestExuOutput.bits.uop.cf.pd)
  val s1_redirect_bits_reg = RegNext(oldestRedirect.bits)
  val s1_redirect_valid_reg = RegNext(oldestValid)
  val s1_redirect_onehot = RegNext(oldestOneHot)

  // stage1 -> stage2: early redirect to flush the backend
  io.stage2Redirect.valid := s1_redirect_valid_reg && !io.flush
  io.stage2Redirect.bits := s1_redirect_bits_reg
  io.stage2Redirect.bits.cfiUpdate := DontCare

  // one-hot layout: head = jump unit, last = load replay
  val s1_isReplay = s1_redirect_onehot.last
  val s1_isJump = s1_redirect_onehot.head
  val cfiRead = Mux1H(s1_redirect_onehot, stage1FtqReadCfis)
  val real_pc = Mux1H(s1_redirect_onehot, stage1FtqReadPcs)
  val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
  val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
  val target = Mux(s1_isReplay,
    real_pc, // replay from the load itself
    Mux(s1_redirect_bits_reg.cfiUpdate.taken,
      Mux(s1_isJump, s1_jumpTarget, brTarget),
      snpc
    )
  )

  // get pc from ftq
  // valid only if redirect is caused by load violation
  // store_pc is used to update store set
  val store_pc = io.memPredPcRead(s1_redirect_bits_reg.stFtqIdx, s1_redirect_bits_reg.stFtqOffset)

  // update load violation predictor if load violation redirect triggered
  io.memPredUpdate.valid := RegNext(s1_isReplay && s1_redirect_valid_reg, init = false.B)
  // update wait table
  io.memPredUpdate.waddr := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  io.memPredUpdate.wdata := true.B
  // update store set
  io.memPredUpdate.ldpc := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  // store pc is ready 1 cycle after s1_isReplay is judged
  io.memPredUpdate.stpc := XORFold(store_pc(VAddrBits-1, 1), MemPredPCWidth)

  // stage 2 registers: hold everything the frontend needs in the CfiUpdate
  val s2_br_mask = RegEnable(cfiRead.br_mask, enable = s1_redirect_valid_reg)
  // true iff a not-taken branch precedes the redirecting slot in the fetch block
  val s2_sawNotTakenBranch = RegEnable(VecInit((0 until PredictWidth).map{ i =>
    if(i == 0) false.B else Cat(cfiRead.br_mask.take(i)).orR()
  })(s1_redirect_bits_reg.ftqOffset), enable = s1_redirect_valid_reg)
  val s2_hist = RegEnable(cfiRead.hist, enable = s1_redirect_valid_reg)
  val s2_target = RegEnable(target, enable = s1_redirect_valid_reg)
  val s2_pd = RegEnable(s1_pd, enable = s1_redirect_valid_reg)
  val s2_pc = RegEnable(real_pc, enable = s1_redirect_valid_reg)
  val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, enable = s1_redirect_valid_reg)
  val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg && !io.flush, init = false.B)

  // stage 3: final redirect (with full CfiUpdate) sent to the frontend
  io.stage3Redirect.valid := s2_redirect_valid_reg
  io.stage3Redirect.bits := s2_redirect_bits_reg
  val stage3CfiUpdate = io.stage3Redirect.bits.cfiUpdate
  stage3CfiUpdate.pc := s2_pc
  stage3CfiUpdate.pd := s2_pd
  // stage3CfiUpdate.rasSp := s2_ftqRead.rasSp
  // stage3CfiUpdate.rasEntry := s2_ftqRead.rasTop
  // stage3CfiUpdate.predHist := s2_ftqRead.predHist
  // stage3CfiUpdate.specCnt := s2_ftqRead.specCnt
  stage3CfiUpdate.hist := s2_hist
  stage3CfiUpdate.predTaken := s2_redirect_bits_reg.cfiUpdate.predTaken
  stage3CfiUpdate.sawNotTakenBranch := s2_sawNotTakenBranch
  stage3CfiUpdate.target := s2_target
  stage3CfiUpdate.taken := s2_redirect_bits_reg.cfiUpdate.taken
  stage3CfiUpdate.isMisPred := s2_redirect_bits_reg.cfiUpdate.isMisPred
}

/** Backend control block.
  *
  * Hosts decode, rename, dispatch, the busy tables, the ROQ and the
  * redirect generator, and wires them to the frontend (FTQ), the
  * execution units, and the memory block.
  */
class CtrlBlock(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val io = IO(new Bundle {
    val frontend = Flipped(new FrontendToBackendIO)
    val enqIQ = Vec(exuParameters.CriticalExuCnt, DecoupledIO(new MicroOp))
    // from int block
    val exuRedirect = Vec(exuParameters.AluCnt + exuParameters.JmpCnt, Flipped(ValidIO(new ExuOutput)))
    val stIn = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuInput)))
    val stOut = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuOutput)))
    val memoryViolation = Flipped(ValidIO(new Redirect))
    val enqLsq = Flipped(new LsqEnqIO)
    val jumpPc = Output(UInt(VAddrBits.W))
    val jalr_target = Output(UInt(VAddrBits.W))
    val roqio = new Bundle {
      // to int block
      val toCSR = new RoqCSRIO
      val exception = ValidIO(new ExceptionInfo)
      // to mem block
      val lsq = new RoqLsqIO
    }
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val perfInfo = Output(new Bundle{
      val ctrlInfo = new Bundle {
        val roqFull   = Input(Bool())
        val intdqFull = Input(Bool())
        val fpdqFull  = Input(Bool())
        val lsdqFull  = Input(Bool())
      }
    })
    val writeback = Vec(NRIntWritePorts + NRFpWritePorts, Flipped(ValidIO(new ExuOutput)))
    // redirect out
    val redirect = ValidIO(new Redirect)
    val flush = Output(Bool())
    val readIntRf = Vec(NRIntReadPorts, Output(UInt(PhyRegIdxWidth.W)))
    val readFpRf = Vec(NRFpReadPorts, Output(UInt(PhyRegIdxWidth.W)))
    val debug_int_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
    val debug_fp_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
  })

  val decode = Module(new DecodeStage)
  val rename = Module(new Rename)
  val dispatch = Module(new Dispatch)
  val intBusyTable = Module(new BusyTable(NRIntReadPorts, NRIntWritePorts))
  val fpBusyTable = Module(new BusyTable(NRFpReadPorts, NRFpWritePorts))
  val redirectGen = Module(new RedirectGenerator)

  // ROQ writeback ports: int + fp writebacks plus store address writebacks
  val roqWbSize = NRIntWritePorts + NRFpWritePorts + exuParameters.StuCnt
  val roq = Module(new Roq(roqWbSize))

  val stage2Redirect = redirectGen.io.stage2Redirect
  val stage3Redirect = redirectGen.io.stage3Redirect
  val flush = roq.io.flushOut.valid
  val flushReg = RegNext(flush)

  // register EXU mispredict writebacks for one cycle, dropping those
  // already killed by an older redirect or by a flush
  val exuRedirect = io.exuRedirect.map(x => {
    val valid = x.valid && x.bits.redirectValid
    val killedByOlder = x.bits.uop.roqIdx.needFlush(stage2Redirect, flushReg)
    val delayed = Wire(Valid(new ExuOutput))
    delayed.valid := RegNext(valid && !killedByOlder, init = false.B)
    delayed.bits := RegEnable(x.bits, x.valid)
    delayed
  })
  // register the load-violation replay redirect under the same kill conditions
  // BUGFIX: was `needFlush(backendRedirect, flushReg)` with `backendRedirect`
  // undefined anywhere in this file; use stage2Redirect, consistent with the
  // other needFlush call sites below.
  val loadReplay = Wire(Valid(new Redirect))
  loadReplay.valid := RegNext(io.memoryViolation.valid &&
    !io.memoryViolation.bits.roqIdx.needFlush(stage2Redirect, flushReg),
    init = false.B
  )
  loadReplay.bits := RegEnable(io.memoryViolation.bits, io.memoryViolation.valid)
  // NOTE: removed stale `VecInit(ftq.io.ftqRead.tail.dropRight(2)) <>
  // redirectGen.io.stage1FtqRead` — `ftq` is not instantiated here and the
  // generator's read ports (stage1PcRead / stage1CfiRead) are connected below.
  io.frontend.fromFtq.getRedirectPcRead <> redirectGen.io.stage1PcRead
  io.frontend.fromFtq.getMemPredPcRead <> redirectGen.io.memPredPcRead
  io.frontend.fromFtq.cfi_reads <> redirectGen.io.stage1CfiRead
  redirectGen.io.exuMispredict <> exuRedirect
  redirectGen.io.loadReplay <> loadReplay
  redirectGen.io.flush := flushReg

  // forward architectural commits to the FTQ (walk commits are excluded)
  for(i <- 0 until CommitWidth){
    io.frontend.toFtq.roq_commits(i).valid := roq.io.commits.valid(i) && !roq.io.commits.isWalk
    io.frontend.toFtq.roq_commits(i).bits := roq.io.commits.info(i)
  }
  io.frontend.toFtq.stage2Redirect <> stage2Redirect
  io.frontend.toFtq.roqFlush <> RegNext(roq.io.flushOut)
  io.frontend.toFtq.stage3Redirect <> stage3Redirect
  io.frontend.toFtq.exuWriteback <> exuRedirect
  io.frontend.toFtq.loadReplay <> loadReplay

  // PC of the instruction being flushed by the ROQ (exception / flush-pipe)
  val roqPcRead = io.frontend.fromFtq.getRoqFlushPcRead
  val flushPC = roqPcRead(roq.io.flushOut.bits.ftqIdx, roq.io.flushOut.bits.ftqOffset)

  // ROQ-initiated flush redirect: exceptions and xRet go to the trap target,
  // a plain flush-pipe resumes at the next instruction
  val flushRedirect = Wire(Valid(new Redirect))
  flushRedirect.valid := flushReg
  flushRedirect.bits := DontCare
  flushRedirect.bits.ftqIdx := RegEnable(roq.io.flushOut.bits.ftqIdx, flush)
  flushRedirect.bits.interrupt := true.B
  flushRedirect.bits.cfiUpdate.target := Mux(io.roqio.toCSR.isXRet || roq.io.exception.valid,
    io.roqio.toCSR.trapTarget,
    flushPC + 4.U // flush pipe
  )
  val flushRedirectReg = Wire(Valid(new Redirect))
  flushRedirectReg.valid := RegNext(flushRedirect.valid, init = false.B)
  flushRedirectReg.bits := RegEnable(flushRedirect.bits, enable = flushRedirect.valid)

  // ROQ flush takes priority over a mispredict/replay redirect
  io.frontend.redirect_cfiUpdate := Mux(flushRedirectReg.valid, flushRedirectReg, stage3Redirect)

  decode.io.in <> io.frontend.cfVec
  // currently, we only update wait table when isReplay
  decode.io.memPredUpdate(0) <> RegNext(redirectGen.io.memPredUpdate)
  decode.io.memPredUpdate(1) := DontCare
  decode.io.memPredUpdate(1).valid := false.B
  // decode.io.memPredUpdate <> io.toLsBlock.memPredUpdate
  decode.io.csrCtrl := RegNext(io.csrCtrl)


  // PC and JALR target for the jump unit, read from the FTQ by the
  // uop at dispatch port 0 (the jump dispatch port)
  val jumpInst = dispatch.io.enqIQCtrl(0).bits
  val jumpPcRead = io.frontend.fromFtq.getJumpPcRead
  io.jumpPc := jumpPcRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)
  val jumpTargetRead = io.frontend.fromFtq.target_read
  io.jalr_target := jumpTargetRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)

  // pipeline between decode and dispatch
  for (i <- 0 until RenameWidth) {
    PipelineConnect(decode.io.out(i), rename.io.in(i), rename.io.in(i).ready,
      flushReg || io.frontend.redirect_cfiUpdate.valid)
  }

  rename.io.redirect <> stage2Redirect
  rename.io.flush := flushReg
  rename.io.roqCommits <> roq.io.commits
  rename.io.out <> dispatch.io.fromRename
  rename.io.renameBypass <> dispatch.io.renameBypass
  rename.io.dispatchInfo <> dispatch.io.preDpInfo
  rename.io.csrCtrl <> RegNext(io.csrCtrl)

  dispatch.io.redirect <> stage2Redirect
  dispatch.io.flush := flushReg
  dispatch.io.enqRoq <> roq.io.enq
  dispatch.io.enqLsq <> io.enqLsq
  // mark newly allocated physical registers busy in the matching table
  dispatch.io.allocPregs.zipWithIndex.foreach { case (preg, i) =>
    intBusyTable.io.allocPregs(i).valid := preg.isInt
    fpBusyTable.io.allocPregs(i).valid := preg.isFp
    intBusyTable.io.allocPregs(i).bits := preg.preg
    fpBusyTable.io.allocPregs(i).bits := preg.preg
  }
  dispatch.io.enqIQCtrl := DontCare
  io.enqIQ <> dispatch.io.enqIQCtrl
  dispatch.io.csrCtrl <> io.csrCtrl
  dispatch.io.storeIssue <> io.stIn
  dispatch.io.readIntRf <> io.readIntRf
  dispatch.io.readFpRf <> io.readFpRf

  // writebacks clear the busy bit of the destination physical register
  fpBusyTable.io.flush := flushReg
  intBusyTable.io.flush := flushReg
  for((wb, setPhyRegRdy) <- io.writeback.take(NRIntWritePorts).zip(intBusyTable.io.wbPregs)){
    setPhyRegRdy.valid := wb.valid && wb.bits.uop.ctrl.rfWen
    setPhyRegRdy.bits := wb.bits.uop.pdest
  }
  for((wb, setPhyRegRdy) <- io.writeback.drop(NRIntWritePorts).zip(fpBusyTable.io.wbPregs)){
    setPhyRegRdy.valid := wb.valid && wb.bits.uop.ctrl.fpWen
    setPhyRegRdy.bits := wb.bits.uop.pdest
  }
  intBusyTable.io.read <> dispatch.io.readIntState
  fpBusyTable.io.read <> dispatch.io.readFpState

  roq.io.redirect <> stage2Redirect
  // register writebacks into the ROQ, dropping those killed by redirect/flush
  val exeWbResults = VecInit(io.writeback ++ io.stOut)
  for((roq_wb, wb) <- roq.io.exeWbResults.zip(exeWbResults)) {
    roq_wb.valid := RegNext(wb.valid && !wb.bits.uop.roqIdx.needFlush(stage2Redirect, flushReg))
    roq_wb.bits := RegNext(wb.bits)
  }

  // BUGFIX: was `io.redirect <> backendRedirect` with `backendRedirect`
  // undefined; the backend redirect is the stage-2 redirect.
  io.redirect <> stage2Redirect
  io.flush <> flushReg
  io.debug_int_rat <> rename.io.debug_int_rat
  io.debug_fp_rat <> rename.io.debug_fp_rat

//  dispatch.io.readPortIndex.intIndex <> io.toIntBlock.readPortIndex
//  dispatch.io.readPortIndex.fpIndex <> io.toFpBlock.readPortIndex

  // roq to int block
  io.roqio.toCSR <> roq.io.csr
  io.roqio.toCSR.perfinfo.retiredInstr <> RegNext(roq.io.csr.perfinfo.retiredInstr)
  io.roqio.exception := roq.io.exception
  io.roqio.exception.bits.uop.cf.pc := flushPC
  // roq to mem block
  io.roqio.lsq <> roq.io.lsq

  io.perfInfo.ctrlInfo.roqFull := RegNext(roq.io.roqFull)
  io.perfInfo.ctrlInfo.intdqFull := RegNext(dispatch.io.ctrlInfo.intdqFull)
  io.perfInfo.ctrlInfo.fpdqFull := RegNext(dispatch.io.ctrlInfo.fpdqFull)
  io.perfInfo.ctrlInfo.lsdqFull := RegNext(dispatch.io.ctrlInfo.lsdqFull)
}