/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend.rename

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.dispatch.PreDispatchInfo

class RenameBypassInfo(implicit p: Parameters) extends XSBundle {
  val lsrc1_bypass = MixedVec(List.tabulate(RenameWidth-1)(i => UInt((i+1).W)))
  val lsrc2_bypass = MixedVec(List.tabulate(RenameWidth-1)(i => UInt((i+1).W)))
  val lsrc3_bypass = MixedVec(List.tabulate(RenameWidth-1)(i => UInt((i+1).W)))
  val ldest_bypass = MixedVec(List.tabulate(RenameWidth-1)(i => UInt((i+1).W)))
}

class Rename(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val redirect = Flipped(ValidIO(new Redirect))
    val robCommits = Flipped(new RobCommitIO)
    // from decode
    val in = Vec(RenameWidth, Flipped(DecoupledIO(new CfCtrl)))
    // to rename table
    val intReadPorts = Vec(RenameWidth, Vec(3, Input(UInt(PhyRegIdxWidth.W))))
    val fpReadPorts = Vec(RenameWidth, Vec(4, Input(UInt(PhyRegIdxWidth.W))))
    val intRenamePorts = Vec(RenameWidth, Output(new RatWritePort))
    val fpRenamePorts = Vec(RenameWidth, Output(new RatWritePort))
    // to dispatch1
    val out = Vec(RenameWidth, DecoupledIO(new MicroOp))
    val renameBypass = Output(new RenameBypassInfo)
    val dispatchInfo = Output(new PreDispatchInfo)
  })

  // create free lists and rat
  val intFreeList = Module(new freelist.MEFreeList)
  val fpFreeList = Module(new freelist.StdFreeList)

  // decide whether a given instruction needs to allocate a new physical register
  // (CfCtrl: from decode; RobCommitInfo: from rob)
  def needDestReg[T <: CfCtrl](fp: Boolean, x: T): Bool = {
    if (fp) x.ctrl.fpWen else x.ctrl.rfWen && (x.ctrl.ldest =/= 0.U)
  }
  def needDestRegCommit[T <: RobCommitInfo](fp: Boolean, x: T): Bool = {
    if (fp) x.fpWen else x.rfWen && (x.ldest =/= 0.U)
  }

  // connect [redirect + walk] ports for the floating-point and integer free lists
  Seq((fpFreeList, true), (intFreeList, false)).foreach { case (fl, isFp) =>
    fl.redirect := io.redirect.valid
    fl.walk := io.robCommits.isWalk
    // when isWalk, use stepBack to restore the head pointer of the free list
    // (if ME is enabled, stepBack of intFreeList should be useless and thus optimized out)
    fl.stepBack := PopCount(io.robCommits.valid.zip(io.robCommits.info).map { case (v, i) => v && needDestRegCommit(isFp, i) })
  }
  // walk has higher priority than allocation, so isWalk is not checked here
  // allocation proceeds only when both free lists and dispatch1 have enough space
  intFreeList.doAllocate := fpFreeList.canAllocate && io.out(0).ready
  fpFreeList.doAllocate := intFreeList.canAllocate && io.out(0).ready
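
  // Note on the cross-coupled wiring above: each free list's doAllocate is asserted
  // only when the *other* free list can allocate and dispatch1 is ready, so the two
  // lists advance in lockstep and a rename group is never half-allocated
  // (doAllocate presumably gates the head-pointer update inside each free list).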

  // dispatch1 ready && floating-point free list ready && integer free list ready && not walking
  val canOut = io.out(0).ready && fpFreeList.canAllocate && intFreeList.canAllocate && !io.robCommits.isWalk

  // speculatively assign a robIdx to each incoming instruction
  val validCount = PopCount(io.in.map(_.valid)) // number of instructions waiting to enter rob (from decode)
  val robIdxHead = RegInit(0.U.asTypeOf(new RobPtr))
  val lastCycleMisprediction = RegNext(io.redirect.valid && !io.redirect.bits.flushItself())
  val robIdxHeadNext = Mux(io.redirect.valid, io.redirect.bits.robIdx, // redirect: move ptr to the given rob index
    Mux(lastCycleMisprediction, robIdxHead + 1.U,                      // mis-prediction: the mispredicted instruction itself is not flushed
    Mux(canOut, robIdxHead + validCount,                               // instructions entered the next stage: advance robIdx
    /* default */ robIdxHead)))                                        // no instructions passed this cycle: keep the old value
  robIdxHead := robIdxHeadNext

  /**
    * Rename: allocate free physical registers and update the rename table
    */
  val uops = Wire(Vec(RenameWidth, new MicroOp))
  uops.foreach( uop => {
    uop.srcState(0) := DontCare
    uop.srcState(1) := DontCare
    uop.srcState(2) := DontCare
    uop.robIdx := DontCare
    uop.diffTestDebugLrScValid := DontCare
    uop.debugInfo := DontCare
    uop.lqIdx := DontCare
    uop.sqIdx := DontCare
  })

  val needFpDest = Wire(Vec(RenameWidth, Bool()))
  val needIntDest = Wire(Vec(RenameWidth, Bool()))
  val hasValid = Cat(io.in.map(_.valid)).orR

  val isMove = io.in.map(_.bits.ctrl.isMove)
  val isMax = intFreeList.maxVec
  val meEnable = WireInit(VecInit(Seq.fill(RenameWidth)(false.B)))
  val psrc_cmp = Wire(MixedVec(List.tabulate(RenameWidth-1)(i => UInt((i+1).W))))
  val intPsrc = Wire(Vec(RenameWidth, UInt()))

  val intSpecWen = Wire(Vec(RenameWidth, Bool()))
  val fpSpecWen = Wire(Vec(RenameWidth, Bool()))

  // uop calculation
  for (i <- 0 until RenameWidth) {
    uops(i).cf := io.in(i).bits.cf
    uops(i).ctrl := io.in(i).bits.ctrl

    val inValid = io.in(i).valid

    // allocate a new physical register
    needFpDest(i) := inValid && needDestReg(fp = true, io.in(i).bits)
    needIntDest(i) := inValid && needDestReg(fp = false, io.in(i).bits)
    fpFreeList.allocateReq(i) := needFpDest(i)
    intFreeList.allocateReq(i) := needIntDest(i)

    // no valid instruction from decode stage || all resources (dispatch1 + both free lists) ready
    io.in(i).ready := !hasValid || canOut

    // do checkpoints when a branch instruction comes
    // for (fl <- Seq(fpFreeList, intFreeList)) {
    //   fl.cpReqs(i).valid := inValid
    //   fl.cpReqs(i).bits := io.in(i).bits.brTag
    // }

    uops(i).robIdx := robIdxHead + PopCount(io.in.take(i).map(_.valid))

    val intPhySrcVec = io.intReadPorts(i).take(2)
    val intOldPdest = io.intReadPorts(i).last
    intPsrc(i) := intPhySrcVec(0)
    val fpPhySrcVec = io.fpReadPorts(i).take(3)
    val fpOldPdest = io.fpReadPorts(i).last
    uops(i).psrc(0) := Mux(uops(i).ctrl.srcType(0) === SrcType.reg, intPhySrcVec(0), fpPhySrcVec(0))
    uops(i).psrc(1) := Mux(uops(i).ctrl.srcType(1) === SrcType.reg, intPhySrcVec(1), fpPhySrcVec(1))
    uops(i).psrc(2) := fpPhySrcVec(2)
    uops(i).old_pdest := Mux(uops(i).ctrl.rfWen, intOldPdest, fpOldPdest)
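
    // Move elimination: a move may reuse its source physical register as pdest only if
    //   (a) the reference counter of that physical register has not saturated
    //       (isMax presumably reflects the counter limit inside MEFreeList),
    //   (b) no older instruction in this rename group writes the move's logical source
    //       (lsrc1_bypass: such an in-group RAW dependency would make the RAT-read psrc stale), and
    //   (c) no older in-group move already reuses the same physical source (psrc_cmp).
    // A move whose source is x0 (lsrc(0) === 0) is always eliminable.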
    if (i == 0) {
      // calculate meEnable
      meEnable(i) := isMove(i) && (!isMax(intPsrc(i)) || uops(i).ctrl.lsrc(0) === 0.U)
    } else {
      // compare psrc0
      psrc_cmp(i-1) := Cat((0 until i).map(j => {
        intPsrc(i) === intPsrc(j) && io.in(i).bits.ctrl.isMove && io.in(j).bits.ctrl.isMove
      }) /* reverse is not necessary here */)

      // calculate meEnable
      meEnable(i) := isMove(i) && (!(io.renameBypass.lsrc1_bypass(i-1).orR | psrc_cmp(i-1).orR | isMax(intPsrc(i))) || uops(i).ctrl.lsrc(0) === 0.U)
    }
    uops(i).eliminatedMove := meEnable(i) || (uops(i).ctrl.isMove && uops(i).ctrl.ldest === 0.U)

    // send psrc of eliminated move instructions to the free list and label them as eliminated
    intFreeList.psrcOfMove(i).valid := meEnable(i)
    intFreeList.psrcOfMove(i).bits := intPsrc(i)

    // update pdest
    uops(i).pdest := Mux(meEnable(i), intPsrc(i),                              // move eliminated
                     Mux(needIntDest(i), intFreeList.allocatePhyReg(i),        // normal int inst
                     Mux(uops(i).ctrl.ldest === 0.U && uops(i).ctrl.rfWen, 0.U // int inst with dst = x0
                     /* default */, fpFreeList.allocatePhyReg(i))))            // normal fp inst

    // Assign performance counters
    uops(i).debugInfo.renameTime := GTimer()

    io.out(i).valid := io.in(i).valid && intFreeList.canAllocate && fpFreeList.canAllocate && !io.robCommits.isWalk
    io.out(i).bits := uops(i)

    // write the speculative rename table
    // (the rat write ports themselves are driven later, inside the commit code below)
    intSpecWen(i) := intFreeList.allocateReq(i) && intFreeList.canAllocate && intFreeList.doAllocate && !io.robCommits.isWalk
    fpSpecWen(i) := fpFreeList.allocateReq(i) && fpFreeList.canAllocate && fpFreeList.doAllocate && !io.robCommits.isWalk
  }

  // We don't bypass the old_pdest from valid instructions with the same ldest currently in the rename stage.
  // Instead, we determine whether there are dependencies between the valid instructions.
  for (i <- 1 until RenameWidth) {
    io.renameBypass.lsrc1_bypass(i-1) := Cat((0 until i).map(j => {
      val fpMatch  = needFpDest(j) && io.in(i).bits.ctrl.srcType(0) === SrcType.fp
      val intMatch = needIntDest(j) && io.in(i).bits.ctrl.srcType(0) === SrcType.reg
      (fpMatch || intMatch) && io.in(j).bits.ctrl.ldest === io.in(i).bits.ctrl.lsrc(0)
    }).reverse)
    io.renameBypass.lsrc2_bypass(i-1) := Cat((0 until i).map(j => {
      val fpMatch  = needFpDest(j) && io.in(i).bits.ctrl.srcType(1) === SrcType.fp
      val intMatch = needIntDest(j) && io.in(i).bits.ctrl.srcType(1) === SrcType.reg
      (fpMatch || intMatch) && io.in(j).bits.ctrl.ldest === io.in(i).bits.ctrl.lsrc(1)
    }).reverse)
    io.renameBypass.lsrc3_bypass(i-1) := Cat((0 until i).map(j => {
      val fpMatch  = needFpDest(j) && io.in(i).bits.ctrl.srcType(2) === SrcType.fp
      val intMatch = needIntDest(j) && io.in(i).bits.ctrl.srcType(2) === SrcType.reg
      (fpMatch || intMatch) && io.in(j).bits.ctrl.ldest === io.in(i).bits.ctrl.lsrc(2)
    }).reverse)
    io.renameBypass.ldest_bypass(i-1) := Cat((0 until i).map(j => {
      val fpMatch  = needFpDest(j) && needFpDest(i)
      val intMatch = needIntDest(j) && needIntDest(i)
      (fpMatch || intMatch) && io.in(j).bits.ctrl.ldest === io.in(i).bits.ctrl.ldest
    }).reverse)
  }

  // calculate lsq space requirement
  val isLs    = VecInit(uops.map(uop => FuType.isLoadStore(uop.ctrl.fuType)))
  val isStore = VecInit(uops.map(uop => FuType.isStoreExu(uop.ctrl.fuType)))
  val isAMO   = VecInit(uops.map(uop => FuType.isAMO(uop.ctrl.fuType)))
  io.dispatchInfo.lsqNeedAlloc := VecInit((0 until RenameWidth).map(i =>
    Mux(isLs(i), Mux(isStore(i) && !isAMO(i), 2.U, 1.U), 0.U)))
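
  // lsqNeedAlloc per rename slot: 0 for non-memory uops, 2 for (non-AMO) stores and
  // 1 for loads and AMOs; dispatch1 presumably uses these values to reserve
  // load/store queue entries before the uops reach the dispatch queues.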

  /**
    * Instruction commit: update the free lists and rename table
    */
  for (i <- 0 until CommitWidth) {

    Seq((io.intRenamePorts, false), (io.fpRenamePorts, true)) foreach { case (rat, fp) =>
      // the commit request is valid and the instruction has a destination register
      val commitDestValid = io.robCommits.valid(i) && needDestRegCommit(fp, io.robCommits.info(i))
      XSDebug(p"isFp[${fp}]index[$i]-commitDestValid:$commitDestValid,isWalk:${io.robCommits.isWalk}\n")

      /*
      I. RAT Update
      */

      // walk-back write - restore speculative state: ldest => old_pdest
      if (fp && i < RenameWidth) {
        // When a redirect happens (mis-prediction), don't update the rename table
        rat(i).wen := fpSpecWen(i) && !io.redirect.valid
        rat(i).addr := uops(i).ctrl.ldest
        rat(i).data := fpFreeList.allocatePhyReg(i)
      } else if (!fp && i < RenameWidth) {
        rat(i).wen := intSpecWen(i) && !io.redirect.valid
        rat(i).addr := uops(i).ctrl.ldest
        rat(i).data := Mux(meEnable(i), intPsrc(i), intFreeList.allocatePhyReg(i))
      }

      /*
      II. Free List Update
      */
      if (fp) { // floating-point free list
        fpFreeList.freeReq(i) := commitDestValid && !io.robCommits.isWalk
        fpFreeList.freePhyReg(i) := io.robCommits.info(i).old_pdest
      } else { // integer free list

        // during the walk process:
        // 1. for a normal inst, free pdest + revert rat from ldest->pdest to ldest->old_pdest
        // 2. for an ME inst, free pdest (commit counter++) + revert rat

        // conclusion:
        // a. rat recovery is independent of whether the instruction was move-eliminated
        // b. treat walk as a normal commit, except that old_pdests are replaced by pdests and io.walk is set
        // c. the pdest port is ignored while walking

        intFreeList.freeReq(i) := commitDestValid // walk or not walk
        intFreeList.freePhyReg(i) := Mux(io.robCommits.isWalk, io.robCommits.info(i).pdest, io.robCommits.info(i).old_pdest)
        intFreeList.eliminatedMove(i) := io.robCommits.info(i).eliminatedMove
        intFreeList.multiRefPhyReg(i) := io.robCommits.info(i).pdest
      }
    }
  }


  /*
  Debug and performance counters
  */

  def printRenameInfo(in: DecoupledIO[CfCtrl], out: DecoupledIO[MicroOp]) = {
    XSInfo(
      in.valid && in.ready,
      p"pc:${Hexadecimal(in.bits.cf.pc)} in v:${in.valid} in rdy:${in.ready} " +
      p"lsrc(0):${in.bits.ctrl.lsrc(0)} -> psrc(0):${out.bits.psrc(0)} " +
      p"lsrc(1):${in.bits.ctrl.lsrc(1)} -> psrc(1):${out.bits.psrc(1)} " +
      p"lsrc(2):${in.bits.ctrl.lsrc(2)} -> psrc(2):${out.bits.psrc(2)} " +
      p"ldest:${in.bits.ctrl.ldest} -> pdest:${out.bits.pdest} " +
      p"old_pdest:${out.bits.old_pdest} " +
      p"out v:${out.valid} r:${out.ready}\n"
    )
  }

  for ((x, y) <- io.in.zip(io.out)) {
    printRenameInfo(x, y)
  }

  XSDebug(io.robCommits.isWalk, p"Walk Recovery Enabled\n")
  XSDebug(io.robCommits.isWalk, p"validVec:${Binary(io.robCommits.valid.asUInt)}\n")
  for (i <- 0 until CommitWidth) {
    val info = io.robCommits.info(i)
    XSDebug(io.robCommits.isWalk && io.robCommits.valid(i), p"[#$i walk info] pc:${Hexadecimal(info.pc)} " +
      p"ldest:${info.ldest} rfWen:${info.rfWen} fpWen:${info.fpWen} " + p"eliminatedMove:${info.eliminatedMove} " +
      p"pdest:${info.pdest} old_pdest:${info.old_pdest}\n")
  }

  XSDebug(p"inValidVec: ${Binary(Cat(io.in.map(_.valid)))}\n")
  XSInfo(!canOut, p"stall at rename, hasValid:${hasValid}, fpCanAlloc:${fpFreeList.canAllocate}, intCanAlloc:${intFreeList.canAllocate}, dispatch1ready:${io.out(0).ready}, isWalk:${io.robCommits.isWalk}\n")

  XSPerfAccumulate("in", Mux(RegNext(io.in(0).ready), PopCount(io.in.map(_.valid)), 0.U))
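
  // Each stall_cycle_* counter below attributes a stalled cycle to a single cause
  // (dispatch backpressure, fp free list, int free list, or walk); a cycle in which
  // several causes overlap is counted by none of these buckets.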
  XSPerfAccumulate("utilization", PopCount(io.in.map(_.valid)))
  XSPerfAccumulate("waitInstr", PopCount((0 until RenameWidth).map(i => io.in(i).valid && !io.in(i).ready)))
  XSPerfAccumulate("stall_cycle_dispatch", hasValid && !io.out(0).ready && fpFreeList.canAllocate && intFreeList.canAllocate && !io.robCommits.isWalk)
  XSPerfAccumulate("stall_cycle_fp", hasValid && io.out(0).ready && !fpFreeList.canAllocate && intFreeList.canAllocate && !io.robCommits.isWalk)
  XSPerfAccumulate("stall_cycle_int", hasValid && io.out(0).ready && fpFreeList.canAllocate && !intFreeList.canAllocate && !io.robCommits.isWalk)
  XSPerfAccumulate("stall_cycle_walk", hasValid && io.out(0).ready && fpFreeList.canAllocate && intFreeList.canAllocate && io.robCommits.isWalk)

  XSPerfAccumulate("move_instr_count", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove)))
  XSPerfAccumulate("move_elim_enabled", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && meEnable(i))))
  XSPerfAccumulate("move_elim_cancelled", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i))))
  XSPerfAccumulate("move_elim_cancelled_psrc_bypass", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && { if (i == 0) false.B else io.renameBypass.lsrc1_bypass(i-1).orR })))
  XSPerfAccumulate("move_elim_cancelled_cnt_limit", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && isMax(io.out(i).bits.psrc(0)))))
  XSPerfAccumulate("move_elim_cancelled_inc_more_than_one", PopCount(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && { if (i == 0) false.B else psrc_cmp(i-1).orR })))

  // to make sure meEnable functions as expected
  for (i <- 0 until RenameWidth) {
    XSDebug(io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && isMax(io.out(i).bits.psrc(0)),
      p"ME_CANCELLED: ref counter hits max value (pc:0x${Hexadecimal(io.in(i).bits.cf.pc)})\n")
    XSDebug(io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && { if (i == 0) false.B else io.renameBypass.lsrc1_bypass(i-1).orR },
      p"ME_CANCELLED: RAW dependency (pc:0x${Hexadecimal(io.in(i).bits.cf.pc)})\n")
    XSDebug(io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i) && { if (i == 0) false.B else psrc_cmp(i-1).orR },
      p"ME_CANCELLED: psrc duplicates with former instruction (pc:0x${Hexadecimal(io.in(i).bits.cf.pc)})\n")
  }
  XSDebug(VecInit(Seq.tabulate(RenameWidth)(i => io.out(i).fire() && io.in(i).bits.ctrl.isMove && !meEnable(i))).asUInt().orR,
    p"ME_CANCELLED: pc group [ " + (0 until RenameWidth).map(i => p"fire:${io.out(i).fire()},pc:0x${Hexadecimal(io.in(i).bits.cf.pc)} ").reduceLeft(_ + _) + p"]\n")
  XSInfo(meEnable.asUInt().orR(), p"meEnableVec:${Binary(meEnable.asUInt)}\n")
}