/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend.rename

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.issue.SchdBlockParams
import xiangshan.backend.datapath.{DataSource}

/** One read port of the busy table.
  *
  * `req` carries a physical register index; `resp` is true when that register
  * is ready (i.e. NOT busy), and `loadDependency` returns the register's
  * per-load-pipeline dependency shift state (see [[BusyTable.loadDependency]]).
  */
class BusyTableReadIO(implicit p: Parameters) extends XSBundle {
  val req = Input(UInt(PhyRegIdxWidth.W))
  val resp = Output(Bool())
  val loadDependency = Vec(LoadPipelineWidth, Output(UInt(LoadDependencyWidth.W)))
}

/** Tracks the busy/ready state of every physical register of one register file.
  *
  * A register becomes busy when rename allocates it (`allocPregs`) or when a
  * speculative wakeup is later cancelled (load cancel / og0 cancel); it becomes
  * ready on writeback (`wbPregs`) or on an uncancelled fast wakeup (`wakeUp`).
  *
  * @param numReadPorts  number of busy-table read ports exposed via `io.read`
  * @param numWritePorts number of writeback (set-ready) ports
  * @param numPhyPregs   number of physical registers tracked
  * @param pregWB        which register file this table serves (Int/Fp/Vf/V0/Vl);
  *                      selects the wakeup write-enable bit (rfWen/fpWen/...)
  */
class BusyTable(numReadPorts: Int, numWritePorts: Int, numPhyPregs: Int, pregWB: PregWB)(implicit p: Parameters, params: SchdBlockParams) extends XSModule with HasPerfEvents {
  val io = IO(new Bundle() {
    // set preg state to busy
    val allocPregs = Vec(RenameWidth, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // set preg state to ready (write back regfile + rob walk)
    val wbPregs = Vec(numWritePorts, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // fast wakeup
    val wakeUp: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
    // cancelFromDatapath
    val og0Cancel = Input(ExuVec())
    // cancelFromMem
    val ldCancel = Vec(backendParams.LdExuCnt, Flipped(new LoadCancelIO))
    // read preg state
    val read = Vec(numReadPorts, new BusyTableReadIO)
  })

  // Elaboration-time check: this table only needs load-cancel tracking if some
  // EXU with a load dependency writes this register file AND one of its wakeup
  // sinks reads it. Otherwise the ldCancel inputs are tied off below.
  val allExuParams = params.backendParam.allExuParams
  val intBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeIntRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readIntRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val fpBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeFpRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readFpRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val vfBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeVfRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVecRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val v0BusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeV0Rf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVecRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val vlBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeVlRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVlRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val needLoadCancel = pregWB match {
    case IntWB(_, _) => intBusyTableNeedLoadCancel
    case FpWB(_, _) => fpBusyTableNeedLoadCancel
    case VfWB(_, _) => vfBusyTableNeedLoadCancel
    case V0WB(_, _) => v0BusyTableNeedLoadCancel
    case VlWB(_, _) => vlBusyTableNeedLoadCancel
    case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
  }
  if (!needLoadCancel) println(s"[BusyTable]: WbConfig ${pregWB} busyTable don't need loadCancel")
  // When load cancel is irrelevant for this register file, force the cancel
  // inputs to zero so the cancel logic below elaborates away.
  val loadCancel = if (needLoadCancel) io.ldCancel else 0.U.asTypeOf(io.ldCancel)
  // Per-preg, per-load-pipeline dependency shift registers: nonzero while the
  // preg's wakeup is still speculative on an in-flight load.
  val loadDependency = RegInit(0.U.asTypeOf(Vec(numPhyPregs, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W)))))
  // Incoming wakeups' dependency vectors, aligned to the cycle they are stored.
  val shiftLoadDependency = Wire(Vec(io.wakeUp.size, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W))))
  // Next-state value for each busy bit (1 = busy).
  val tableUpdate = Wire(Vec(numPhyPregs, Bool()))
  // For each preg: one-hot over wakeup sources that wake this preg this cycle.
  val wakeupOHVec = Wire(Vec(numPhyPregs, UInt(io.wakeUp.size.W)))

  /** OR together the one-hot decodings of all valid requests into a preg bitmask. */
  def reqVecToMask(rVec: Vec[Valid[UInt]]): UInt = {
    ParallelOR(rVec.map(v => Mux(v.valid, UIntToOH(v.bits), 0.U)))
  }

  // Prepare the dependency value to store on a wakeup: if the wakeup source is
  // itself the load EXU of pipeline `deqPortIdx`, seed that lane with 1;
  // otherwise take the source's dependency shifted left by one (one cycle older).
  shiftLoadDependency.zip(io.wakeUp.map(_.bits.loadDependency)).zip(params.wakeUpInExuSources.map(_.name)).foreach {
    case ((deps, originalDeps), name) => deps.zip(originalDeps).zipWithIndex.foreach {
      case ((dep, originalDep), deqPortIdx) =>
        if (params.backendParam.getLdExuIdx(params.backendParam.allExuParams.find(_.name == name).get) == deqPortIdx)
          dep := 1.U
        else
          dep := originalDep << 1
    }
  }

  // A wakeup source wakes preg `idx` when: it is valid, its write-enable for
  // THIS register file is set, its pdest matches, and it is not being cancelled
  // (neither by load cancel nor, for 0-latency wakeups, by og0 cancel).
  wakeupOHVec.zipWithIndex.foreach{ case (wakeupOH, idx) =>
    val tmp = pregWB match {
      case IntWB(_, _) => io.wakeUp.map(x => x.valid && x.bits.rfWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case FpWB(_, _) => io.wakeUp.map(x => x.valid && x.bits.fpWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case VfWB(_, _) => io.wakeUp.map(x => x.valid && x.bits.vecWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case V0WB(_, _) => io.wakeUp.map(x => x.valid && x.bits.v0Wen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case VlWB(_, _) => io.wakeUp.map(x => x.valid && x.bits.vlWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
    }
    wakeupOH := (if (io.wakeUp.nonEmpty) VecInit(tmp.toSeq).asUInt else 0.U)
  }
  val wbMask = reqVecToMask(io.wbPregs)
  val allocMask = reqVecToMask(io.allocPregs)
  val wakeUpMask = VecInit(wakeupOHVec.map(_.orR).toSeq).asUInt
  // Pregs whose stored load dependency is being cancelled this cycle.
  val ldCancelMask = loadDependency.map(x => LoadShouldCancel(Some(x), loadCancel))

  // Dependency state machine per preg (priority order matters):
  //   alloc / writeback / cancel -> clear;
  //   wakeup                     -> capture the waking source's (shifted) dependency;
  //   otherwise, while nonzero   -> age by shifting left each cycle.
  loadDependency.zipWithIndex.foreach{ case (ldDp, idx) =>
    when(allocMask(idx) || wbMask(idx) || ldCancelMask(idx)) {
      ldDp := 0.U.asTypeOf(ldDp)
    }.elsewhen(wakeUpMask(idx)) {
      // wakeupOHVec(idx) is one-hot by construction, so Mux1H is safe here.
      ldDp := (if (io.wakeUp.nonEmpty) Mux1H(wakeupOHVec(idx), shiftLoadDependency) else 0.U.asTypeOf(ldDp))
    }.elsewhen(ldDp.map(x => x.orR).reduce(_ | _)) {
      ldDp := VecInit(ldDp.map(x => x << 1))
    }
  }

  /*
  we can ensure that the following conditions are mutually exclusive
  wakeUp and cancel (same pdest) may arrive at the same cycle
  for a pdest:
    rename alloc => wakeUp / cancel => ... => wakeUp / cancel => wakeUp
  or
    rename alloc => wbMask //TODO we still need wbMask because wakeUp signal is partial now
  the bypass state lasts for a maximum of one cycle, cancel(=> busy) or else(=> regFile)
  */
  // One busy bit per preg (1 = busy); only toggled when one of the four events
  // fires, which is why the mutual-exclusion argument above is needed.
  val table = VecInit((0 until numPhyPregs).zip(tableUpdate).map{ case (idx, update) =>
    RegEnable(update, 0.U(1.W), allocMask(idx) || ldCancelMask(idx) || wakeUpMask(idx) || wbMask(idx))
  }).asUInt

  // Next-state logic: alloc or cancel sets busy; wakeup or writeback clears it.
  // The later Chisel assignment wins, so the idx==0 special case overrides.
  tableUpdate.zipWithIndex.foreach{ case (update, idx) =>
    when(allocMask(idx) || ldCancelMask(idx)) {
      update := true.B //busy
      if (idx == 0 && pregWB.isInstanceOf[IntWB]) {
        // Int RegFile 0 is always ready
        update := false.B
      }
    }.elsewhen(wakeUpMask(idx) || wbMask(idx)) {
      update := false.B //ready
    }.otherwise {
      update := table(idx)
    }
  }

  // Read ports: ready = not busy; also expose the raw dependency state.
  io.read.foreach{ case res =>
    res.resp := !table(res.req)
    res.loadDependency := loadDependency(res.req)
  }

  // Busy count computed as two half-width popcounts registered separately,
  // then summed and registered again — presumably a timing optimization on the
  // wide PopCount; the count is therefore two cycles stale (perf-only signal).
  val oddTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 1).map(_._1)
  val evenTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 0).map(_._1)
  val busyCount = RegNext(RegNext(PopCount(oddTable)) + RegNext(PopCount(evenTable)))

  XSPerfAccumulate("busy_count", PopCount(table))

  // Quartile occupancy perf events based on the delayed busyCount above.
  val perfEvents = Seq(
    ("bt_std_freelist_1_4_valid", busyCount < (numPhyPregs / 4).U                                        ),
    ("bt_std_freelist_2_4_valid", busyCount > (numPhyPregs / 4).U && busyCount <= (numPhyPregs / 2).U    ),
    ("bt_std_freelist_3_4_valid", busyCount > (numPhyPregs / 2).U && busyCount <= (numPhyPregs * 3 / 4).U),
    ("bt_std_freelist_4_4_valid", busyCount > (numPhyPregs * 3 / 4).U                                    )
  )
  generatePerfEvent()
}