xref: /XiangShan/src/main/scala/xiangshan/backend/rename/BusyTable.scala (revision a38d1eab87777ed93b417106a7dfd58a062cee18)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend.rename

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.issue.SchdBlockParams
import xiangshan.backend.datapath.DataSource

class BusyTableReadIO(implicit p: Parameters) extends XSBundle {
  val req = Input(UInt(PhyRegIdxWidth.W))
  val resp = Output(Bool())
  val loadDependency = Vec(LoadPipelineWidth, Output(UInt(LoadDependencyWidth.W)))
}

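/**
  * BusyTable tracks the busy/ready state of every physical register of one register file, selected
  * by pregWB (Int/Fp/Vf/V0/Vl). A preg turns busy when rename allocates it and turns ready again on
  * writeback or on an (uncancelled) wakeup. Wakeups that speculate on a load are remembered per preg
  * in a shifting loadDependency vector, so a later load cancel can flip the entry back to busy.
  */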
class BusyTable(numReadPorts: Int, numWritePorts: Int, numPhyPregs: Int, pregWB: PregWB)(implicit p: Parameters, params: SchdBlockParams) extends XSModule with HasPerfEvents {
  val io = IO(new Bundle() {
    // set preg state to busy
    val allocPregs = Vec(RenameWidth, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // set preg state to ready (regfile writeback + rob walk)
    val wbPregs = Vec(numWritePorts, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // fast wakeup
    val wakeUp: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
    // cancel from datapath (og0 stage)
    val og0Cancel = Input(ExuVec())
    // cancel from mem (load cancel)
    val ldCancel = Vec(backendParams.LdExuCnt, Flipped(new LoadCancelIO))
    // read preg state
    val read = Vec(numReadPorts, new BusyTableReadIO)
  })

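  // This busy table only has to handle load cancel if some EXU that writes this register file can
  // produce a load-dependent (speculative) wakeup consumed by an IQ reading the same register file.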
  val allExuParams = params.backendParam.allExuParams
  val intBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeIntRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readIntRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val fpBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeFpRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readFpRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val vfBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeVfRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVecRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val v0BusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeV0Rf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVecRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val vlBusyTableNeedLoadCancel = allExuParams.map(x =>
    x.needLoadDependency && x.writeVlRf && x.iqWakeUpSourcePairs.map(y => y.sink.getExuParam(allExuParams).readVlRf).foldLeft(false)(_ || _)
  ).reduce(_ || _)
  val needLoadCancel = pregWB match {
    case IntWB(_, _) => intBusyTableNeedLoadCancel
    case FpWB(_, _) => fpBusyTableNeedLoadCancel
    case VfWB(_, _) => vfBusyTableNeedLoadCancel
    case V0WB(_, _) => v0BusyTableNeedLoadCancel
    case VlWB(_, _) => vlBusyTableNeedLoadCancel
    case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
  }
  if (!needLoadCancel) println(s"[BusyTable]: WbConfig ${pregWB} busyTable doesn't need loadCancel")
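  // keep only the cancel inputs and wakeup sources that matter for this register file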
  val loadCancel = if (needLoadCancel) io.ldCancel else 0.U.asTypeOf(io.ldCancel)
  val wakeUpIn = pregWB match {
    case IntWB(_, _) => io.wakeUp.filter(_.bits.params.writeIntRf)
    case FpWB(_, _) => io.wakeUp.filter(_.bits.params.writeFpRf)
    case VfWB(_, _) => io.wakeUp.filter(_.bits.params.writeVfRf)
    case V0WB(_, _) => io.wakeUp.filter(_.bits.params.writeV0Rf)
    case VlWB(_, _) => io.wakeUp.filter(_.bits.params.writeVlRf)
    case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
  }
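  // loadDependency: per-preg shift registers recording which load pipe(s) the preg's pending wakeup
  //                 speculatively depends on, aged by one bit position every cycle
  // shiftLoadDependency: the dependency each wakeup source would install (already aged for the next cycle)
  // tableUpdate / wakeupOHVec: next busy state per preg, and a per-preg one-hot of matching wakeup sources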
  val loadDependency = RegInit(0.U.asTypeOf(Vec(numPhyPregs, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W)))))
  val shiftLoadDependency = Wire(Vec(wakeUpIn.size, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W))))
  val tableUpdate = Wire(Vec(numPhyPregs, Bool()))
  val wakeupOHVec = Wire(Vec(numPhyPregs, UInt(wakeUpIn.size.W)))

  def reqVecToMask(rVec: Vec[Valid[UInt]]): UInt = {
    ParallelOR(rVec.map(v => Mux(v.valid, UIntToOH(v.bits), 0.U)))
  }

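  // Pre-compute the dependency a wakeup installs: a load EXU seeds a fresh dependency (bit 0 set) on
  // its own load pipe, while any other source forwards its incoming dependency shifted left by one.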
  shiftLoadDependency.zip(wakeUpIn).foreach{ case (deps, wakeup) =>
    if (wakeup.bits.params.hasLoadExu) {
      deps.zipWithIndex.foreach{ case (dep, i) =>
        if (backendParams.getLdExuIdx(wakeup.bits.params) == i) dep := 1.U
        else dep := 0.U
      }
    }
    else {
      deps.zip(wakeup.bits.loadDependency).foreach{ case (sink, source) =>
        sink := source << 1
      }
    }
  }

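  // For every preg, build a one-hot of wakeup sources that target it this cycle with the write enable
  // matching this register file, dropping wakeups killed by og0Cancel or by a load cancel.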
  wakeupOHVec.zipWithIndex.foreach{ case (wakeupOH, idx) =>
    val tmp = pregWB match {
      case IntWB(_, _) => wakeUpIn.map(x => x.valid && x.bits.rfWen  && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case FpWB(_, _)  => wakeUpIn.map(x => x.valid && x.bits.fpWen  && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case VfWB(_, _)  => wakeUpIn.map(x => x.valid && x.bits.vecWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case V0WB(_, _)  => wakeUpIn.map(x => x.valid && x.bits.v0Wen  && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case VlWB(_, _)  => wakeUpIn.map(x => x.valid && x.bits.vlWen  && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), loadCancel) && !(x.bits.is0Lat && io.og0Cancel(x.bits.params.exuIdx)))
      case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
    }
    wakeupOH := (if (wakeUpIn.nonEmpty) VecInit(tmp.toSeq).asUInt else 0.U)
  }
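  // per-cycle masks over all pregs: writeback, new allocation, (filtered) wakeup, and cancelled load dependency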
  val wbMask = reqVecToMask(io.wbPregs)
  val allocMask = reqVecToMask(io.allocPregs)
  val wakeUpMask = VecInit(wakeupOHVec.map(_.orR).toSeq).asUInt
  val ldCancelMask = loadDependency.map(x => LoadShouldCancel(Some(x), loadCancel))

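  // On a wakeup, latch the (already aged) dependency of the selected source; otherwise keep aging a
  // non-zero dependency until it shifts out. E.g. a wakeup from load pipe 1 installs 1 on that pipe's
  // entry, which becomes 2 the next cycle and eventually drains to zero unless it gets cancelled.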
  loadDependency.zipWithIndex.foreach{ case (ldDp, idx) =>
    when(wakeUpMask(idx)) {
      ldDp := (if (wakeUpIn.nonEmpty) Mux1H(wakeupOHVec(idx), shiftLoadDependency) else 0.U.asTypeOf(ldDp))
    }
    .elsewhen(ldDp.map(x => x.orR).reduce(_ | _)) {
      ldDp := VecInit(ldDp.map(x => x << 1))
    }
  }

  /*
  Update conditions for a given pdest are mutually exclusive by construction, even though a wakeup and
  a cancel for the same pdest may arrive in the same cycle. The lifetime of a pdest is either
    rename alloc => wakeUp / cancel => ... => wakeUp / cancel => wakeUp
  or
    rename alloc => wbMask  // TODO: wbMask is still needed because the wakeup network is only partial
  wakeUpMask already filters out wakeups hit by og0Cancel or loadTransCancel in the same cycle.
  */
  val table = VecInit((0 until numPhyPregs).zip(tableUpdate).map{ case (idx, update) =>
    RegEnable(update, 0.U(1.W), allocMask(idx) || ldCancelMask(idx) || wakeUpMask(idx) || wbMask(idx))
  }).asUInt

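  // Next-state priority: a wakeup or writeback clears the busy bit; otherwise an allocation or a
  // cancelled load dependency sets it (int preg 0 is pinned ready); otherwise the bit is held.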
  tableUpdate.zipWithIndex.foreach{ case (update, idx) =>
    when(wakeUpMask(idx) || wbMask(idx)) {
      update := false.B                                   //ready
    }
    .elsewhen(allocMask(idx) || ldCancelMask(idx)) {
      update := true.B                                    //busy
      if (idx == 0 && pregWB.isInstanceOf[IntWB]) {
        // Int RegFile 0 is always ready
        update := false.B
      }
    }
    .otherwise {
      update := table(idx)
    }
  }

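  // read ports: a preg is ready when its busy bit is clear; also expose its pending load dependency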
  io.read.foreach{ res =>
    res.resp := !table(res.req)
    res.loadDependency := loadDependency(res.req)
  }

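  // busyCount: popcount of the busy bits, split into odd/even halves and folded over two register
  // stages (presumably to ease timing); it only feeds the perf events below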
  val oddTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 1).map(_._1)
  val evenTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 0).map(_._1)
  val busyCount = RegNext(RegNext(PopCount(oddTable)) + RegNext(PopCount(evenTable)))

  XSPerfAccumulate("busy_count", PopCount(table))

  val perfEvents = Seq(
    ("bt_std_freelist_1_4_valid", busyCount <= (numPhyPregs / 4).U                                       ),
    ("bt_std_freelist_2_4_valid", busyCount > (numPhyPregs / 4).U && busyCount <= (numPhyPregs / 2).U    ),
    ("bt_std_freelist_3_4_valid", busyCount > (numPhyPregs / 2).U && busyCount <= (numPhyPregs * 3 / 4).U),
    ("bt_std_freelist_4_4_valid", busyCount > (numPhyPregs * 3 / 4).U                                    )
  )
  generatePerfEvent()
}