xref: /XiangShan/src/main/scala/xiangshan/backend/rename/BusyTable.scala (revision bb2f3f51dd67f6e16e0cc1ffe43368c9fc7e4aef)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend.rename

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.backend.Bundles._
import xiangshan.backend.datapath.WbConfig._
import xiangshan.backend.issue.SchdBlockParams
import xiangshan.backend.datapath.DataSource

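/** One read port of the busy table: look up a physical register index and get back
  * whether the register is ready, plus its outstanding load dependency vector. */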
class BusyTableReadIO(implicit p: Parameters) extends XSBundle {
  val req = Input(UInt(PhyRegIdxWidth.W))
  val resp = Output(Bool())
  val loadDependency = Vec(LoadPipelineWidth, Output(UInt(LoadDependencyWidth.W)))
}

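/** Tracks the busy/ready state of every physical register of one register file
  * (selected by `pregWB`: int, fp, vec, v0 or vl). An entry becomes busy when rename
  * allocates the preg and becomes ready again on wakeup or write-back.
  *
  * @param numReadPorts  number of lookup ports (one per source operand read)
  * @param numWritePorts number of write-back ports that can mark a preg ready
  * @param numPhyPregs   number of physical registers tracked by this table
  * @param pregWB        write-back configuration selecting which register file this table serves
  */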
class BusyTable(numReadPorts: Int, numWritePorts: Int, numPhyPregs: Int, pregWB: PregWB)(implicit p: Parameters, params: SchdBlockParams) extends XSModule with HasPerfEvents {
  val io = IO(new Bundle() {
    // set preg state to busy
    val allocPregs = Vec(RenameWidth, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // set preg state to ready (write back regfile + rob walk)
    val wbPregs = Vec(numWritePorts, Flipped(ValidIO(UInt(PhyRegIdxWidth.W))))
    // fast wakeup
    val wakeUp: MixedVec[ValidIO[IssueQueueIQWakeUpBundle]] = Flipped(params.genIQWakeUpInValidBundle)
    // cancel from datapath
    val cancel = Vec(backendParams.numExu, Flipped(ValidIO(new CancelSignal)))
    // cancel from mem
    val ldCancel = Vec(backendParams.LdExuCnt, Flipped(new LoadCancelIO))
    // read preg state
    val read = Vec(numReadPorts, new BusyTableReadIO)
  })

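  // loadDependency: per-preg shift registers tracking which in-flight loads each preg
  // speculatively depends on; the remaining wires are combinational helpers for this
  // cycle's update (per-source shifted dependencies, next busy bits, per-preg wakeup one-hots)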
  val loadDependency = RegInit(0.U.asTypeOf(Vec(numPhyPregs, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W)))))
  val shiftLoadDependency = Wire(Vec(io.wakeUp.size, Vec(LoadPipelineWidth, UInt(LoadDependencyWidth.W))))
  val tableUpdate = Wire(Vec(numPhyPregs, Bool()))
  val wakeupOHVec = Wire(Vec(numPhyPregs, UInt(io.wakeUp.size.W)))

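  // OR the one-hot encodings of all valid requests into a single per-preg bit mask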
  def reqVecToMask(rVec: Vec[Valid[UInt]]): UInt = {
    ParallelOR(rVec.map(v => Mux(v.valid, UIntToOH(v.bits), 0.U)))
  }

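  // Build the load dependency value each wakeup source writes into the table: for the
  // lane belonging to the load deq port that issued the wakeup itself, seed the
  // dependency with 1 (it starts this cycle); for every other lane, forward the
  // source's own dependency shifted left by one, since it is one cycle older by the
  // time it is recorded here.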
  shiftLoadDependency.zip(io.wakeUp.map(_.bits.loadDependency)).zip(params.wakeUpInExuSources.map(_.name)).foreach {
    case ((deps, originalDeps), name) => deps.zip(originalDeps).zipWithIndex.foreach {
      case ((dep, originalDep), deqPortIdx) =>
        if (params.backendParam.getLdExuIdx(params.backendParam.allExuParams.find(_.name == name).get) == deqPortIdx)
          dep := 1.U
        else
          dep := originalDep << 1
    }
  }

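  // For every preg, collect a one-hot vector of the wakeup sources targeting it this
  // cycle. Only wakeups that write this table's register file count, and wakeups whose
  // load dependency has just been killed by a load cancel are filtered out.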
  wakeupOHVec.zipWithIndex.foreach{ case (wakeupOH, idx) =>
    val tmp = pregWB match {
      case IntWB(_, _) => io.wakeUp.map(x => x.valid && x.bits.rfWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), io.ldCancel))
      case FpWB(_, _)  => io.wakeUp.map(x => x.valid && x.bits.fpWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), io.ldCancel))
      case VfWB(_, _)  => io.wakeUp.map(x => x.valid && x.bits.vecWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), io.ldCancel))
      case V0WB(_, _)  => io.wakeUp.map(x => x.valid && x.bits.v0Wen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), io.ldCancel))
      case VlWB(_, _)  => io.wakeUp.map(x => x.valid && x.bits.vlWen && UIntToOH(x.bits.pdest)(idx) && !LoadShouldCancel(Some(x.bits.loadDependency), io.ldCancel))
      case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
    }
    wakeupOH := (if (io.wakeUp.nonEmpty) VecInit(tmp.toSeq).asUInt else 0.U)
  }
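  // One bit per preg for each event class that can change an entry this cycle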
  val wbMask = reqVecToMask(io.wbPregs)
  val allocMask = reqVecToMask(io.allocPregs)
  val wakeUpMask = VecInit(wakeupOHVec.map(_.orR).toSeq).asUInt
  val cancelMask = pregWB match {
    case IntWB(_, _) => io.cancel.map(x => Mux(x.valid && x.bits.rfWen, UIntToOH(x.bits.pdest), 0.U)).fold(0.U)(_ | _)
    case FpWB(_, _)  => io.cancel.map(x => Mux(x.valid && x.bits.fpWen, UIntToOH(x.bits.pdest), 0.U)).fold(0.U)(_ | _)
    case VfWB(_, _)  => io.cancel.map(x => Mux(x.valid && x.bits.vecWen, UIntToOH(x.bits.pdest), 0.U)).fold(0.U)(_ | _)
    case V0WB(_, _)  => io.cancel.map(x => Mux(x.valid && x.bits.v0Wen, UIntToOH(x.bits.pdest), 0.U)).fold(0.U)(_ | _)
    case VlWB(_, _)  => io.cancel.map(x => Mux(x.valid && x.bits.vlWen, UIntToOH(x.bits.pdest), 0.U)).fold(0.U)(_ | _)
    case _ => throw new IllegalArgumentException(s"WbConfig ${pregWB} is not permitted")
  }
  val ldCancelMask = loadDependency.map(x => LoadShouldCancel(Some(x), io.ldCancel))

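  // Update the dependency shift registers. Alloc / cancel / writeback / load cancel all
  // clear an entry; a wakeup loads the dependency of its source (Mux1H assumes at most
  // one wakeup per preg per cycle); otherwise any non-zero dependency shifts left by one
  // each cycle until it falls out of the speculation window (e.g. a lane written as 001
  // becomes 010, then 100, then 0).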
  loadDependency.zipWithIndex.foreach{ case (ldDp, idx) =>
    when(allocMask(idx) || cancelMask(idx) || wbMask(idx) || ldCancelMask(idx)) {
      ldDp := 0.U.asTypeOf(ldDp)
    }.elsewhen(wakeUpMask(idx)) {
      ldDp := (if (io.wakeUp.nonEmpty) Mux1H(wakeupOHVec(idx), shiftLoadDependency) else 0.U.asTypeOf(ldDp))
    }.elsewhen(ldDp.map(x => x.orR).reduce(_ | _)) {
      ldDp := VecInit(ldDp.map(x => x << 1))
    }
  }

  /*
   * The update conditions below are mutually exclusive, with one exception: a wakeUp
   * and a cancel for the same pdest may arrive in the same cycle.
   * The lifetime of a pdest is either:
   *   rename alloc => wakeUp / cancel => ... => wakeUp / cancel => wakeUp
   * or:
   *   rename alloc => wbMask  // TODO: wbMask is still needed because the wakeUp signals only cover part of the writebacks for now
   * The bypass state lasts at most one cycle: a cancel returns the entry to busy,
   * otherwise the value becomes available from the regFile.
   */
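  // The busy bits themselves. RegEnable keeps each entry unchanged in cycles where
  // none of its update events fires.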
  val table = VecInit((0 until numPhyPregs).zip(tableUpdate).map{ case (idx, update) =>
    RegEnable(update, 0.U(1.W), allocMask(idx) || cancelMask(idx) || ldCancelMask(idx) || wakeUpMask(idx) || wbMask(idx))
  }).asUInt

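  // Next-state logic: busy on alloc or cancel, ready on wakeup or writeback, otherwise hold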
  tableUpdate.zipWithIndex.foreach{ case (update, idx) =>
    when(allocMask(idx) || cancelMask(idx) || ldCancelMask(idx)) {
      update := true.B                                    // busy
      if (idx == 0 && pregWB.isInstanceOf[IntWB]) {
        // Int RegFile 0 is always ready
        update := false.B
      }
    }.elsewhen(wakeUpMask(idx) || wbMask(idx)) {
      update := false.B                                   // ready
    }.otherwise {
      update := table(idx)
    }
  }

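  // Read ports: a preg is ready when its busy bit is clear; its load dependency is also
  // exposed so consumers can be cancelled if a speculative load they depend on fails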
  io.read.foreach{ case res =>
    res.resp := !table(res.req)
    res.loadDependency := loadDependency(res.req)
  }

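  // For timing, the busy count is computed over two register stages: the table is split
  // into its odd and even bits, each half is PopCounted and registered, and the halves
  // are summed one cycle later. busyCount only feeds the perf events below, so the
  // extra latency is harmless.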
  val oddTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 1).map(_._1)
  val evenTable = table.asBools.zipWithIndex.filter(_._2 % 2 == 0).map(_._1)
  val busyCount = RegNext(RegNext(PopCount(oddTable)) + RegNext(PopCount(evenTable)))

  XSPerfAccumulate("busy_count", PopCount(table))

  val perfEvents = Seq(
    ("std_freelist_1_4_valid", busyCount < (numPhyPregs / 4).U                                        ),
    ("std_freelist_2_4_valid", busyCount > (numPhyPregs / 4).U && busyCount <= (numPhyPregs / 2).U    ),
    ("std_freelist_3_4_valid", busyCount > (numPhyPregs / 2).U && busyCount <= (numPhyPregs * 3 / 4).U),
    ("std_freelist_4_4_valid", busyCount > (numPhyPregs * 3 / 4).U                                    )
  )
  generatePerfEvent()
}