xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision 1a0debc27041058fb54ba12d616d87f838663e7c)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}
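
// Illustrative usage of NeedImpl (a sketch, not part of this file): a stub module can mix it
// in so that all of its IOs are tied off with DontCare until the real logic lands.
// The module name below is hypothetical.
//   class MyStubModule(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val out = Output(UInt(8.W)) })  // driven to DontCare by NeedImpl
//   }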

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}
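
// How the two traits above are meant to be used (a sketch with hypothetical names): the
// LazyModule side mixes in HasWritebackSource and declares its WritebackSourceParams, while
// its module implementation mixes in HasWritebackSourceImp and overrides writebackSource /
// writebackSource1 to expose the actual ExuOutput wires.
//   class FooBlock(implicit p: Parameters) extends LazyModule with HasWritebackSource {
//     lazy val module = new LazyModuleImp(this) with HasWritebackSourceImp {
//       override def writebackSource = Some(Seq(fooWbSignals))  // fooWbSignals: Seq[Valid[ExuOutput]]
//     }
//     override val writebackSourceParams = Seq(new WritebackSourceParams(Seq(fooExuConfigs)))
//     override lazy val writebackSourceImp: HasWritebackSourceImp = module
//   }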

trait HasWritebackSink {
  // Caches all writeback sources. The selected sink group is the one that minimizes the given
  // cost function (typically the smallest port count).
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Unit
}
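
// Sketch of the sink side (hypothetical names): a consumer registers its producers and then
// derives its writeback ports from them. addWritebackSink defaults to params group 0 of each
// source; selWritebackSinks picks the registered group that minimizes the given cost function.
//   ctrlLikeSink.addWritebackSink(Seq(fooBlock, barBlock))
//   val chosen = ctrlLikeSink.selWritebackSinks(_.length)   // index of the smallest group
//   ctrlLikeSink.generateWritebackIO()                      // wires Valid[ExuOutput] from the sources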

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts: Seq[Seq[ExuConfig]] = wbArbiter.intWbPorts
  val fpWbPorts: Seq[Seq[ExuConfig]] = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RS according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val aluScheLaneCfg = ScheLaneConfig(
    rsModGen            = aluRSMod,
    exuConfig           = AluExeUnitCfg,
    numDeq              = exuParameters.AluCnt,
    intFastWakeupTarget = Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg))
  val mulScheLaneCfg = ScheLaneConfig(
    rsModGen            = mulRSMod,
    exuConfig           = MulDivExeUnitCfg,
    numDeq              = exuParameters.MduCnt,
    intFastWakeupTarget = Seq(AluExeUnitCfg, MulDivExeUnitCfg))
  val jumpScheLaneCfg = ScheLaneConfig(
    rsModGen            = jumpRSMod,
    exuConfig           = JumpCSRExeUnitCfg,
    numDeq              = 1)
  val loadScheLaneCfg = ScheLaneConfig(
    rsModGen            = loadRSMod,
    exuConfig           = LdExeUnitCfg,
    numDeq              = exuParameters.LduCnt,
    intFastWakeupTarget = Seq(AluExeUnitCfg, LdExeUnitCfg))
  val staScheLaneCfg = ScheLaneConfig(
    rsModGen            = staRSMod,
    exuConfig           = StaExeUnitCfg,
    numDeq              = exuParameters.StuCnt)
  val stdScheLaneCfg = ScheLaneConfig(
    rsModGen            = stdRSMod,
    exuConfig           = StdExeUnitCfg,
    numDeq              = exuParameters.StuCnt)
  val fmaScheLaneCfg = ScheLaneConfig(
    rsModGen            = fmaRSMod,
    exuConfig           = FmacExeUnitCfg,
    numDeq              = exuParameters.FmacCnt,
    intFastWakeupTarget = Seq(),
    fpFastWakeupTarget  = Seq(FmacExeUnitCfg, FmiscExeUnitCfg))
  val fmiscScheLaneCfg = ScheLaneConfig(
    rsModGen            = fmiscRSMod,
    exuConfig           = FmiscExeUnitCfg,
    numDeq              = exuParameters.FmiscCnt)

  val intScheLaneCfgs = Seq(
    aluScheLaneCfg,
    mulScheLaneCfg,
    jumpScheLaneCfg,
    loadScheLaneCfg,
    staScheLaneCfg,
    stdScheLaneCfg
  )
  val vecScheLaneCfgs = Seq(
    fmaScheLaneCfg,
    fmiscScheLaneCfg
  )
  val allScheLaneCfgs = Seq(intScheLaneCfgs, vecScheLaneCfgs)

  // set up the inter-scheduler fast wakeup ports here
  def getOtherFastPorts(sche: Seq[ScheLaneConfig]): Seq[Seq[Int]] = {
    val otherCfg = allScheLaneCfgs.filter(_ != sche).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exe units
      val outerWakeupInSche = sche.filter(_.exuConfig.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_.intFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val intraFpScheOuter = outerWakeupInSche.filter(_.fpFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      // exe units from other schedulers need fastUops from outside
      val otherIntSource = otherCfg.filter(_.intFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val otherFpSource = otherCfg.filter(_.fpFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg.exuConfig, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources: $outerPorts")
    outerPorts
  }
  val intOtherFastPorts = getOtherFastPorts(intScheLaneCfgs)
  val vecOtherFastPorts = getOtherFastPorts(vecScheLaneCfgs)
  val otherFastPorts = Seq(intOtherFastPorts, vecOtherFastPorts)
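  // Note on the result above: for each lane config, the Seq[Int] lists the positions of its
  // fast-wakeup producers in the merged writeback port vector (int ports first, fp ports offset
  // by intWbPorts.length). Reading one entry (illustrative only):
  //   val aluFastSources: Seq[Int] = intOtherFastPorts.head
  //   // e.g. Seq(0, 2) would mean int writeback ports 0 and 2 drive the ALU lanes' fast wakeup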

  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq(
      DpPortMapConfig(0, i),
      DpPortMapConfig(1, i),
      DpPortMapConfig(2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq(
      DpPortMapConfig(0, i),
      DpPortMapConfig(1, i))
    else Seq(DpPortMapConfig(0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq(DpPortMapConfig(3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq(DpPortMapConfig(4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq(DpPortMapConfig(5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq(DpPortMapConfig(0, i), DpPortMapConfig(1, i))
    else Seq(DpPortMapConfig(0, i))
  })
  val intDispatchPorts = intDpPorts ++ lsDpPorts
  val vecDispatchPorts = fpDpPorts
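  // Note on the mapping above: DpPortMapConfig(rs, port) ties a dispatch port to enqueue port
  // `port` of reservation station `rs`, indexed in this scheduler's lane order
  // (int: 0 = alu, 1 = mul/div, 2 = jump/csr, 3 = load, 4 = sta, 5 = std; vec: 0 = fma, 1 = fmisc).
  // For example, assuming a configuration with 4 ALUs, 2 MDUs and 1 jump unit, dispatch port 0
  // feeds the ALU, MDU and Jump/CSR RS, ports 1..3 feed the ALU and MDU RS, so the MDU RS ends
  // up with 2 * numDeq enqueue ports, matching the comment above.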

  val intExuBlock = LazyModule(new IntExuBlock(
    configVec           = intScheLaneCfgs,
    dpPortVec           = intDispatchPorts,
    intRfWbPortVec      = intWbPorts,
    fpRfWbPortVec       = fpWbPorts,
    outFastPortVec      = intOtherFastPorts
  ))
  val vecExuBlock = LazyModule(new VecExuBlock(
    configVec           = vecScheLaneCfgs,
    dpPortVec           = vecDispatchPorts,
    intRfWbPortVec      = intWbPorts,
    fpRfWbPortVec       = fpWbPorts,
    outFastPortVec      = vecOtherFastPorts
  ))
  val exuBlocks = Seq(intExuBlock, vecExuBlock)
  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = intExuBlock.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

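// XSCoreBase above builds the diplomatic view of the core (interrupt sinks, TileLink nodes and
// the writeback topology); the hardware itself lives in XSCoreImp below. XSCore ties the two
// together through the standard rocket-chip LazyModule / LazyModuleImp split.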
class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val intExuBlock = outer.intExuBlock.module
  val vecExuBlock = outer.vecExuBlock.module
  val exuBlocks = Seq(intExuBlock, vecExuBlock)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  // require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = intExuBlock // exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.extraio.fuExtra.csrio
  val fenceio = csrFenceMod.extraio.fuExtra.fenceio

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  ctrlBlock.io.exuRedirect <> intExuBlock.extraio.fuExtra.exuRedirect
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  intExuBlock.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := ctrlBlock.io.robio.lsq.lcommit
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpVecRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
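  // Note: allFastUop is indexed per function unit (exuConfigs order), while allFastUop1 is
  // re-indexed to follow the writeback arbiter's port order (int ports first, then fp ports),
  // using the first unit connected to each arbitrated port as that port's fast-wakeup source.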

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt

  val vconfigReadPort = exuBlocks(0).io.scheExtra.vconfigReadPort.get
  ctrlBlock.io.vconfigReadPort <> vconfigReadPort
  val vconfigArch = vconfigReadPort.data(15, 0).asTypeOf(new VConfig)

  intExuBlock.io.scheExtra.fpRfReadIn.get <> vecExuBlock.io.scheExtra.fpRfReadOut.get
  intExuBlock.io.scheExtra.fpStateReadIn.get <> vecExuBlock.io.scheExtra.fpStateReadOut.get

  for ((c, e) <- ctrlBlock.io.ld_pc_read.zip(intExuBlock.extraio.issue.get)) {
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))
  memBlock.io.issue <> intExuBlock.extraio.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.foreach(_.bits.uop.clearExceptions())
  intExuBlock.io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  intExuBlock.io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = intExuBlock.extraio.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWritebackInt <> rfWriteback.take(NRIntWritePorts)
    exu.rfWritebackFp <> rfWriteback.drop(NRIntWritePorts)
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.debug_vec_rat <> ctrlBlock.io.debug_vec_rat
    exu.scheExtra.lqFull := memBlock.io.lqFull
    exu.scheExtra.sqFull := memBlock.io.sqFull
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach { case (sink, src) =>
      sink.bits := src.bits
      sink.valid := src.valid
    }
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach { case (sink, src) =>
      sink.valid := src.valid
      sink.bits := src.bits
    }
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := intExuBlock.getPerf.dropRight(outer.intExuBlock.scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := vecExuBlock.getPerf.dropRight(outer.vecExuBlock.scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> vecExuBlock.extraio.fuExtra.frm

  csrioIn.vpu.set_vstart.valid <> ctrlBlock.io.robio.toCSR.vcsrFlag
  csrioIn.vpu.set_vl.valid     <> ctrlBlock.io.robio.toCSR.vcsrFlag
  csrioIn.vpu.set_vtype.valid  <> ctrlBlock.io.robio.toCSR.vcsrFlag
  csrioIn.vpu.set_vstart.bits  <> 0.U
  csrioIn.vpu.set_vl.bits <> ZeroExt(vconfigArch.vl, XLEN)
  csrioIn.vpu.set_vtype.bits <> ZeroExt(vconfigArch.vtype.asUInt, XLEN)
  csrioIn.vpu.vxrm <> vecExuBlock.extraio.fuExtra.vxrm
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)
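  // These bits come from the diplomatic sinks declared in XSCoreBase: the CLINT sink carries
  // the software (msip) and timer (mtip) interrupts, the two PLIC sink ports carry the machine-
  // and supervisor-level external interrupts (meip/seip), and the debug sink carries the debug
  // module interrupt.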

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> intExuBlock.io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls

  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1 = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2 = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  // if the L2 prefetcher uses stream prefetching, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}