xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision 57a10886ebedfc930c9a466960add81768f37697)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._

import scala.collection.mutable.ListBuffer

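// Base class for most XiangShan hardware modules: it pulls in the core parameters
// (HasXSParameter) and the rocket-chip FPU parameters (HasFPUParameters) through the
// implicit Parameters instance.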
abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait once the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

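// Describes the shape of a module's writeback outputs: one Seq[ExuConfig] per
// writeback group. Parameter sets can be concatenated with ++ when several sources
// feed the same sink, e.g. (a ++ b).length == a.length + b.length.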
class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

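// Mixed into lazy modules that produce writeback results. writebackSourceParams
// declares the groups of ExuConfigs this module writes back; writebackSource /
// writebackSource1 fetch the matching Valid / Decoupled ExuOutput ports from the
// module implementation (a HasWritebackSourceImp) and check that the shapes agree.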
trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

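// Mixed into lazy modules that consume writeback results. Sources are registered
// with addWritebackSink; selWritebackSinks returns the index of the registered sink
// whose combined parameters minimize the given cost function; generateWritebackIO
// (implemented by each sink) performs the actual wiring in the module implementation.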
trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

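// Diplomatic (LazyModule) skeleton of one XiangShan core: it instantiates the
// frontend, the L2 TLB (page table walker), the execution blocks, the memory block
// and the control block, and sets up their writeback arbitration at elaboration time.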
abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts: Seq[Seq[ExuConfig]] = wbArbiter.intWbPorts
  val fpWbPorts: Seq[Seq[ExuConfig]] = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate rs according to number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val aluScheLaneCfg = ScheLaneConfig(
    aluRSMod,
    AluExeUnitCfg,
    exuParameters.AluCnt,
    Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg))
  val mulScheLaneCfg = ScheLaneConfig(
    mulRSMod,
    MulDivExeUnitCfg,
    exuParameters.MduCnt,
    Seq(AluExeUnitCfg, MulDivExeUnitCfg))
  val jumpScheLaneCfg = ScheLaneConfig(
    jumpRSMod,
    JumpCSRExeUnitCfg,
    1)
  val loadScheLaneCfg = ScheLaneConfig(
    loadRSMod,
    LdExeUnitCfg,
    exuParameters.LduCnt,
    Seq(AluExeUnitCfg, LdExeUnitCfg))
  val staScheLaneCfg = ScheLaneConfig(
    staRSMod,
    StaExeUnitCfg,
    exuParameters.StuCnt)
  val stdScheLaneCfg = ScheLaneConfig(
    stdRSMod,
    StdExeUnitCfg,
    exuParameters.StuCnt)
  val fmaScheLaneCfg = ScheLaneConfig(
    fmaRSMod,
    FmacExeUnitCfg,
    exuParameters.FmacCnt,
    Seq(),
    Seq(FmacExeUnitCfg, FmiscExeUnitCfg))
  val fmiscScheLaneCfg = ScheLaneConfig(
    fmiscRSMod,
    FmiscExeUnitCfg,
    exuParameters.FmiscCnt)

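  // The first group of lanes forms the integer/memory scheduler (ALU, MUL/DIV,
  // Jump+CSR, load, store-address, store-data); the second group forms the
  // floating-point scheduler (FMA, FMISC). Each group maps to one ExuBlock below.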
  val scheduleCfgs = Seq(
    Seq(
      aluScheLaneCfg,
      mulScheLaneCfg,
      jumpScheLaneCfg,
      loadScheLaneCfg,
      staScheLaneCfg,
      stdScheLaneCfg
    ),
    Seq(
      fmaScheLaneCfg,
      fmiscScheLaneCfg
    )
  )
  // generate the inter-scheduler (outer) fast wakeup ports here
  val otherFastPorts: Seq[Seq[Seq[Int]]] = scheduleCfgs.zipWithIndex.map { case (sche, i) =>
    val otherCfg = scheduleCfgs.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exe units
      val outerWakeupInSche = sche.filter(_.exuConfig.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_.intFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val intraFpScheOuter = outerWakeupInSche.filter(_.fpFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      // exe units from other schedulers need fastUop from outside
      val otherIntSource = otherCfg.filter(_.intFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val otherFpSource = otherCfg.filter(_.fpFastWakeupTarget.contains(cfg.exuConfig)).map(_.exuConfig)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg.exuConfig, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

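  // Dispatch port mapping for each scheduler: every entry lists the DpPortMapConfig
  // targets (assumed to be (reservation-station index, enqueue-port index) within the
  // scheduler) that a single dispatch slot may feed; see the backend dispatch/scheduler
  // code for the authoritative definition.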
  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq(
      DpPortMapConfig(0, i),
      DpPortMapConfig(1, i),
      DpPortMapConfig(2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq(
      DpPortMapConfig(0, i),
      DpPortMapConfig(1, i))
    else Seq(DpPortMapConfig(0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq(DpPortMapConfig(3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq(DpPortMapConfig(4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq(DpPortMapConfig(5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq(DpPortMapConfig(0, i), DpPortMapConfig(1, i))
    else Seq(DpPortMapConfig(0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = scheduleCfgs.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) => {
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
    }
  }
  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

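  // Writeback results from the execution blocks and the memory block flow into
  // wb2Ctrl, which forwards them to the control block; the control block is also
  // registered as a sink of both wb2Ctrl and the register-file write arbiter.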
  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

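  // Gather every function-unit writeback (execution blocks + memory block), run it
  // through the writeback arbiter, and use the arbitrated outputs as the register
  // file write ports.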
  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

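  // Exactly one execution block hosts the Jump/CSR unit; take its CSR and fence IO
  // bundles and use them to drive the frontend, the memory block and the TLBs below.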
  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := ctrlBlock.io.robio.lsq.lcommit
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

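  // Collect fast-wakeup uops from the execution blocks and the memory block, split
  // them into int/fp groups, and reorder them to match the arbiter's writeback port
  // layout before broadcasting them back to the schedulers.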
  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  memBlock.io.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.foreach(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWritebackInt <> rfWriteback.take(NRIntWritePorts)
    exu.rfWritebackFp <> rfWriteback.drop(NRIntWritePorts)
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.debug_vec_rat <> ctrlBlock.io.debug_vec_rat
    exu.scheExtra.lqFull := memBlock.io.lqFull
    exu.scheExtra.sqFull := memBlock.io.sqFull
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

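  // CSR inputs: performance counters, floating-point status, exception/interrupt
  // information from the ROB, the memory exception address, and the platform
  // interrupt lines collected through the diplomatic sink nodes.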
  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.vpu <> DontCare
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)

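  // ITLB and DTLB miss requests are filtered/merged by a PTWFilter and then pass
  // through a PTWRepeaterNB stage before reaching the shared L2 TLB (the ptw module).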
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1 = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2 = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

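  // The nested ResetGenNode structure below defines a staged reset: utility.ResetGen
  // synchronizes and releases each level of the tree relative to its parent, so the
  // blocks do not all leave reset in the same cycle.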
  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}