xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision b9e121dff513e733e443a16e49648e82b9583af6)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after implementing the module logic
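// Mixing in NeedImpl ties every IO port to DontCare (and prints a warning at elaboration),
// so a skeleton module can be elaborated before its logic is written.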
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
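  // A sink registers candidate source groups via addWritebackSink and later collects their
  // ValidIO[ExuOutput] bundles through writebackSinksImp; selWritebackSinks returns the index
  // of the registered group that minimizes the given cost function over the merged parameters.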
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
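  // clint: msip + mtip, debug: debug interrupt, plic: meip (port 0) and seip (port 1);
  // see the externalInterrupt wiring in XSCoreImp below.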
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RS according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )
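  // schedulePorts(0) describes the integer/memory scheduler (ALU, MDU, Jump/CSR, load, sta, std);
  // schedulePorts(1) describes the floating-point scheduler (FMAC, FMISC).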

  // outer (inter-scheduler) fast wakeup ports are generated here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exe units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUop from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }
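  // otherFastPorts(i)(j): sorted indices of the writeback ports that provide fast wakeup to the
  // j-th exu config of scheduler i (fp port indices are offset by intWbPorts.length).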

  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
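  // Reading of the (cfg, port) pairs above (inferred, not documented here): each pair routes a
  // dispatch port to enqueue port `port` of the cfg-th entry of the corresponding schedulePorts
  // sequence, e.g. 0 = ALU and 3 = load in the int/mem scheduler.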

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))
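  // MemBlock is elaborated with IssQueSize overridden to the memory RS entry count reported by
  // the integer/memory scheduler (exuBlocks.head).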

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
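  // Both wb2Ctrl (per-exu writeback gathered for the control block) and wbArbiter (the
  // arbitrated register-file write ports) are registered above as writeback sinks of ctrlBlock.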
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2Hint = Input(Valid(new L2ToL1Hint()))
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.lqDeq
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
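  // the loop above broadcasts load/store queue dequeue and cancel counts to every scheduler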
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
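  // allFastUop1 lines the fast wakeups up with the arbitrated writeback ports: one representative
  // source (c.head) per int/fp write port.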

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))
  memBlock.io.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.lqFull := memBlock.io.lqFull
    exu.scheExtra.sqFull := memBlock.io.sqFull
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.l2Hint.valid := io.l2Hint.valid
  memBlock.io.l2Hint.bits.sourceId := io.l2Hint.bits.sourceId

  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1  = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2  = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
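  // ITLB/DTLB miss requests pass through a PTWFilter and then a non-blocking repeater before
  // reaching the L2 TLB: ptw.io.tlb(0) serves instruction fetch, ptw.io.tlb(1) serves data.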
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
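  // The nested ResetGenNodes below let utility.ResetGen stagger the reset release across the
  // listed module groups (an inferred summary of the reset tree; see utility.ResetGen).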
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}