xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision e4f69d78f24895ac36a5a6c704cec53e4af72485)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

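// Base class for ordinary XiangShan modules: mixes in the core parameters and the
// rocket-chip FPU parameters so both are implicitly available in every module body.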
abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

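// Minimal usage sketch for NeedImpl (hypothetical module, kept as a comment so it does
// not affect elaboration): a stub module mixes in NeedImpl so its IO is tied off to
// DontCare and a warning is printed until the real logic lands.
//
//   class DummyUnit(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle {
//       val in  = Input(UInt(8.W))
//       val out = Output(UInt(8.W))
//     })
//   }

// Describes one writeback source as a sequence of port groups, each group listing the
// ExuConfigs that may write back through it; `++` merges two sources by concatenating
// their groups.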
class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

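// Implemented by lazy modules that produce writeback results. writebackSource /
// writebackSource1 fetch the Valid- or Decoupled-based writeback bundles from the
// module implementation and check that their shape matches writebackSourceParams.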
trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

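// Module-implementation side of a writeback source: a concrete module overrides one of
// the two accessors below, depending on whether its writeback is Valid- or Decoupled-based.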
trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}
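
// Usage note for HasWritebackSink (see XSCoreBase below): a sink such as wb2Ctrl or
// ctrlBlock registers its sources with addWritebackSink() during lazy-module
// construction, and its module implementation later calls generateWritebackIO(), which
// collects the ValidIO[ExuOutput] signals of every registered source via
// writebackSinksImp().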

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

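// Diplomatic (LazyModule) half of the core: instantiates the frontend, the L2 TLB (PTW),
// the writeback arbiter, the execution blocks, the memory block and the control block,
// and registers the writeback source/sink relationships between them.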
abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

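  // The writeback arbiter funnels the per-EXU writeback ports down to NRIntWritePorts
  // integer and NRFpWritePorts floating-point register-file write ports.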
  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RSs according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )

  // compute the outer fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exe units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUop from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

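  // In the dispatch-port descriptions below, each (i, j) pair selects entry i of the
  // corresponding schedulePorts group above and dequeue port j of that entry.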
  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

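  // MemBlock takes its IssQueSize from the memory RS entry count computed by the first
  // ExuBlock's scheduler, so the parameter set is altered locally.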
  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

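// Module implementation of the core: wires the frontend, control block, execution
// blocks, memory block, CSR/fence unit and PTW together, and exposes the hart-level IO.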
class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // MemBlock load-error exception writeback, 1 cycle after the normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.lqDeq
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

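  // Collect the fast wakeup signals from the execution blocks and the memory block, then
  // reorder them to match the writeback-arbiter port order before broadcasting them back
  // to the schedulers.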
  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))
  memBlock.io.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.lqFull := memBlock.io.lqFull
    exu.scheExtra.sqFull := memBlock.io.sqFull
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

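  // Map the diplomatic interrupt sinks onto the CSR external-interrupt bits: CLINT drives
  // msip/mtip, PLIC drives meip/seip, and the debug module drives the debug interrupt.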
  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls

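  // Two-stage TLB-to-PTW request path: the level-1 PTWFilter filters duplicate requests
  // from the ITLB/DTLB, and the level-2 PTWRepeaterNB forwards them to the shared L2 TLB (ptw).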
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1  = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2  = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  // if the L2 prefetcher uses stream prefetching, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}