/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after implementing the module logic
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}
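
// Usage sketch (hypothetical stub, for illustration only): mixing NeedImpl into an XSModule
// lets a skeleton compile before its logic is written, since every IO it creates is tied off
// to DontCare:
//   class FooStub(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val out = Output(Bool()) })  // driven by DontCare via NeedImpl
//   }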

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "writebackSource is not defined (Valid[ExuOutput] writeback expected)")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "source width does not match its writeback params")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "writebackSource1 is not defined (DecoupledIO[ExuOutput] writeback expected)")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "source width does not match its writeback params")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Unit
}
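
// The three traits above form the writeback plumbing used later in this file: a producer
// LazyModule (e.g. an exu block or memBlock) mixes in HasWritebackSource and exposes its
// module implementation (a HasWritebackSourceImp) through writebackSourceImp, while a consumer
// (e.g. wb2Ctrl or ctrlBlock) mixes in HasWritebackSink, registers its producers with
// addWritebackSink() during elaboration, and collects the concrete Valid[ExuOutput] ports via
// writebackSinksImp() / generateWritebackIO() once the module implementations exist.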

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate rs according to number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )
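
  // Reading the first entry above as an example: (AluExeUnitCfg, AluCnt, Seq(AluExeUnitCfg,
  // LdExeUnitCfg, StaExeUnitCfg), Seq()) means the ALU reservation stations expose AluCnt issue
  // ports and an ALU result fast-wakes the ALU, load and store-address reservation stations;
  // the FP scheduler only uses the fpFastWakeupTarget column.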

  // generate the outer fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // fast wakeup sources inside this scheduler (exe units whose wakeup comes from the exu)
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // fast wakeup sources from exe units in other schedulers
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)
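
  // In the dispatch port lists above, each (x, y) pair selects a reservation-station enqueue
  // port: x indexes the exu config within this scheduler's schedulePorts entry (e.g. 0 = ALU,
  // 3 = load for the integer/memory scheduler) and y is the enqueue port index, so a single
  // integer dispatch port can enqueue into the ALU, MDU and jump/CSR reservation stations.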

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := ctrlBlock.io.robio.lsq.lcommit
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
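
  // Exu outputs that share a regfile write port form one wbArbiter connection group; only the
  // first member's fastUop is forwarded here, so allFastUop1 carries exactly one wakeup per
  // write port and lines up one-to-one with rfWriteback.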

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  memBlock.io.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.foreach(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.lqFull := memBlock.io.lqFull
    exu.scheExtra.sqFull := memBlock.io.sqFull
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  // l1 prefetch fuzzer, for debug only
  val debug_l1PrefetchFuzzer = Module(new L1PrefetchFuzzer)
  debug_l1PrefetchFuzzer.io.req <> memBlock.io.prefetch_req
  debug_l1PrefetchFuzzer.io.vaddr := memBlock.io.writeback(0).bits.debug.vaddr
  debug_l1PrefetchFuzzer.io.paddr := memBlock.io.writeback(0).bits.debug.paddr

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

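  // External interrupt sources: machine software/timer interrupts come from the CLINT sink,
  // machine/supervisor external interrupts from the two PLIC contexts, and the debug interrupt
  // from the debug-module sink declared in XSCoreBase.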
  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)

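  // iTLB and dTLB miss requests first pass through a PTWFilter (which merges duplicate requests
  // from the multiple TLB ports) and then a non-blocking PTWRepeaterNB stage before reaching the
  // two L2 TLB ports: ptw.io.tlb(0) for the frontend and ptw.io.tlb(1) for the memory block.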
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1 = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2 = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )
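
  // The nesting above encodes the "one by one" reset staging: memBlock and dtlbRepeater1 hang
  // off the root node, while ctrlBlock, frontend and itlbRepeater1 sit in the deepest
  // ResetGenNodes; ResetGen (from the utility package) is expected to turn each level of
  // nesting into a separate step of the generated reset sequence.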

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}