/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}
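// A minimal usage sketch (FooStub is a hypothetical name, not a module in this code base):
// mixing in NeedImpl lets a stub compile with its IO tied to DontCare until it is implemented.
//   class FooStub(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val out = Output(UInt(8.W)) })
//   }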

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

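// Mixed into LazyModules that produce writeback results. It pairs the diplomacy-level
// parameters (writebackSourceParams) with the implementation (writebackSourceImp) and
// checks that the hardware sources handed out match those parameters in shape.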
trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

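// Implementation-side counterpart of HasWritebackSource: exposes the actual writeback
// bundles, either as ValidIO (writebackSource) or as DecoupledIO (writebackSource1).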
trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

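// Mixed into LazyModules that consume writeback results. Sources are registered with
// addWritebackSink and the corresponding hardware is later collected via writebackSinksImp.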
trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
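  // For example (illustrative only, not a call made in this file): selWritebackSinks(_.length)
  // returns the index of the registered sink group with the fewest writeback ports.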
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
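  // clint_int_sink carries msip/mtip, debug_int_sink the debug interrupt, and
  // plic_int_sink the machine/supervisor external interrupts (meip/seip); see the
  // externalInterrupt wiring in XSCoreImp below.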
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate rs according to number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )
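  // schedulePorts(0) describes the integer/memory scheduler and schedulePorts(1) the
  // floating-point scheduler.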

  // outer fast wakeup ports are generated here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler that need fastUops from exe units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers that need fastUop from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

  // allow mdu and fmisc to have 2*numDeq enqueue ports
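  // Each (x, y) pair below is (position of the target exu config within its schedulePorts
  // entry above, enqueue port index of that reservation station).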
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

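  // MemBlock is built with an altered Parameters instance so that its IssQueSize matches
  // the number of RS entries of the memory scheduler inside exuBlocks.head.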
  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

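  // Writeback routing at the LazyModule level: wb2Ctrl sinks the raw writeback of all exu
  // blocks and memBlock, while ctrlBlock sinks both wb2Ctrl and the arbitrated writeback
  // from wbArbiter.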
  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.mem_to_ooo.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.mem_to_ooo.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.mem_to_ooo.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.ooo_to_mem.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.mem_to_ooo.lqDeq
    b.io.scheExtra.scommit := memBlock.io.mem_to_ooo.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.mem_to_ooo.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
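  // For each arbiter connection, the fastUop of its first source is taken so that the fast
  // wakeup signals line up with the arbitrated writeback ports driven into the exu blocks.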

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.ooo_to_mem.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.mem_to_ooo.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.mem_to_ooo.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.mem_to_ooo.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.mem_to_ooo.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.ooo_to_mem.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))
  memBlock.io.ooo_to_mem.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.ooo_to_mem.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.ooo_to_mem.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastFuOpType.get <> memBlock.io.ooo_to_mem.loadFastFuOpType
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.ooo_to_mem.loadFastImm

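  // The std (store data) issue ports are the last StuCnt issue ports of the memory
  // scheduler; they drive memWaitUpdateReq.stdIssue in the loop below.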
  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.mem_to_ooo.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.mem_to_ooo.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.mem_to_ooo.lsqio.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.mem_to_ooo.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.mem_to_ooo.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.ooo_to_mem.sfence
  memBlock.io.fetch_to_mem.itlb <> frontend.io.ptw
  memBlock.io.ooo_to_mem.flushSb := fenceio.sbuffer.flushSb
  fenceio.sbuffer.sbIsEmpty := memBlock.io.mem_to_ooo.sbIsEmpty


  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get

  memBlock.io.ooo_to_mem.csrCtrl <> csrioIn.customCtrl
  memBlock.io.ooo_to_mem.tlbCsr <> csrioIn.tlb

  memBlock.io.ooo_to_mem.lsqio.lcommit   := ctrlBlock.io.robio.lsq.lcommit
  memBlock.io.ooo_to_mem.lsqio.scommit   := ctrlBlock.io.robio.lsq.scommit
  memBlock.io.ooo_to_mem.lsqio.pendingld := ctrlBlock.io.robio.lsq.pendingld
  memBlock.io.ooo_to_mem.lsqio.pendingst := ctrlBlock.io.robio.lsq.pendingst
  memBlock.io.ooo_to_mem.lsqio.commit    := ctrlBlock.io.robio.lsq.commit
  memBlock.io.ooo_to_mem.lsqio.pendingPtr:= ctrlBlock.io.robio.lsq.pendingPtr
  ctrlBlock.io.robio.lsq.mmio            := memBlock.io.mem_to_ooo.lsqio.mmio
  ctrlBlock.io.robio.lsq.uop             := memBlock.io.mem_to_ooo.lsqio.uop
//  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.ooo_to_mem.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.mem_to_ooo.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
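  // The nesting below is consumed by utility.ResetGen, which staggers the per-block resets:
  // memBlock sits at the root of the tree, with the exu blocks, ctrlBlock and finally the
  // frontend nested more deeply.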
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}