xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision a0c65233389cccd2fdffe58236fb0a7dedf6d54f)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after implementing the module logic
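// Usage sketch (hypothetical module name, for illustration only): a module whose logic is not
// written yet can mix in NeedImpl so that its IO is created normally but tied off to DontCare:
//   class FooStub(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val out = Output(UInt(8.W)) })
//   }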
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

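// Parameters of a writeback source: one Seq[ExuConfig] per writeback port; a single port may be
// shared by several exu configs.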
class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

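// Mixed into LazyModules that produce writeback results. The helpers below fetch the concrete
// signals from the module implementation (HasWritebackSourceImp) and check that their shape
// matches writebackSourceParams.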
trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

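// Mixed into LazyModules that consume writeback results. Sources are registered with
// addWritebackSink during LazyModule construction and resolved to signals via writebackSinksImp.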
trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  override def shouldBeInlined: Boolean = false
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RS according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

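  // Two scheduler groups: group 0 holds the integer and memory function units,
  // group 1 holds the floating-point function units.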
  // one RS for every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )

  // set up inter-scheduler (outer) fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units in this scheduler that need fastUops from execution units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units in other schedulers need fastUops from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

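  // Each (x, y) pair below appears to select reservation station x (its index within the
  // schedulePorts group above) and enqueue port y of that station.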
  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

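  // MemBlock is built with IssQueSize overridden to the number of memory RS entries chosen by
  // the first ExuBlock's scheduler.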
  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
    val l2PfqBusy = Input(Bool())
    val debugTopDown = new Bundle {
      val robHeadPaddr = Valid(UInt(PAddrBits.W))
      val l2MissMatch = Input(Bool())
      val l3MissMatch = Input(Bool())
    }
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

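  // Collect writeback from all execution blocks and the memory block, then arbitrate it into the
  // register-file write ports (rfWriteback).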
  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.mem_to_ooo.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.mem_to_ooo.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.mem_to_ooo.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.ooo_to_mem.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.mem_to_ooo.lqDeq
    b.io.scheExtra.scommit := memBlock.io.mem_to_ooo.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

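  // Fast-wakeup signals, one per exu config. For writeback ports shared through the arbiter,
  // only the first source of each connection group is forwarded (intFastUop1 / fpFastUop1).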
  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.mem_to_ooo.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.ooo_to_mem.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.mem_to_ooo.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.mem_to_ooo.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.mem_to_ooo.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.mem_to_ooo.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.ooo_to_mem.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))

  for((c, e) <- ctrlBlock.io.st_pc_read.zip(exuBlocks(0).io.issue.get.drop(exuParameters.LduCnt))){
    // read store pc at store s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return store pc at store s2
  memBlock.io.ooo_to_mem.storePc <> VecInit(ctrlBlock.io.st_pc_read.map(_.data))

  memBlock.io.ooo_to_mem.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.ooo_to_mem.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.ooo_to_mem.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastFuOpType.get <> memBlock.io.ooo_to_mem.loadFastFuOpType
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.ooo_to_mem.loadFastImm

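  // stdIssue holds the last StuCnt issue ports of the first ExuBlock (the store-data pipes);
  // it feeds memWaitUpdateReq in the per-ExuBlock connections below.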
  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.mem_to_ooo.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.mem_to_ooo.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.mem_to_ooo.lsqio.vaddr

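  // External interrupt wiring: CLINT drives msip/mtip, PLIC drives meip/seip, and the debug
  // module drives the debug interrupt.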
  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.mem_to_ooo.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.mem_to_ooo.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.ooo_to_mem.sfence
  memBlock.io.fetch_to_mem.itlb <> frontend.io.ptw
  memBlock.io.ooo_to_mem.flushSb := fenceio.sbuffer.flushSb
  fenceio.sbuffer.sbIsEmpty := memBlock.io.mem_to_ooo.sbIsEmpty


  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get

  memBlock.io.ooo_to_mem.csrCtrl <> csrioIn.customCtrl
  memBlock.io.ooo_to_mem.tlbCsr <> csrioIn.tlb

  memBlock.io.ooo_to_mem.lsqio.lcommit   := ctrlBlock.io.robio.lsq.lcommit
  memBlock.io.ooo_to_mem.lsqio.scommit   := ctrlBlock.io.robio.lsq.scommit
  memBlock.io.ooo_to_mem.lsqio.pendingld := ctrlBlock.io.robio.lsq.pendingld
  memBlock.io.ooo_to_mem.lsqio.pendingst := ctrlBlock.io.robio.lsq.pendingst
  memBlock.io.ooo_to_mem.lsqio.commit    := ctrlBlock.io.robio.lsq.commit
  memBlock.io.ooo_to_mem.lsqio.pendingPtr:= ctrlBlock.io.robio.lsq.pendingPtr
  ctrlBlock.io.robio.lsq.mmio            := memBlock.io.mem_to_ooo.lsqio.mmio
  ctrlBlock.io.robio.lsq.uop             := memBlock.io.mem_to_ooo.lsqio.uop
//  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.ooo_to_mem.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.mem_to_ooo.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId
  memBlock.io.l2PfqBusy := io.l2PfqBusy

  // if the L2 prefetcher uses stream prefetching, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // top-down info
  memBlock.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  frontend.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  io.debugTopDown.robHeadPaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadPaddr
  ctrlBlock.io.debugTopDown.fromCore.l2MissMatch := io.debugTopDown.l2MissMatch
  ctrlBlock.io.debugTopDown.fromCore.l3MissMatch := io.debugTopDown.l3MissMatch
  ctrlBlock.io.debugTopDown.fromCore.fromMem := memBlock.io.debugTopDown.toCore

  // Modules are reset one by one
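  // (Roughly: ResetGen staggers reset release along this tree rather than releasing every block
  // at once; see utility.ResetGen for the exact ordering.)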
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}