xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision 51e45dbbf87325e45ff2af6ca86ed6c7eed04464)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import org.chipsalliance.cde.config
import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.IO(iodef)
    io <> DontCare
    io
  }
}
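
// Usage sketch (illustrative only, not part of the original source): mix NeedImpl into a
// module whose logic is not written yet, so all of its IO is tied off with DontCare:
//   class FooStub(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val done = Output(Bool()) })
//   }
// `FooStub` and `done` are hypothetical names.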

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "writebackSource (Valid[ExuOutput]) is not defined for this module")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "writebackSource1 (DecoupledIO[ExuOutput]) is not defined for this module")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }.toSeq
  }
  final def writebackSinksMod(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    ).toSeq
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }.toSeq
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Unit
}
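
// Usage sketch (illustrative only; mirrors the addWritebackSink/generateWritebackIO calls
// made further down in this file):
//   sink.addWritebackSink(Seq(srcA, srcB))        // register writeback sources during elaboration
//   val idx = sink.selWritebackSinks(_.length)    // choose among the registered source groups
//   sink.generateWritebackIO()                    // materialize the writeback IO in the module imp
// `sink`, `srcA`, `srcB` and `idx` are hypothetical names.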

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  override def shouldBeInlined: Boolean = false
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate rs according to number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )

  // generate the outer (inter-scheduler) fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exe units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUops from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

  // allow mdu and fmisc to have 2*numDeq enqueue ports
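  // NOTE (interpretation, not in the original source): each dispatch-port entry below is a
  // (config index within this scheduler, issue port index) pair; e.g. Seq((0, i), (1, i), (2, i))
  // lets one dispatch port feed ALU, MDU and Jump/CSR issue port i of the integer scheduler.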
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
    val l2PfqBusy = Input(Bool())
    val debugTopDown = new Bundle {
      val robHeadPaddr = Valid(UInt(PAddrBits.W))
      val l2MissMatch = Input(Bool())
      val l3MissMatch = Input(Bool())
    }
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  private val frontend = outer.frontend.module
  private val ctrlBlock = outer.ctrlBlock.module
  private val wb2Ctrl = outer.wb2Ctrl.module
  private val memBlock = outer.memBlock.module
  private val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.mem_to_ooo.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.l2 <> DontCare

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.mem_to_ooo.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.mem_to_ooo.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.ooo_to_mem.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.mem_to_ooo.lqDeq
    b.io.scheExtra.scommit := memBlock.io.mem_to_ooo.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.mem_to_ooo.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
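  // NOTE (interpretation, not in the original source): wbArbiter.intConnections and fpConnections
  // each group the source write ports merged into one regfile write port; taking `c.head` above
  // means only the first source of each merged port drives that port's fast-wakeup signal.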

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.ooo_to_mem.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.mem_to_ooo.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.mem_to_ooo.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.mem_to_ooo.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.mem_to_ooo.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.ooo_to_mem.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))

  for((c, e) <- ctrlBlock.io.st_pc_read.zip(exuBlocks(0).io.issue.get.drop(exuParameters.LduCnt))){
    // read store pc at store s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return store pc at store s2
  memBlock.io.ooo_to_mem.storePc <> VecInit(ctrlBlock.io.st_pc_read.map(_.data))

  memBlock.io.ooo_to_mem.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.ooo_to_mem.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.ooo_to_mem.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastFuOpType.get <> memBlock.io.ooo_to_mem.loadFastFuOpType
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.ooo_to_mem.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.mem_to_ooo.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.mem_to_ooo.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.mem_to_ooo.lsqio.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.mem_to_ooo.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.mem_to_ooo.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.ooo_to_mem.sfence
  memBlock.io.fetch_to_mem.itlb <> frontend.io.ptw
  memBlock.io.ooo_to_mem.flushSb := fenceio.sbuffer.flushSb
  fenceio.sbuffer.sbIsEmpty := memBlock.io.mem_to_ooo.sbIsEmpty

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get

  memBlock.io.ooo_to_mem.csrCtrl <> csrioIn.customCtrl
  memBlock.io.ooo_to_mem.tlbCsr <> csrioIn.tlb

  memBlock.io.ooo_to_mem.lsqio.lcommit   := ctrlBlock.io.robio.lsq.lcommit
  memBlock.io.ooo_to_mem.lsqio.scommit   := ctrlBlock.io.robio.lsq.scommit
  memBlock.io.ooo_to_mem.lsqio.pendingld := ctrlBlock.io.robio.lsq.pendingld
  memBlock.io.ooo_to_mem.lsqio.pendingst := ctrlBlock.io.robio.lsq.pendingst
  memBlock.io.ooo_to_mem.lsqio.commit    := ctrlBlock.io.robio.lsq.commit
  memBlock.io.ooo_to_mem.lsqio.pendingPtr:= ctrlBlock.io.robio.lsq.pendingPtr
  ctrlBlock.io.robio.lsq.mmio            := memBlock.io.mem_to_ooo.lsqio.mmio
  ctrlBlock.io.robio.lsq.uop             := memBlock.io.mem_to_ooo.lsqio.uop
//  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.ooo_to_mem.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.mem_to_ooo.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId
  memBlock.io.l2PfqBusy := io.l2PfqBusy
  memBlock.io.int2vlsu <> DontCare
  memBlock.io.vec2vlsu <> DontCare
  memBlock.io.vlsu2vec <> DontCare
  memBlock.io.vlsu2int <> DontCare
  memBlock.io.vlsu2ctrl <> DontCare

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // top-down info
  memBlock.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  frontend.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  io.debugTopDown.robHeadPaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadPaddr
  ctrlBlock.io.debugTopDown.fromCore.l2MissMatch := io.debugTopDown.l2MissMatch
  ctrlBlock.io.debugTopDown.fromCore.l3MissMatch := io.debugTopDown.l3MissMatch
  ctrlBlock.io.debugTopDown.fromCore.fromMem := memBlock.io.debugTopDown.toCore
  memBlock.io.debugRolling := ctrlBlock.io.debugRolling

  // Modules are reset one by one
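  // NOTE (assumes ResetGenNode cascades reset level by level; not stated in the original source):
  // each nested level appears to leave reset later than its parent, so memBlock would leave reset
  // first and frontend, the deepest node, last.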
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}