/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after implementing the module logic
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Unit
}
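// A minimal usage sketch of the writeback traits above. The names MyWbBlock and writebackOut are
// hypothetical (they do not exist in XiangShan) and only illustrate the intended pairing:
// the LazyModule side mixes in HasWritebackSource and describes its ports through
// writebackSourceParams, the module implementation exposes the actual wires through
// HasWritebackSourceImp, and a sink registers the source with addWritebackSink and then
// materializes the wires with writebackSinksImp / generateWritebackIO.
//
//   class MyWbBlock(cfgs: Seq[ExuConfig])(implicit p: Parameters) extends LazyModule with HasWritebackSource {
//     override val writebackSourceParams = Seq(new WritebackSourceParams(cfgs.map(Seq(_))))
//     override lazy val writebackSourceImp: HasWritebackSourceImp = module
//     lazy val module = new LazyModuleImp(this) with HasWritebackSourceImp {
//       val writebackOut = IO(Vec(cfgs.length, ValidIO(new ExuOutput)))
//       override def writebackSource = Some(Seq(writebackOut))
//     }
//   }
//
//   // A block mixing in HasWritebackSink would then call, at LazyModule construction time,
//   //   sink.addWritebackSink(Seq(myWbBlock))
//   // and, inside its module implementation, obtain Seq[Seq[ValidIO[ExuOutput]]] via writebackSinksImp().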
abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RS according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )
  // inter-scheduler (outer) fast wakeup ports are generated here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from the exe units themselves
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUop from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

  // allow MDU and FMISC to have 2*numDeq enqueue ports (see the expansion sketched below)
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
    (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
    (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)
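  // Worked expansion of the dispatch-port mapping above, assuming the commonly used default
  // parameters (JmpCnt = 1, AluCnt = 4, MduCnt = 2, LduCnt = StuCnt = 2, FmacCnt = 4,
  // FmiscCnt = 2); a given configuration may use different counts. Each (x, y) pair routes
  // the dispatch port to port y of the x-th function-unit group listed in the corresponding
  // schedulePorts entry:
  //   intDpPorts = Seq(Seq((0,0), (1,0), (2,0)), Seq((0,1), (1,1)), Seq((0,2), (1,2)), Seq((0,3), (1,3)))
  //   lsDpPorts  = Seq(Seq((3,0)), Seq((3,1)), Seq((4,0)), Seq((4,1)), Seq((5,0)), Seq((5,1)))
  //   fpDpPorts  = Seq(Seq((0,0), (1,0)), Seq((0,1), (1,1)), Seq((0,2), (1,2)), Seq((0,3), (1,3)))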
  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
    val l2PfqBusy = Input(Bool())
    val debugTopDown = new Bundle {
      val robHeadPaddr = Valid(UInt(PAddrBits.W))
      val l2MissMatch = Input(Bool())
      val l3MissMatch = Input(Bool())
    }
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.mem_to_ooo.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.mem_to_ooo.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.mem_to_ooo.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.ooo_to_mem.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.mem_to_ooo.lqDeq
    b.io.scheExtra.scommit := memBlock.io.mem_to_ooo.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.mem_to_ooo.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.ooo_to_mem.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.mem_to_ooo.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.mem_to_ooo.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.mem_to_ooo.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.mem_to_ooo.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for ((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)) {
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.ooo_to_mem.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))

  for ((c, e) <- ctrlBlock.io.st_pc_read.zip(exuBlocks(0).io.issue.get.drop(exuParameters.LduCnt))) {
    // read store pc at store s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return store pc at store s2
  memBlock.io.ooo_to_mem.storePc <> VecInit(ctrlBlock.io.st_pc_read.map(_.data))

  memBlock.io.ooo_to_mem.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.ooo_to_mem.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.ooo_to_mem.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastFuOpType.get <> memBlock.io.ooo_to_mem.loadFastFuOpType
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.ooo_to_mem.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.mem_to_ooo.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.mem_to_ooo.stIn).foreach{ case (sink, src) =>
      sink.bits := src.bits
      sink.valid := src.valid
    }
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{ case (sink, src) =>
      sink.valid := src.valid
      sink.bits := src.bits
    }
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsRs := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu <> memBlock.getPerf
  csrioIn.perf.perfEventsHc <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.mem_to_ooo.lsqio.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.mem_to_ooo.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.mem_to_ooo.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.ooo_to_mem.sfence
  memBlock.io.fetch_to_mem.itlb <> frontend.io.ptw
  memBlock.io.ooo_to_mem.flushSb := fenceio.sbuffer.flushSb
  fenceio.sbuffer.sbIsEmpty := memBlock.io.mem_to_ooo.sbIsEmpty

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get

  memBlock.io.ooo_to_mem.csrCtrl <> csrioIn.customCtrl
  memBlock.io.ooo_to_mem.tlbCsr <> csrioIn.tlb

  memBlock.io.ooo_to_mem.lsqio.lcommit := ctrlBlock.io.robio.lsq.lcommit
  memBlock.io.ooo_to_mem.lsqio.scommit := ctrlBlock.io.robio.lsq.scommit
  memBlock.io.ooo_to_mem.lsqio.pendingld := ctrlBlock.io.robio.lsq.pendingld
  memBlock.io.ooo_to_mem.lsqio.pendingst := ctrlBlock.io.robio.lsq.pendingst
  memBlock.io.ooo_to_mem.lsqio.commit := ctrlBlock.io.robio.lsq.commit
  memBlock.io.ooo_to_mem.lsqio.pendingPtr := ctrlBlock.io.robio.lsq.pendingPtr
  ctrlBlock.io.robio.lsq.mmio := memBlock.io.mem_to_ooo.lsqio.mmio
  ctrlBlock.io.robio.lsq.uop := memBlock.io.mem_to_ooo.lsqio.uop
  // memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.ooo_to_mem.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.mem_to_ooo.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId
  memBlock.io.l2PfqBusy := io.l2PfqBusy

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // top-down info
  memBlock.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  frontend.io.debugTopDown.robHeadVaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadVaddr
  io.debugTopDown.robHeadPaddr := ctrlBlock.io.debugTopDown.fromRob.robHeadPaddr
  ctrlBlock.io.debugTopDown.fromCore.l2MissMatch := io.debugTopDown.l2MissMatch
  ctrlBlock.io.debugTopDown.fromCore.l3MissMatch := io.debugTopDown.l3MissMatch
  ctrlBlock.io.debugTopDown.fromCore.fromMem := memBlock.io.debugTopDown.toCore

  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}