/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import freechips.rocketchip.tilelink.TLBuffer
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.cache.mmu._
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// Remove this trait after the module logic is implemented.
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

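// The writeback traits here (HasWritebackSource / HasWritebackSourceImp above and
// HasWritebackSink below) form a small registration protocol that is resolved while the
// LazyModules are being constructed: a sink records which sources feed it and later pulls
// the matching hardware ports out of each source's module implementation.
// A minimal usage sketch, assuming hypothetical `sink` and `sources` values that are not
// defined in this file:
//
//   sink.addWritebackSink(sources)                             // register at construction time
//   val sel = sink.selWritebackSinks(params => params.length)  // e.g. prefer the fewest port groups
//   sink.generateWritebackIO()                                 // sink-specific port generation
//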
trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map { case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map { case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Unit
}

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val ptw = LazyModule(new L2TLBWrapper())
  val ptw_to_l2_buffer = if (!coreParams.softPTW) LazyModule(new TLBuffer) else null
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  if (!coreParams.softPTW) {
    ptw_to_l2_buffer.node := ptw.node
  }

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate RS according to the number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation
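  // `schedulePorts` below describes two schedulers: index 0 holds the integer/memory
  // issue queues and index 1 the floating-point ones. Each tuple reads as
  // (exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget): `numDeq` dequeue ports are
  // generated for that config, and the wakeup-target lists name the configs whose
  // reservation stations receive this unit's speculative (fast) wakeup. For example,
  // assuming a configuration with exuParameters.AluCnt = 4 (not required here), the ALU
  // entry yields a reservation station group with 4 dequeue ports whose results fast-wake
  // the ALU, load and sta reservation stations.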
  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )

  // should do outer fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from exeunits
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUop from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }

  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
    (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
    (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}
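// XSCoreImp is the module implementation behind the XSCore LazyModule: it instantiates the
// frontend, control block, execution blocks, memory block and L2 TLB / page-table walker,
// and wires them together. A rough sketch of how an enclosing tile could use the core,
// assuming hypothetical `clintNode`, `plicNode`, `debugNode` and `hartIdValue` values (the
// exact diplomatic connection operators depend on the surrounding interrupt graph):
//
//   val core = LazyModule(new XSCore())
//   core.clint_int_sink := clintNode   // msip / mtip
//   core.plic_int_sink  := plicNode    // meip / seip
//   core.debug_int_sink := debugNode
//   // inside the parent's LazyModuleImp:
//   core.module.io.hartId := hartIdValue
//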
class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val ptw = outer.ptw.module
  val ptw_to_l2_buffer = if (!coreParams.softPTW) outer.ptw_to_l2_buffer.module else null
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // MemBlock error/exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.lqDeq
    b.io.scheExtra.scommit := memBlock.io.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
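  // CtrlBlock-side wiring: the dispatch ports feed the issue queues inside the ExuBlocks,
  // while LSQ allocation (enqLsq), dequeue counts and cancel counts flow back from the
  // MemBlock so that rename/dispatch can track free load/store queue entries.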
  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for ((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)) {
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))
  memBlock.io.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.issue.map(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.stIn).foreach { case (sink, src) =>
      sink.bits := src.bits
      sink.valid := src.valid
    }
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach { case (sink, src) =>
      sink.valid := src.valid
      sink.bits := src.bits
    }
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  if (!coreParams.softPTW) {
    memBlock.io.perfEventsPTW := ptw.getPerf
  } else {
    memBlock.io.perfEventsPTW := DontCare
  }
  ctrlBlock.perfinfo.perfEventsRs := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu <> memBlock.getPerf
  csrioIn.perf.perfEventsHc <> io.perfEvents
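  // CSR-side wiring: the floating-point status flags (fflags, dirty_fs) and the
  // exception/trap interface come from the ROB through ctrlBlock.io.robio, the dynamic
  // rounding mode (frm) is broadcast to the FP ExuBlock, and the machine/supervisor/debug
  // interrupt lines are driven from the diplomatic CLINT/PLIC/debug sink nodes declared
  // in XSCoreBase.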
  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.lsqio.exceptionAddr.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.sfence
  fenceio.sbuffer <> memBlock.io.fenceToSbuffer

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get
  memBlock.io.csrCtrl <> csrioIn.customCtrl
  memBlock.io.tlbCsr <> csrioIn.tlb
  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.lsqio.exceptionAddr.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId

  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, frontend.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, ptw.io.tlb(0), fenceio.sfence, csrioIn.tlb)
  val dtlbRepeater1 = PTWFilter(ldtlbParams.fenceDelay, memBlock.io.ptw, fenceio.sfence, csrioIn.tlb, l2tlbParams.dfilterSize)
  val dtlbRepeater2 = PTWRepeaterNB(passReady = false, ldtlbParams.fenceDelay, dtlbRepeater1.io.ptw, ptw.io.tlb(1), fenceio.sfence, csrioIn.tlb)
  ptw.io.sfence <> fenceio.sfence
  ptw.io.csr.tlb <> csrioIn.tlb
  ptw.io.csr.distribute_csr <> csrioIn.customCtrl.distribute_csr

  ExcitingUtils.addSource(dtlbRepeater1.io.rob_head_miss_in_tlb, s"miss_in_dtlb_${coreParams.HartId}", ExcitingUtils.Perf, true)

  // if the L2 prefetcher uses stream prefetch, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable
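  // The reset tree below staggers reset release across the nested ResetGenNode levels so
  // that, for example, the frontend does not start sending requests while downstream
  // blocks are still held in reset. The precise ordering and timing, as well as the effect
  // of the !debugOpts.FPGAPlatform argument, are defined by ResetGen in the utility
  // package.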
  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock), ModuleNode(dtlbRepeater1),
      ResetGenNode(Seq(
        ModuleNode(itlbRepeater2),
        ModuleNode(ptw),
        ModuleNode(dtlbRepeater2),
        ModuleNode(ptw_to_l2_buffer),
      )),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend), ModuleNode(itlbRepeater1)
          ))
        ))
      ))
    )
  )

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)
}