/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2024 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package top

import chisel3._
import chisel3.util._
import chisel3.experimental.dataview._
import xiangshan._
import utils._
import utility._
import utility.sram.SramBroadcastBundle
import system._
import device._
import org.chipsalliance.cde.config._
import freechips.rocketchip.amba.axi4._
import freechips.rocketchip.devices.debug.DebugModuleKey
import freechips.rocketchip.diplomacy._
import freechips.rocketchip.interrupts._
import freechips.rocketchip.tilelink._
import coupledL2.tl2chi.{CHIAsyncBridgeSink, PortIO}
import freechips.rocketchip.tile.MaxHartIdBits
import freechips.rocketchip.util.{AsyncQueueParams, AsyncQueueSource}
import chisel3.experimental.{ChiselAnnotation, annotate}
import sifive.enterprise.firrtl.NestedPrefixModulesAnnotation
import freechips.rocketchip.util.AsyncResetSynchronizerShiftReg

import difftest.common.DifftestWiring
import difftest.util.Profile

class XSNoCTop()(implicit p: Parameters) extends BaseXSSoc with HasSoCParameter
{
  override lazy val desiredName: String = "XSTop"

  ResourceBinding {
    val width = ResourceInt(2)
    val model = "freechips,rocketchip-unknown"
    Resource(ResourceAnchors.root, "model").bind(ResourceString(model))
    Resource(ResourceAnchors.root, "compat").bind(ResourceString(model + "-dev"))
    Resource(ResourceAnchors.soc, "compat").bind(ResourceString(model + "-soc"))
    Resource(ResourceAnchors.root, "width").bind(width)
    Resource(ResourceAnchors.soc, "width").bind(width)
    Resource(ResourceAnchors.cpus, "width").bind(ResourceInt(1))
    def bindManagers(xbar: TLNexusNode) = {
      ManagerUnification(xbar.edges.in.head.manager.managers).foreach{ manager =>
        manager.resources.foreach(r => r.bind(manager.toResource))
      }
    }
  }

  require(enableCHI)

  // xstile
  val core_with_l2 = LazyModule(new XSTileWrap()(p.alter((site, here, up) => {
    case XSCoreParamsKey => tiles.head
    case PerfCounterOptionsKey => up(PerfCounterOptionsKey).copy(perfDBHartID = tiles.head.HartId)
  })))

  // imsic bus top
  val u_imsic_bus_top = LazyModule(new imsic_bus_top)

  // interrupts
  val clintIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 2))
  val debugIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 1))
  val plicIntNode = IntSourceNode(IntSourcePortSimple(1, 2, 1))
  val nmiIntNode = IntSourceNode(IntSourcePortSimple(1, 1, (new NonmaskableInterruptIO).elements.size))
  val beuIntNode = IntSinkNode(IntSinkPortSimple(1, 1))
  core_with_l2.clintIntNode := clintIntNode
  core_with_l2.debugIntNode := debugIntNode
  core_with_l2.plicIntNode :*= plicIntNode
  core_with_l2.nmiIntNode := nmiIntNode
  beuIntNode := core_with_l2.beuIntNode
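  // Expose the diplomatic interrupt nodes as top-level IOs. Given the node widths above
  // and the synchronizers further below, clint carries {msip, mtip}, plic carries
  // {meip, seip}, nmi carries the NonmaskableInterruptIO signals, and beu is the
  // bus-error-unit interrupt sink.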
  val clint = InModuleBody(clintIntNode.makeIOs())
  val debug = InModuleBody(debugIntNode.makeIOs())
  val plic = InModuleBody(plicIntNode.makeIOs())
  val nmi = InModuleBody(nmiIntNode.makeIOs())
  val beu = InModuleBody(beuIntNode.makeIOs())

  // asynchronous bridge sink node
  val tlAsyncSinkOpt = Option.when(SeperateTLBus && EnableSeperateTLAsync)(
    LazyModule(new TLAsyncCrossingSink(SeperateTLAsyncBridge.get))
  )
  tlAsyncSinkOpt.foreach(_.node := core_with_l2.tlAsyncSourceOpt.get.node)
  // synchronous sink node
  val tlSyncSinkOpt = Option.when(SeperateTLBus && !EnableSeperateTLAsync)(TLTempNode())
  tlSyncSinkOpt.foreach(_ := core_with_l2.tlSyncSourceOpt.get)

  // The Manager Node is only used to make IO
  val tl = Option.when(SeperateTLBus)(TLManagerNode(Seq(
    TLSlavePortParameters.v1(
      managers = SeperateTLBusRanges map { address =>
        TLSlaveParameters.v1(
          address = Seq(address),
          regionType = RegionType.UNCACHED,
          executable = true,
          supportsGet = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutPartial = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutFull = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          fifoId = Some(0)
        )
      },
      beatBytes = 8
    )
  )))
  val tlXbar = Option.when(SeperateTLBus)(TLXbar())
  tlAsyncSinkOpt.foreach(sink => tlXbar.get := sink.node)
  tlSyncSinkOpt.foreach(sink => tlXbar.get := sink)
  tl.foreach(_ := tlXbar.get)
  // separate TL IO
  val io_tl = tl.map(x => InModuleBody(x.makeIOs()))

  // reset nodes
  val core_rst_node = BundleBridgeSource(() => Reset())
  core_with_l2.tile.core_reset_sink := core_rst_node

  class XSNoCTopImp(wrapper: XSNoCTop) extends LazyRawModuleImp(wrapper) {
    soc.XSTopPrefix.foreach { prefix =>
      val mod = this.toNamed
      annotate(new ChiselAnnotation {
        def toFirrtl = NestedPrefixModulesAnnotation(mod, prefix, true)
      })
    }
    FileRegisters.add("dts", dts)
    FileRegisters.add("graphml", graphML)
    FileRegisters.add("json", json)
    FileRegisters.add("plusArgs", freechips.rocketchip.util.PlusArgArtefacts.serialize_cHeader())

    val clock = IO(Input(Clock()))
    val reset = IO(Input(AsyncReset()))
    val noc_clock = EnableCHIAsyncBridge.map(_ => IO(Input(Clock())))
    val noc_reset = EnableCHIAsyncBridge.map(_ => IO(Input(AsyncReset())))
    val soc_clock = IO(Input(Clock()))
    val soc_reset = IO(Input(AsyncReset()))
    private val hasMbist = tiles.head.hasMbist
    private val hasSramCtl = tiles.head.hasSramCtl
    private val hasDFT = hasMbist || hasSramCtl
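    // Top-level core IOs: hart id/reset/halt control, CHI port and node ID, CLINT time,
    // the trace-encoder interface, optional DFT signals, and the optional low-power
    // handshake bundle.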
    val io = IO(new Bundle {
      val hartId = Input(UInt(p(MaxHartIdBits).W))
      val riscv_halt = Output(Bool())
      val riscv_critical_error = Output(Bool())
      val hartResetReq = Input(Bool())
      val hartIsInReset = Output(Bool())
      val riscv_rst_vec = Input(UInt(soc.PAddrBits.W))
      val chi = new PortIO
      val nodeID = Input(UInt(soc.NodeIDWidthList(issue).W))
      val clintTime = Input(ValidIO(UInt(64.W)))
      val traceCoreInterface = new Bundle {
        val fromEncoder = Input(new Bundle {
          val enable = Bool()
          val stall = Bool()
        })
        val toEncoder = Output(new Bundle {
          val cause = UInt(TraceCauseWidth.W)
          val tval = UInt(TraceTvalWidth.W)
          val priv = UInt(TracePrivWidth.W)
          val iaddr = UInt((TraceTraceGroupNum * TraceIaddrWidth).W)
          val itype = UInt((TraceTraceGroupNum * TraceItypeWidth).W)
          val iretire = UInt((TraceTraceGroupNum * TraceIretireWidthCompressed).W)
          val ilastsize = UInt((TraceTraceGroupNum * TraceIlastsizeWidth).W)
        })
      }
      val dft = Option.when(hasDFT)(Input(new SramBroadcastBundle))
      val dft_reset = Option.when(hasMbist)(Input(new DFTResetSignals()))
      val lp = Option.when(EnablePowerDown)(new LowPowerIO)
    })
    // imsic axi4 io
    val imsic_axi4 = wrapper.u_imsic_bus_top.axi4.map(x => IO(Flipped(new VerilogAXI4Record(x.elts.head.params.copy(addrBits = 32)))))
    // imsic tl io
    val imsic_m_tl = wrapper.u_imsic_bus_top.tl_m.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    val imsic_s_tl = wrapper.u_imsic_bus_top.tl_s.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    // imsic bare io
    val imsic = wrapper.u_imsic_bus_top.module.msi.map(x => IO(chiselTypeOf(x)))

    val noc_reset_sync = EnableCHIAsyncBridge.map(_ => withClockAndReset(noc_clock.get, noc_reset.get) { ResetGen(2, io.dft_reset) })
    val soc_reset_sync = withClockAndReset(soc_clock, soc_reset) { ResetGen(2, io.dft_reset) }
    wrapper.core_with_l2.module.io.dft.zip(io.dft).foreach { case (a, b) => a := b }
    wrapper.core_with_l2.module.io.dft_reset.zip(io.dft_reset).foreach { case (a, b) => a := b }
    // device clock and reset
    wrapper.u_imsic_bus_top.module.clock := soc_clock
    wrapper.u_imsic_bus_top.module.reset := soc_reset_sync

    // imsic axi4 io connection
    imsic_axi4.foreach(_.viewAs[AXI4Bundle] <> wrapper.u_imsic_bus_top.axi4.get.elements.head._2)
    // imsic tl io connection
    wrapper.u_imsic_bus_top.tl_m.foreach(_ <> imsic_m_tl.get)
    wrapper.u_imsic_bus_top.tl_s.foreach(_ <> imsic_s_tl.get)
    // imsic bare io connection
    wrapper.u_imsic_bus_top.module.msi.foreach(_ <> imsic.get)

    // input
    dontTouch(io)

    /*
     The SoC controls the power on/off sequence with isolation/reset/clock
     */
    val soc_rst_n = io.lp.map(_.i_cpu_sw_rst_n).getOrElse(true.B)
    val soc_iso_en = io.lp.map(_.i_cpu_iso_en).getOrElse(false.B)

    /* Core+L2 reset when:
       1. normal reset from SoC
       2. SoC-initiated reset during the power on/off flow
     */
    val cpuReset = reset.asBool || !soc_rst_n
    val cpuReset_sync = withClockAndReset(clock, cpuReset.asAsyncReset)(ResetGen(2, io.dft_reset))

    // Collect interrupt sources
    val msip = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(clint.head(0), 3, 0) }
    val mtip = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(clint.head(1), 3, 0) }
    val meip = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(plic.head(0), 3, 0) }
    val seip = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(plic.last(0), 3, 0) }
    val nmi_31 = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(nmi.head(0), 3, 0) }
    val nmi_43 = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(nmi.head(1), 3, 0) }
    val debugIntr = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(debug.head(0), 3, 0) }
    val msi_info_vld = withClockAndReset(clock, cpuReset_sync) { AsyncResetSynchronizerShiftReg(core_with_l2.module.io.msiInfo.valid, 3, 0) }
    val intSrc = Cat(msip, mtip, meip, seip, nmi_31, nmi_43, debugIntr, msi_info_vld)
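    // intSrc gathers the synchronized interrupt sources; it is used further below by the
    // WFI clock-gating state machine as a wake-up condition for the core+L2 clock.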
    /*
     * CPU low-power state machine:
     * 1. the core+L2 low-power transition is triggered by an L2 flush request from the core CSR
     * 2. wait for the L2 flush to complete
     * 3. wait for the core to enter WFI, then assert io.lp.o_cpu_no_op
     */
    val sIDLE :: sL2FLUSH :: sWAITWFI :: sEXITCO :: sPOFFREQ :: Nil = Enum(5)
    val lpState = withClockAndReset(clock, cpuReset_sync) { RegInit(sIDLE) }
    val l2_flush_en = withClockAndReset(clock, cpuReset_sync) {
      AsyncResetSynchronizerShiftReg(core_with_l2.module.io.l2_flush_en.getOrElse(false.B), 3, 0)
    }
    val l2_flush_done = withClockAndReset(clock, cpuReset_sync) {
      AsyncResetSynchronizerShiftReg(core_with_l2.module.io.l2_flush_done.getOrElse(false.B), 3, 0)
    }
    val isWFI = withClockAndReset(clock, cpuReset_sync) {
      AsyncResetSynchronizerShiftReg(core_with_l2.module.io.cpu_halt, 3, 0)
    }
    val exitco = withClockAndReset(clock, cpuReset_sync) {
      AsyncResetSynchronizerShiftReg((!io.chi.syscoreq & !io.chi.syscoack), 3, 0)
    }
    val QACTIVE = WireInit(false.B)
    val QACCEPTn = WireInit(false.B)
    lpState := lpStateNext(lpState, l2_flush_en, l2_flush_done, isWFI, exitco, QACTIVE, QACCEPTn)
    io.lp.foreach { lp => lp.o_cpu_no_op := lpState === sPOFFREQ } // inform the SoC that core+L2 wants to power off

    /* WFI clock-gating state machine:
       1. active only when lpState is sIDLE, i.e. core+L2 is in the normal state
       2. when the core is in WFI, the core+L2 clock is gated
       3. only reset/interrupt/snoop can recover the core+L2 clock
     */
    val sNORMAL :: sGCLOCK :: sAWAKE :: sFLITWAKE :: Nil = Enum(4)
    val wfiState = withClockAndReset(clock, cpuReset_sync) { RegInit(sNORMAL) }
    val isNormal = lpState === sIDLE
    val wfiGateClock = withClockAndReset(clock, cpuReset_sync) { RegInit(false.B) }
    val flitpend = io.chi.rx.snp.flitpend | io.chi.rx.rsp.flitpend | io.chi.rx.dat.flitpend
    wfiState := withClockAndReset(clock, cpuReset_sync) { WfiStateNext(wfiState, isWFI, isNormal, flitpend, intSrc) }

    if (WFIClockGate) {
      wfiGateClock := (wfiState === sGCLOCK)
    } else {
      wfiGateClock := false.B
    }

    /* during the power-down sequence, SoC reset gates the clock */
    val pwrdownGateClock = withClockAndReset(clock, cpuReset_sync.asAsyncReset) { RegInit(false.B) }
    pwrdownGateClock := !soc_rst_n && lpState === sPOFFREQ
    /*
     physical power-off handshake:
       i_cpu_pwrdown_req_n: power-down request from the SoC
       o_cpu_pwrdown_ack_n: acknowledgement from core+L2 for the power-down request
     */
    val soc_pwrdown_n = io.lp.map(_.i_cpu_pwrdown_req_n).getOrElse(true.B)
    io.lp.foreach { lp => lp.o_cpu_pwrdown_ack_n := core_with_l2.module.io.pwrdown_ack_n.getOrElse(true.B) }

    /* Core+L2 hardware clock gating:
       1. gate the clock when the SoC resets the CPU with io.lp.i_cpu_sw_rst_n asserted
       2. gate the clock when the SoC clock is enabled (core+L2 in normal state) and the core is in WFI
       3. disable the clock gate in the cycle that flitpend is valid on the rx.snp channel
     */
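    // Note: Scala's `&&` binds tighter than `|`, so the expression below parses as
    // (!wfiGateClock && !pwrdownGateClock) | flitpend; a pending snoop flit re-enables
    // the clock regardless of either gating condition (item 3 above).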
    val cpuClockEn = !wfiGateClock && !pwrdownGateClock | io.chi.rx.snp.flitpend

    dontTouch(wfiGateClock)
    dontTouch(pwrdownGateClock)
    dontTouch(cpuClockEn)

    core_with_l2.module.clock := ClockGate(false.B, cpuClockEn, clock)
    core_with_l2.module.reset := cpuReset.asAsyncReset
    core_with_l2.module.noc_reset.foreach(_ := noc_reset.get)
    core_with_l2.module.soc_reset := soc_reset
    core_with_l2.module.io.hartId := io.hartId
    core_with_l2.module.io.nodeID.get := io.nodeID
    io.riscv_halt := core_with_l2.module.io.cpu_halt
    io.riscv_critical_error := core_with_l2.module.io.cpu_crtical_error
    core_with_l2.module.io.hartResetReq := io.hartResetReq
    io.hartIsInReset := core_with_l2.module.io.hartIsInReset
    core_with_l2.module.io.reset_vector := io.riscv_rst_vec
    core_with_l2.module.io.iso_en.foreach { _ := io.lp.map(_.i_cpu_iso_en).getOrElse(false.B) }
    core_with_l2.module.io.pwrdown_req_n.foreach { _ := io.lp.map(_.i_cpu_pwrdown_req_n).getOrElse(true.B) }
    // trace interface
    val traceInterface = core_with_l2.module.io.traceCoreInterface
    traceInterface.fromEncoder := io.traceCoreInterface.fromEncoder
    io.traceCoreInterface.toEncoder.priv := traceInterface.toEncoder.priv
    io.traceCoreInterface.toEncoder.cause := traceInterface.toEncoder.trap.cause
    io.traceCoreInterface.toEncoder.tval := traceInterface.toEncoder.trap.tval
    io.traceCoreInterface.toEncoder.iaddr := VecInit(traceInterface.toEncoder.groups.map(_.bits.iaddr)).asUInt
    io.traceCoreInterface.toEncoder.itype := VecInit(traceInterface.toEncoder.groups.map(_.bits.itype)).asUInt
    io.traceCoreInterface.toEncoder.iretire := VecInit(traceInterface.toEncoder.groups.map(_.bits.iretire)).asUInt
    io.traceCoreInterface.toEncoder.ilastsize := VecInit(traceInterface.toEncoder.groups.map(_.bits.ilastsize)).asUInt

    EnableClintAsyncBridge match {
      case Some(param) =>
        withClockAndReset(soc_clock, soc_reset_sync) {
          val source = Module(new AsyncQueueSource(UInt(64.W), param))
          source.io.enq.valid := io.clintTime.valid
          source.io.enq.bits := io.clintTime.bits
          core_with_l2.module.io.clintTime <> source.io.async
        }
      case None =>
        core_with_l2.module.io.clintTime <> io.clintTime
    }

    EnableCHIAsyncBridge match {
      case Some(param) =>
        withClockAndReset(noc_clock.get, noc_reset_sync.get) {
          val sink = Module(new CHIAsyncBridgeSink(param))
          sink.io.async <> core_with_l2.module.io.chi
          io.chi <> sink.io.deq
        }
      case None =>
        io.chi <> core_with_l2.module.io.chi
    }

    // Separate DebugModule TL async queue sink
    if (SeperateTLBus && EnableSeperateTLAsync) {
      tlAsyncSinkOpt.get.module.clock := soc_clock
      tlAsyncSinkOpt.get.module.reset := soc_reset_sync
    }

    core_with_l2.module.io.msiInfo.valid := wrapper.u_imsic_bus_top.module.msiio.vld_req
    core_with_l2.module.io.msiInfo.bits := wrapper.u_imsic_bus_top.module.msiio.data
    wrapper.u_imsic_bus_top.module.msiio.vld_ack := core_with_l2.module.io.msiAck
    // tie off core soft reset
    core_rst_node.out.head._1 := false.B.asAsyncReset

    core_with_l2.module.io.debugTopDown.l3MissMatch := false.B
    core_with_l2.module.io.l3Miss := false.B
  }

  lazy val module = new XSNoCTopImp(this)
}
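// XSNoCDiffTop wraps XSNoCTop for the DiffTest flow: it re-exposes the wrapped module's
// IOs at this level and drives the wrapped clock/reset from this module's implicit ones.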
class XSNoCDiffTop(implicit p: Parameters) extends Module {
  override val desiredName: String = "XSDiffTop"
  val l_soc = LazyModule(new XSNoCTop())
  val soc = Module(l_soc.module)

  // Expose XSTop IOs outside, i.e. io
  def exposeIO(data: Data, name: String): Unit = {
    val dummy = IO(chiselTypeOf(data)).suggestName(name)
    dummy <> data
  }
  def exposeOptionIO(data: Option[Data], name: String): Unit = {
    if (data.isDefined) {
      val dummy = IO(chiselTypeOf(data.get)).suggestName(name)
      dummy <> data.get
    }
  }
  exposeIO(l_soc.clint, "clint")
  exposeIO(l_soc.debug, "debug")
  exposeIO(l_soc.plic, "plic")
  exposeIO(l_soc.beu, "beu")
  exposeIO(l_soc.nmi, "nmi")
  soc.clock := clock
  soc.reset := reset.asAsyncReset
  exposeIO(soc.soc_clock, "soc_clock")
  exposeIO(soc.soc_reset, "soc_reset")
  exposeIO(soc.io, "io")
  exposeOptionIO(soc.noc_clock, "noc_clock")
  exposeOptionIO(soc.noc_reset, "noc_reset")
  exposeOptionIO(soc.imsic_axi4, "imsic_axi4")
  exposeOptionIO(soc.imsic_m_tl, "imsic_m_tl")
  exposeOptionIO(soc.imsic_s_tl, "imsic_s_tl")
  exposeOptionIO(soc.imsic, "imsic")

  // TODO:
  // XSDiffTop is only part of the DUT, so we cannot instantiate Difftest here.
  // For now we collect performance counters for each DiffTop; the control signals need
  // to be passed in from Difftest.
  val timer = IO(Input(UInt(64.W)))
  val logEnable = IO(Input(Bool()))
  val clean = IO(Input(Bool()))
  val dump = IO(Input(Bool()))
  XSLog.collect(timer, logEnable, clean, dump)
  DifftestWiring.createAndConnectExtraIOs()
  Profile.generateJson("XiangShan")
  XSNoCDiffTopChecker()
}

// TODO:
// Currently we use a two-step XiangShan-Difftest flow, generating XS (with the Diff
// interface only) and Difftest separately.
// To avoid potential interface problems between XS and Diff, we add a Checker and CI (dual-core).
// We will try a one-step XS-Diff flow later.
object XSNoCDiffTopChecker {
  def apply(): Unit = {
    val verilog =
      """
        |`define CONFIG_XSCORE_NR 2
        |`include "gateway_interface.svh"
        |module XSDiffTopChecker(
        | input cpu_clk,
        | input cpu_rstn,
        | input sys_clk,
        | input sys_rstn
        |);
        |wire [63:0] timer;
        |wire logEnable;
        |wire clean;
        |wire dump;
        |// FIXME: use signals from Difftest rather than default values
        |assign timer = 64'b0;
        |assign logEnable = 1'b0;
        |assign clean = 1'b0;
        |assign dump = 1'b0;
        |gateway_if gateway_if_i();
        |core_if core_if_o[`CONFIG_XSCORE_NR]();
        |generate
        |  genvar i;
        |  for (i = 0; i < `CONFIG_XSCORE_NR; i = i+1)
        |  begin: u_CPU_TOP
        |  // FIXME: add missing ports
        |  XSDiffTop u_XSTop (
        |    .clock       (cpu_clk),
        |    .noc_clock   (sys_clk),
        |    .soc_clock   (sys_clk),
        |    .io_hartId   (6'h0 + i),
        |    .timer       (timer),
        |    .logEnable   (logEnable),
        |    .clean       (clean),
        |    .dump        (dump),
        |    .gateway_out (core_if_o[i])
        |  );
        |  end
        |endgenerate
        |  CoreToGateway u_CoreToGateway(
        |    .gateway_out (gateway_if_i.out),
        |    .core_in     (core_if_o)
        |  );
        |  GatewayEndpoint u_GatewayEndpoint(
        |    .clock      (sys_clk),
        |    .reset      (sys_rstn),
        |    .gateway_in (gateway_if_i.in),
        |    .step       ()
        |  );
        |
        |endmodule
      """.stripMargin
    FileRegisters.writeOutputFile("./build", "XSDiffTopChecker.sv", verilog)
  }
}