/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.LazyModule
import freechips.rocketchip.diplomacy.LazyModuleImp
import org.chipsalliance.cde.config.Parameters
import utility._
import utils._
import xiangshan._
import xiangshan.backend.fu.PFEvent
import xiangshan.backend.fu.PMP
import xiangshan.backend.fu.PMPChecker
import xiangshan.backend.fu.PMPReqBundle
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._

class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false
  val inner       = LazyModule(new FrontendInlined)
  lazy val module = new FrontendImp(this)
}

class FrontendImp(wrapper: Frontend)(implicit p: Parameters) extends LazyModuleImp(wrapper) {
  val io      = IO(wrapper.inner.module.io.cloneType)
  val io_perf = IO(wrapper.inner.module.io_perf.cloneType)
  io <> wrapper.inner.module.io
  io_perf <> wrapper.inner.module.io_perf
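  // If ResetGen is enabled in DebugOptions, drive the inlined frontend module's reset through a generated reset tree.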
  if (p(DebugOptionsKey).ResetGen) {
    ResetGen(ResetGenNode(Seq(ModuleNode(wrapper.inner.module))), reset, sim = false)
  }
}

class FrontendInlined()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = true

  val instrUncache = LazyModule(new InstrUncache())
  val icache       = LazyModule(new ICache())

  lazy val module = new FrontendInlinedImp(this)
}

class FrontendInlinedImp(outer: FrontendInlined) extends LazyModuleImp(outer)
    with HasXSParameter
    with HasPerfEvents {
  val io = IO(new Bundle() {
    val hartId       = Input(UInt(hartIdLen.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei       = Input(Bool())
    val ptw          = new TlbPtwIO()
    val backend      = new FrontendToCtrlIO
    val softPrefetch = Vec(backendParams.LduCnt, Flipped(Valid(new SoftIfetchPrefetchBundle)))
    val sfence       = Input(new SfenceBundle)
    val tlbCsr       = Input(new TlbCsrBundle)
    val csrCtrl      = Input(new CustomCSRCtrlIO)
    val error        = ValidIO(new L1CacheErrorInfo)
    val frontendInfo = new Bundle {
      val ibufFull = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
    val resetInFrontend = Output(Bool())
    val debugTopDown = new Bundle {
      val robHeadVaddr = Flipped(Valid(UInt(VAddrBits.W)))
    }
  })

  // decoupled-frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu          = Module(new Predictor)
  val ifu          = Module(new NewIFU)
  val ibuffer      = Module(new IBuffer)
  val ftq          = Module(new Ftq)

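  // Flush/redirect bookkeeping: needFlush mirrors the backend redirect one cycle later; the remaining
  // wires record the redirect/miss cause and feed the IBuffer's top-down bubble counters below.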
  val needFlush            = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect  = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss  = Wire(Bool())
  val FlushTAGEMiss        = Wire(Bool())
  val FlushSCMiss          = Wire(Bool())
  val FlushITTAGEMiss      = Wire(Bool())
  val FlushRASMiss         = Wire(Bool())

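  // CSR control, TLB CSR state, and sfence are taken through two register stages (DelayN / double RegNext) before use.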
  val tlbCsr  = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence  = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger

  // bpu ctrl
  bpu.io.ctrl         := csrCtrl.bp_ctrl
  bpu.io.reset_vector := io.reset_vector

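  // PMP/PMA checking: 2 * PortNumber request ports come from the ICache and one final port from the IFU;
  // all checkers share the CSR-distributed configuration and the current instruction privilege mode.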
  // pmp
  val PortNumber = ICacheParameters().PortNumber
  val pmp        = Module(new PMP())
  val pmp_check  = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
  (0 until 2 * PortNumber).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 * PortNumber).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

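  // Instruction TLB: the first PortNumber requestor ports go to the ICache and the final, blocking port
  // to the IFU; the ITLB is flushed through flushPipe whenever the backend redirects (needFlush).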
  val itlb =
    Module(new TLB(coreParams.itlbPortNum, nRespDups = 1, Seq.fill(PortNumber)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(PortNumber) zip icache.io.itlb foreach { case (a, b) => a <> b }
  itlb.io.requestor.last <> ifu.io.iTLBInter // MMIO fetch may need re-translation, so this port is blocking
  itlb.io.hartId := io.hartId
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.flushPipe.map(_ := needFlush)
  itlb.io.redirect := DontCare // itlb uses flushPipe, so it does not need the redirect signal

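  // ITLB-to-PTW path: per-port requests are merged by a PTWFilter and then forwarded through a
  // non-blocking repeater onto io.ptw.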
  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  val itlbRepeater2 =
    PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, io.ptw, sfence, tlbCsr)

  icache.io.ftqPrefetch <> ftq.io.toPrefetch
  icache.io.softPrefetch <> io.softPrefetch

  // IFU-Ftq
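  // A fetch request from the FTQ is consumed by the IFU and the ICache in the same cycle, so the
  // request ready is the AND of both sinks (mirrored again for toICache below).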
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ftq.io.fromIfu <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu <> ftq.io.toBpu
  ftq.io.fromBpu <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead <> ifu.io.mmioCommitRead
  // IFU-ICache

  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <> icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady       := icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss   := icache.io.fetch.topdownItlbMiss
  icache.io.stop                       := ifu.io.icacheStop
  icache.io.flush                      := ftq.io.icacheFlush

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := RegNext(io.fencei)

  // IFU-Ibuffer
  ifu.io.toIbuffer <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq := ftq.io.toBackend
  io.backend.fromIfu := ifu.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

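  // Consistency checkers: keep a shadow copy of the FTQ PC memory so each IBuffer output can be
  // cross-checked against its expected fall-through or predicted target address.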
  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when(ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget    = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(
      ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
      ftq.io.toBackend.newest_entry_target,
      checkPcMem(checkTargetIdx(i) + 1.U).startAddr
    )
  }

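  // Intended check: a not-taken branch should be followed by an instruction from the same FTQ entry.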
  // left disabled (its call below is commented out): this br could be the last instruction in the fetch
  // block, so the next instruction may legitimately come from the next FTQ entry
  def checkNotTakenConsecutive = {
    val prevNotTakenValid  = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last, if a not-taken br, the next instr should have the same ftqPtr
      // for the last instr, record it and check against the next request
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when(ibuffer.io.out(i + 1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i + 1), "not-taken br should have same ftqPtr\n")
        }.otherwise {
          // last br, record its info
          prevNotTakenValid  := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid  := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when(needFlush) {
      prevNotTakenValid := false.B
    }
  }

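  // Checks that a predicted-taken branch is followed by an instruction from the next FTQ entry
  // (i.e. a consecutive ftqPtr).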
  def checkTakenNotConsecutive = {
    val prevTakenValid  = RegInit(0.B)
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last, if a taken br, the next instr should have the consecutive ftqPtr
      // for the last instr, record it and check against the next request
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i + 1), "taken br should have consecutive ftqPtr\n")
        }.otherwise {
          // last br, record its info
          prevTakenValid  := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      // last instr is a br, record its info
      prevTakenValid  := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
      prevTakenValid := false.B
    }
    when(needFlush) {
      prevTakenValid := false.B
    }
  }

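  // Checks that a predicted not-taken branch is followed by the sequential PC
  // (branch PC + 2 for RVC, + 4 otherwise).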
  def checkNotTakenPC = {
    val prevNotTakenPC    = Reg(UInt(VAddrBits.W))
    val prevIsRVC         = Reg(Bool())
    val prevNotTakenValid = RegInit(0.B)

    for (i <- 0 until DecodeWidth - 1) {
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          XSError(
            ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(
              i + 1
            ).bits.pc,
            "not-taken br should have consecutive pc\n"
          )
        }.otherwise {
          prevNotTakenValid := true.B
          prevIsRVC         := ibuffer.io.out(i).bits.pd.isRVC
          prevNotTakenPC    := ibuffer.io.out(i).bits.pc
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      prevNotTakenValid := true.B
      prevIsRVC         := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
      prevNotTakenPC    := ibuffer.io.out(DecodeWidth - 1).bits.pc
    }
    when(prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(
        prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc,
        "not-taken br should have consecutive pc\n"
      )
      prevNotTakenValid := false.B
    }
    when(needFlush) {
      prevNotTakenValid := false.B
    }
  }

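  // Checks that a predicted-taken CFI is followed by the target PC recorded in the FTQ
  // (the newest entry's target, or the start address of the next FTQ entry).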
  def checkTakenPC = {
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    val prevTakenValid  = RegInit(0.B)
    val prevTakenTarget = Wire(UInt(VAddrBits.W))
    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr

    for (i <- 0 until DecodeWidth - 1) {
      when(ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          XSError(checkTarget(i) =/= ibuffer.io.out(i + 1).bits.pc, "taken instr should follow target pc\n")
        }.otherwise {
          prevTakenValid  := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      prevTakenValid  := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
      prevTakenValid := false.B
    }
    when(needFlush) {
      prevTakenValid := false.B
    }
  }

  // checkNotTakenConsecutive
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

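  // IBuffer flush and top-down bubble classification: each per-predictor miss indication from the FTQ is
  // forwarded so the IBuffer can attribute empty slots to their cause.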
  ibuffer.io.flush                := needFlush
  ibuffer.io.ControlRedirect      := FlushControlRedirect
  ibuffer.io.MemVioRedirect       := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble       := FlushTAGEMiss
  ibuffer.io.SCMissBubble         := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble     := FlushITTAGEMiss
  ibuffer.io.RASMissBubble        := FlushRASMiss
  ibuffer.io.decodeCanAccept      := io.backend.canAccept

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss       := ftq.io.TAGEMissBubble
  FlushSCMiss         := ftq.io.SCMissBubble
  FlushITTAGEMiss     := ftq.io.ITTAGEMissBubble
  FlushRASMiss        := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason

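  // MMIO instruction fetch: the IFU sends uncached fetch requests to InstrUncache and consumes its responses.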
  instrUncache.io.req <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

  itlbRepeater1.io.debugTopDown.robHeadVaddr := io.debugTopDown.robHeadVaddr

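  // FrontendBubble counts decode slots left empty by the frontend in cycles where the backend could accept instructions.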
  val frontendBubble = Mux(io.backend.canAccept, DecodeWidth.U - PopCount(ibuffer.io.out.map(_.valid)), 0.U)
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)
  io.resetInFrontend       := reset.asBool

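  // Hardware performance monitoring: PFEvent distributes the CSR-programmed event selectors, of which the
  // first eight are used to pick among the frontend perf events collected below.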
  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)

  val perfFromUnits = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerfEvents)
  val perfFromIO    = Seq()
  val perfBlock     = Seq()
  // let index = 0 be no event
  val allPerfEvents = Seq(("noEvent", 0.U)) ++ perfFromUnits ++ perfFromIO ++ perfBlock

  if (printEventCoding) {
    for (((name, inc), i) <- allPerfEvents.zipWithIndex) {
      println("Frontend perfEvents Set", name, inc, i)
    }
  }

  val allPerfInc          = allPerfEvents.map(_._2.asTypeOf(new PerfEvent))
  override val perfEvents = HPerfMonitor(csrevents, allPerfInc).getPerfEvents
  generatePerfEvent()
}