xref: /XiangShan/src/main/scala/xiangshan/frontend/Frontend.scala (revision 5f119905d30367541582754eec5ce89807499dff)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend
import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.{PFEvent, PMP, PMPChecker, PMPReqBundle}
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._

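// Frontend keeps its own module boundary in the generated hierarchy (shouldBeInlined = false);
// it only wraps FrontendInlined, which holds the actual frontend logic and is inlined into it.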
class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false
  val inner = LazyModule(new FrontendInlined)
  lazy val module = new FrontendImp(this)
}

class FrontendImp(wrapper: Frontend)(implicit p: Parameters) extends LazyModuleImp(wrapper) {
  val io = IO(wrapper.inner.module.io.cloneType)
  val io_perf = IO(wrapper.inner.module.io_perf.cloneType)
  io <> wrapper.inner.module.io
  io_perf <> wrapper.inner.module.io_perf
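  // With the ResetGen debug option enabled, drive the inner frontend module through a generated
  // (synchronized) reset tree.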
  if (p(DebugOptionsKey).ResetGen) {
    ResetGen(ResetGenNode(Seq(ModuleNode(wrapper.inner.module))), reset, sim = false)
  }
}

class FrontendInlined()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = true

  val instrUncache  = LazyModule(new InstrUncache())
  val icache        = LazyModule(new ICache())

  lazy val module = new FrontendInlinedImp(this)
}

class FrontendInlinedImp (outer: FrontendInlined) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasPerfEvents
{
  val io = IO(new Bundle() {
    val hartId = Input(UInt(hartIdLen.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei = Input(Bool())
    val ptw = new TlbPtwIO()
    val backend = new FrontendToCtrlIO
    val softPrefetch = Vec(backendParams.LduCnt, Flipped(Valid(new SoftIfetchPrefetchBundle)))
    val sfence = Input(new SfenceBundle)
    val tlbCsr = Input(new TlbCsrBundle)
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val error  = ValidIO(new L1CacheErrorInfo)
    val frontendInfo = new Bundle {
      val ibufFull  = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
    val resetInFrontend = Output(Bool())
    val debugTopDown = new Bundle {
      val robHeadVaddr = Flipped(Valid(UInt(VAddrBits.W)))
    }
  })

  // decoupled frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu     = Module(new Predictor)
  val ifu     = Module(new NewIFU)
  val ibuffer = Module(new IBuffer)
  val ftq = Module(new Ftq)

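  // Redirect / flush bookkeeping: needFlush is the backend redirect valid delayed by one cycle;
  // the Flush* signals carry the redirect / misprediction cause and feed the IBuffer's top-down
  // bubble classification below.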
  val needFlush = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss = Wire(Bool())
  val FlushTAGEMiss = Wire(Bool())
  val FlushSCMiss = Wire(Bool())
  val FlushITTAGEMiss = Wire(Bool())
  val FlushRASMiss = Wire(Bool())

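  // CSR control and sfence are used two cycles late; the extra registers are assumed to be there
  // to relax timing on the broadcast paths from the backend.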
  val tlbCsr = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger

  // bpu ctrl
  bpu.io.ctrl := csrCtrl.bp_ctrl
  bpu.io.reset_vector := io.reset_vector

  // pmp
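  // The first 2 * PortNumber checker ports are wired to the ICache's PMP request ports; the last
  // port is wired to the IFU (presumably for its MMIO fetch path), so coreParams.ipmpPortNum is
  // assumed to equal 2 * PortNumber + 1.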
  val PortNumber = ICacheParameters().PortNumber
  val pmp = Module(new PMP())
  val pmp_check = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec     = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
  (0 until 2 * PortNumber).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 * PortNumber).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

  val itlb = Module(new TLB(coreParams.itlbPortNum, nRespDups = 1,
    Seq.fill(PortNumber)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(PortNumber) zip icache.io.itlb foreach {case (a,b) => a <> b}
  itlb.io.requestor.last <> ifu.io.iTLBInter // mmio may need re-tlb, blocked
  itlb.io.hartId := io.hartId
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.flushPipe.map(_ := needFlush)
  itlb.io.redirect := DontCare // itlb has flushpipe, don't need redirect signal

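  // ITLB miss requests reach the shared PTW through a two-stage chain: a PTWFilter (assumed to
  // merge/filter duplicate per-port requests) followed by a PTWRepeaterNB that forwards them to io.ptw.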
  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, io.ptw, sfence, tlbCsr)

  icache.io.ftqPrefetch <> ftq.io.toPrefetch
  icache.io.softPrefetch <> io.softPrefetch

  // IFU-Ftq
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready
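  // Note: a fetch request from the FTQ is accepted only when both the IFU and the ICache are
  // ready; the ICache request port below is gated with the same condition so the two stay in step.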

  ftq.io.fromIfu          <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu       <> ftq.io.toBpu
  ftq.io.fromBpu          <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead   <> ifu.io.mmioCommitRead
  // IFU-ICache

  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <> icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady := icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss := icache.io.fetch.topdownItlbMiss
  icache.io.stop := ifu.io.icacheStop
  icache.io.flush := ftq.io.icacheFlush

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := RegNext(io.fencei)

  // IFU-Ibuffer
  ifu.io.toIbuffer    <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq := ftq.io.toBackend
  io.backend.fromIfu := ifu.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

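  // checkPcMem shadows the FTQ PC RAM: every FTQ PC write is also captured here so the assertion
  // helpers below can recompute the expected PC / fetch target of IBuffer outputs.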
  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when (ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
                        ftq.io.toBackend.newest_entry_target,
                        checkPcMem(checkTargetIdx(i) + 1.U).startAddr)
  }

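  // The helpers below add XSError consistency checks that compare the ftqPtr, PC and pred_taken
  // fields coming out of the IBuffer against FTQ state, covering both branches inside a decode
  // group and a branch that is the last instruction of its group.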
  // checkNotTakenConsecutive is left uninvoked below: the not-taken branch could be the last
  // instruction in its fetch block, so the next instruction may legitimately carry a different ftqPtr
  def checkNotTakenConsecutive = {
    val prevNotTakenValid = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last: if this is a not-taken br, the next instr should have the same ftqPtr
      // for the last instr: record it here and check it against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when (ibuffer.io.out(i+1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i+1), "not-taken br should have same ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevNotTakenValid := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

  def checkTakenNotConsecutive = {
    val prevTakenValid = RegInit(0.B)
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last: if this is a taken br, the next instr's ftqPtr should be exactly one greater
      // for the last instr: record it here and check it against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i+1), "taken br should have consecutive ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      // last instr is a br, record its info
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

  def checkNotTakenPC = {
    val prevNotTakenPC = Reg(UInt(VAddrBits.W))
    val prevIsRVC = Reg(Bool())
    val prevNotTakenValid = RegInit(0.B)

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(i+1).bits.pc, "not-taken br should have consecutive pc\n")
        } .otherwise {
          prevNotTakenValid := true.B
          prevIsRVC := ibuffer.io.out(i).bits.pd.isRVC
          prevNotTakenPC := ibuffer.io.out(i).bits.pc
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevNotTakenValid := true.B
      prevIsRVC := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
      prevNotTakenPC := ibuffer.io.out(DecodeWidth - 1).bits.pc
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc, "not-taken br should have consecutive pc\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

  def checkTakenPC = {
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    val prevTakenValid = RegInit(0.B)
    val prevTakenTarget = Wire(UInt(VAddrBits.W))
    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(checkTarget(i) =/= ibuffer.io.out(i+1).bits.pc, "taken instr should follow target pc\n")
        } .otherwise {
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

  //checkNotTakenConsecutive
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

  ibuffer.io.flush := needFlush
  ibuffer.io.ControlRedirect := FlushControlRedirect
  ibuffer.io.MemVioRedirect := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble := FlushTAGEMiss
  ibuffer.io.SCMissBubble := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble := FlushITTAGEMiss
  ibuffer.io.RASMissBubble := FlushRASMiss
  ibuffer.io.decodeCanAccept := io.backend.canAccept

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss := ftq.io.TAGEMissBubble
  FlushSCMiss := ftq.io.SCMissBubble
  FlushITTAGEMiss := ftq.io.ITTAGEMissBubble
  FlushRASMiss := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason

  instrUncache.io.req   <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

  itlbRepeater1.io.debugTopDown.robHeadVaddr := io.debugTopDown.robHeadVaddr

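  // Frontend bubble accounting: when the backend can accept instructions, count how many of the
  // DecodeWidth IBuffer output slots are empty this cycle.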
  val frontendBubble = Mux(io.backend.canAccept, DecodeWidth.U - PopCount(ibuffer.io.out.map(_.valid)), 0.U)
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)
  io.resetInFrontend := reset.asBool

  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)
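  // These eight CSR-selected HPM event indices choose which of the frontend perf increments
  // collected below are counted by HPerfMonitor; index 0 is reserved for "no event".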

  val perfFromUnits = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerfEvents)
  val perfFromIO    = Seq()
  val perfBlock     = Seq()
  // let index = 0 be no event
  val allPerfEvents = Seq(("noEvent", 0.U)) ++ perfFromUnits ++ perfFromIO ++ perfBlock

  if (printEventCoding) {
    for (((name, inc), i) <- allPerfEvents.zipWithIndex) {
      println("Frontend perfEvents Set", name, inc, i)
    }
  }

  val allPerfInc = allPerfEvents.map(_._2.asTypeOf(new PerfEvent))
  override val perfEvents = HPerfMonitor(csrevents, allPerfInc).getPerfEvents
  generatePerfEvent()
}