xref: /XiangShan/src/main/scala/xiangshan/frontend/Frontend.scala (revision f9ac118cd4a950ef018ddb4d9c3d8f332827958e)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.{PFEvent, PMP, PMPChecker, PMPReqBundle}
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._


class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {

  val instrUncache  = LazyModule(new InstrUncache())
  val icache        = LazyModule(new ICache())

  lazy val module = new FrontendImp(this)
}


class FrontendImp (outer: Frontend) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasPerfEvents
{
  val io = IO(new Bundle() {
    val hartId = Input(UInt(8.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei = Input(Bool())
    val ptw = new TlbPtwIO()
    val backend = new FrontendToCtrlIO
    val sfence = Input(new SfenceBundle)
    val tlbCsr = Input(new TlbCsrBundle)
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val csrUpdate = new DistributedCSRUpdateReq
    val error  = new L1CacheErrorInfo
    val frontendInfo = new Bundle {
      val ibufFull  = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
  })

  // decoupled-frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu     = Module(new Predictor)
  val ifu     = Module(new NewIFU)
  val ibuffer = Module(new Ibuffer)
  val ftq = Module(new Ftq)

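  // A backend redirect flushes the frontend one cycle later (registered here as needFlush).
  // The Flush* signals below record why a flush or bubble happened; they are forwarded to the
  // Ibuffer further down for the top-down stall accounting.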
  val needFlush = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss = Wire(Bool())
  val FlushTAGEMiss = Wire(Bool())
  val FlushSCMiss = Wire(Bool())
  val FlushITTAGEMiss = Wire(Bool())
  val FlushRASMiss = Wire(Bool())

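  // CSR and sfence control inputs are registered for two cycles before use,
  // presumably to relax timing on these wide, core-crossing signals.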
  val tlbCsr = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger
  val triggerEn = csrCtrl.trigger_enable
  ifu.io.csrTriggerEnable := VecInit(triggerEn(0), triggerEn(1), triggerEn(6), triggerEn(8))

  // bpu ctrl
  bpu.io.ctrl := csrCtrl.bp_ctrl
  bpu.io.reset_vector := io.reset_vector

  // pmp
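  // Instruction-side PMP/PMA: one checker per port. The first 2 + prefetchPipeNum ports serve
  // the ICache (fetch pipes plus prefetch pipes); the last port serves the IFU's MMIO path.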
  val prefetchPipeNum = ICacheParameters().prefetchPipeNum
  val pmp = Module(new PMP())
  val pmp_check = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec     = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
  (0 until 2 + prefetchPipeNum).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 + prefetchPipeNum).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

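  // Instruction TLB: the first 2 + prefetchPipeNum requestors (ICache fetch and prefetch pipes)
  // are non-blocking, while the last requestor (IFU, which may need to re-translate MMIO fetches)
  // is blocking. Misses go to the shared PTW through a repeater/filter stage.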
  val itlb = Module(new TLB(coreParams.itlbPortNum, nRespDups = 1,
    Seq(false, false) ++ Seq.fill(prefetchPipeNum)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(2 + prefetchPipeNum) zip icache.io.itlb foreach {case (a,b) => a <> b}
  itlb.io.requestor.last <> ifu.io.iTLBInter // mmio may need re-tlb, blocked
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.flushPipe.map(_ := needFlush)

  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  io.ptw <> itlbRepeater1.io.ptw

  icache.io.prefetch <> ftq.io.toPrefetch


  //IFU-Ftq
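  // A fetch request from the FTQ is accepted only when both the IFU and the ICache can take it;
  // the same ready condition is replicated on the toICache channel below so the two pipes stay in step.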
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready :=  ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ftq.io.fromIfu          <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu       <> ftq.io.toBpu
  ftq.io.fromBpu          <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead   <> ifu.io.mmioCommitRead

  //IFU-ICache
  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready :=  ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <>    icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady :=  icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss := icache.io.fetch.topdownItlbMiss
  icache.io.stop := ifu.io.icacheStop

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr.distribute_csr <> DontCare
  io.csrUpdate := DontCare

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := io.fencei

  //IFU-Ibuffer
  ifu.io.toIbuffer    <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq <> ftq.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

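  // Assertion-based consistency checks: keep a shadow copy of the FTQ PC memory and, for each
  // instruction leaving the Ibuffer, derive its FTQ index and the expected next-fetch target.
  // The check* functions below assert that predicted-taken and not-taken branches are followed
  // by instructions with consistent ftqPtr and PC values.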
  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when (ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
                        ftq.io.toBackend.newest_entry_target,
                        checkPcMem(checkTargetIdx(i) + 1.U).startAddr)
  }

  // checkNotTakenConsecutive is left commented out below because a not-taken branch may be the
  // last instruction of its fetch block, in which case the next instruction legitimately comes
  // from the next FTQ entry and the assertion would fire spuriously.
  def checkNotTakenConsecutive = {
    val prevNotTakenValid = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last in the group: if this is a not-taken br, the next instr should have the same ftqPtr
      // for the last instr in the group: record it and check against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when (ibuffer.io.out(i+1).fire) {
          // not the last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i+1), "not-taken br should have same ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevNotTakenValid := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

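  // A predicted-taken branch ends its fetch block, so the instruction that follows it must come
  // from the next FTQ entry, i.e. its ftqPtr must be exactly one larger.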
  def checkTakenNotConsecutive = {
    val prevTakenValid = RegInit(0.B)
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last in the group: if this is a taken br, the next instr should have the next ftqPtr
      // for the last instr in the group: record it and check against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          // not the last br, check now
          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i+1), "taken br should have consecutive ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      // last instr is a br, record its info
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

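  // A not-taken branch falls through, so the next instruction's PC must be pc + 2 (RVC) or pc + 4.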
  def checkNotTakenPC = {
    val prevNotTakenPC = Reg(UInt(VAddrBits.W))
    val prevIsRVC = Reg(Bool())
    val prevNotTakenValid = RegInit(0.B)

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(i+1).bits.pc, "not-taken br should have consecutive pc\n")
        } .otherwise {
          prevNotTakenValid := true.B
          prevIsRVC := ibuffer.io.out(i).bits.pd.isRVC
          prevNotTakenPC := ibuffer.io.out(i).bits.pc
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevNotTakenValid := true.B
      prevIsRVC := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
      prevNotTakenPC := ibuffer.io.out(DecodeWidth - 1).bits.pc
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc, "not-taken br should have consecutive pc\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

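  // A predicted-taken CFI must be followed by the target recorded for its FTQ entry: either the
  // newest entry's target, or the start address of the next FTQ entry in the shadow PC memory.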
  def checkTakenPC = {
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    val prevTakenValid = RegInit(0.B)
    val prevTakenTarget = Wire(UInt(VAddrBits.W))
    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr

    for (i <- 0 until DecodeWidth - 1) {
      when (ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
        when (ibuffer.io.out(i+1).fire) {
          XSError(checkTarget(i) =/= ibuffer.io.out(i+1).bits.pc, "taken instr should follow target pc\n")
        } .otherwise {
          prevTakenValid := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
      prevTakenValid := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
      prevTakenValid := false.B
    }
    when (needFlush) {
      prevTakenValid := false.B
    }
  }

  //checkNotTakenConsecutive
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

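  // Flush the Ibuffer on a backend redirect and pass along the cause of each flush/bubble
  // (control redirect, memory-violation redirect, or a specific predictor miss reported by the
  // FTQ) so the Ibuffer can attribute stalls for top-down analysis.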
  ibuffer.io.flush := needFlush
  ibuffer.io.ControlRedirect := FlushControlRedirect
  ibuffer.io.MemVioRedirect := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble := FlushTAGEMiss
  ibuffer.io.SCMissBubble := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble := FlushITTAGEMiss
  ibuffer.io.RASMissBubble := FlushRASMiss

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss := ftq.io.TAGEMissBubble
  FlushSCMiss := ftq.io.SCMissBubble
  FlushITTAGEMiss := ftq.io.ITTAGEMissBubble
  FlushRASMiss := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason
  dontTouch(io.backend.stallReason)

  instrUncache.io.req   <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

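  // Frontend bubble: the number of decode slots the backend is ready to accept this cycle
  // but the Ibuffer cannot fill with a valid instruction.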
  val frontendBubble = PopCount((0 until DecodeWidth).map(i => io.backend.cfVec(i).ready && !ibuffer.io.out(i).valid))
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)

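  // Performance-event plumbing: the first eight HPM event selectors come from the CSRs via
  // PFEvent, and the perf events of all frontend submodules are gathered into this module's
  // perfEvents for the hierarchical performance monitor.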
  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)

  val allPerfEvents = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerf)
  override val perfEvents = HPerfMonitor(csrevents, allPerfEvents).getPerfEvents
  generatePerfEvent()
}