xref: /XiangShan/src/main/scala/xiangshan/frontend/icache/IPrefetch.scala (revision 415fcbe20489a5e4808fcba63c48175ae1c48e28)
17052722fSJay/***************************************************************************************
27052722fSJay  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
37052722fSJay  * Copyright (c) 2020-2021 Peng Cheng Laboratory
47052722fSJay  *
57052722fSJay  * XiangShan is licensed under Mulan PSL v2.
67052722fSJay  * You can use this software according to the terms and conditions of the Mulan PSL v2.
77052722fSJay  * You may obtain a copy of Mulan PSL v2 at:
87052722fSJay  *          http://license.coscl.org.cn/MulanPSL2
97052722fSJay  *
107052722fSJay  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
117052722fSJay  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
127052722fSJay  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
137052722fSJay  *
147052722fSJay  * See the Mulan PSL v2 for more details.
157052722fSJay  ***************************************************************************************/
167052722fSJay
177052722fSJaypackage xiangshan.frontend.icache
187052722fSJay
197052722fSJayimport chisel3._
207052722fSJayimport chisel3.util._
21cf7d6b7aSMuziimport org.chipsalliance.cde.config.Parameters
22cf7d6b7aSMuziimport utility._
23cf7d6b7aSMuziimport xiangshan.SoftIfetchPrefetchBundle
247052722fSJayimport xiangshan.cache.mmu._
257052722fSJayimport xiangshan.frontend._
267052722fSJay
277052722fSJayabstract class IPrefetchBundle(implicit p: Parameters) extends ICacheBundle
287052722fSJayabstract class IPrefetchModule(implicit p: Parameters) extends ICacheModule
297052722fSJay
302c9f4a9fSxu_zhclass IPrefetchReq(implicit p: Parameters) extends IPrefetchBundle {
312c9f4a9fSxu_zh  val startAddr:        UInt   = UInt(VAddrBits.W)
322c9f4a9fSxu_zh  val nextlineStart:    UInt   = UInt(VAddrBits.W)
332c9f4a9fSxu_zh  val ftqIdx:           FtqPtr = new FtqPtr
342c9f4a9fSxu_zh  val isSoftPrefetch:   Bool   = Bool()
35fbdb359dSMuzi  val backendException: UInt   = UInt(ExceptionType.width.W)
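  // crossCacheline: the MSB of the in-block offset is set, i.e. startAddr falls in the second half of a
  // cacheline; assuming a fetch block spans at most half a cacheline, the request then also touches the
  // next line (nextlineStart), so both ports are needed.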
362c9f4a9fSxu_zh  def crossCacheline:   Bool   = startAddr(blockOffBits - 1) === 1.U
372c9f4a9fSxu_zh
382c9f4a9fSxu_zh  def fromFtqICacheInfo(info: FtqICacheInfo): IPrefetchReq = {
392c9f4a9fSxu_zh    this.startAddr      := info.startAddr
402c9f4a9fSxu_zh    this.nextlineStart  := info.nextlineStart
412c9f4a9fSxu_zh    this.ftqIdx         := info.ftqIdx
422c9f4a9fSxu_zh    this.isSoftPrefetch := false.B
432c9f4a9fSxu_zh    this
442c9f4a9fSxu_zh  }
452c9f4a9fSxu_zh
462c9f4a9fSxu_zh  def fromSoftPrefetch(req: SoftIfetchPrefetchBundle): IPrefetchReq = {
472c9f4a9fSxu_zh    this.startAddr      := req.vaddr
482c9f4a9fSxu_zh    this.nextlineStart  := req.vaddr + (1 << blockOffBits).U
492c9f4a9fSxu_zh    this.ftqIdx         := DontCare
502c9f4a9fSxu_zh    this.isSoftPrefetch := true.B
512c9f4a9fSxu_zh    this
522c9f4a9fSxu_zh  }
532c9f4a9fSxu_zh}
542c9f4a9fSxu_zh
5588895b11Sxu_zhclass IPrefetchIO(implicit p: Parameters) extends IPrefetchBundle {
56b92f8445Sssszwic  // control
57*415fcbe2Sxu_zh  val csr_pf_enable:     Bool = Input(Bool())
58*415fcbe2Sxu_zh  val csr_parity_enable: Bool = Input(Bool())
59*415fcbe2Sxu_zh  val flush:             Bool = Input(Bool())
6058c354d0Sssszwic
61*415fcbe2Sxu_zh  val req:            DecoupledIO[IPrefetchReq]  = Flipped(Decoupled(new IPrefetchReq))
62*415fcbe2Sxu_zh  val flushFromBpu:   BpuFlushInfo               = Flipped(new BpuFlushInfo)
63*415fcbe2Sxu_zh  val itlb:           Vec[TlbRequestIO]          = Vec(PortNumber, new TlbRequestIO)
64*415fcbe2Sxu_zh  val pmp:            Vec[ICachePMPBundle]       = Vec(PortNumber, new ICachePMPBundle)
65*415fcbe2Sxu_zh  val metaRead:       ICacheMetaReqBundle        = new ICacheMetaReqBundle
66*415fcbe2Sxu_zh  val MSHRReq:        DecoupledIO[ICacheMissReq] = DecoupledIO(new ICacheMissReq)
67*415fcbe2Sxu_zh  val MSHRResp:       Valid[ICacheMissResp]      = Flipped(ValidIO(new ICacheMissResp))
68*415fcbe2Sxu_zh  val wayLookupWrite: DecoupledIO[WayLookupInfo] = DecoupledIO(new WayLookupInfo)
697052722fSJay}
707052722fSJay
71cf7d6b7aSMuziclass IPrefetchPipe(implicit p: Parameters) extends IPrefetchModule {
7288895b11Sxu_zh  val io: IPrefetchIO = IO(new IPrefetchIO)
737052722fSJay
74*415fcbe2Sxu_zh  private val (toITLB, fromITLB) = (io.itlb.map(_.req), io.itlb.map(_.resp))
75*415fcbe2Sxu_zh  private val (toPMP, fromPMP)   = (io.pmp.map(_.req), io.pmp.map(_.resp))
76*415fcbe2Sxu_zh  private val (toMeta, fromMeta) = (io.metaRead.toIMeta, io.metaRead.fromIMeta)
77*415fcbe2Sxu_zh  private val (toMSHR, fromMSHR) = (io.MSHRReq, io.MSHRResp)
78*415fcbe2Sxu_zh  private val toWayLookup        = io.wayLookupWrite
797052722fSJay
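  // Per-stage pipeline control wires, driven below: s{0,1,2}_fire marks a request leaving that stage,
  // s{1,2}_ready is the back-pressure towards the previous stage, and s{0,1,2}_flush cancels the
  // request currently held in the corresponding stage.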
80*415fcbe2Sxu_zh  private val s0_fire, s1_fire, s2_fire            = WireInit(false.B)
81*415fcbe2Sxu_zh  private val s1_ready, s2_ready                   = WireInit(false.B)
82*415fcbe2Sxu_zh  private val s0_flush, s1_flush, s2_flush         = WireInit(false.B)
83*415fcbe2Sxu_zh  private val from_bpu_s0_flush, from_bpu_s1_flush = WireInit(false.B)
847052722fSJay
85cb6e5d3cSssszwic  /**
86cb6e5d3cSssszwic    ******************************************************************************
87cb6e5d3cSssszwic    * IPrefetch Stage 0
88b92f8445Sssszwic    * - 1. receive ftq req
89b92f8445Sssszwic    * - 2. send req to ITLB
90b92f8445Sssszwic    * - 3. send req to Meta SRAM
91cb6e5d3cSssszwic    ******************************************************************************
92cb6e5d3cSssszwic    */
93*415fcbe2Sxu_zh  private val s0_valid = io.req.valid
94cb6e5d3cSssszwic
95b92f8445Sssszwic  /**
96b92f8445Sssszwic    ******************************************************************************
97b92f8445Sssszwic    * receive ftq req
98b92f8445Sssszwic    ******************************************************************************
99b92f8445Sssszwic    */
100*415fcbe2Sxu_zh  private val s0_req_vaddr        = VecInit(Seq(io.req.bits.startAddr, io.req.bits.nextlineStart))
101*415fcbe2Sxu_zh  private val s0_req_ftqIdx       = io.req.bits.ftqIdx
102*415fcbe2Sxu_zh  private val s0_isSoftPrefetch   = io.req.bits.isSoftPrefetch
103*415fcbe2Sxu_zh  private val s0_doubleline       = io.req.bits.crossCacheline
104*415fcbe2Sxu_zh  private val s0_req_vSetIdx      = s0_req_vaddr.map(get_idx)
105*415fcbe2Sxu_zh  private val s0_backendException = VecInit(Seq.fill(PortNumber)(io.req.bits.backendException))
1067052722fSJay
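  // An FTQ (hardware) prefetch request is dropped if the BPU overrides this FTQ entry in its stage 2/3
  // redirect; software prefetch requests carry no valid ftqIdx and are never flushed here.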
1072c9f4a9fSxu_zh  from_bpu_s0_flush := !s0_isSoftPrefetch && (io.flushFromBpu.shouldFlushByStage2(s0_req_ftqIdx) ||
1082c9f4a9fSxu_zh    io.flushFromBpu.shouldFlushByStage3(s0_req_ftqIdx))
109b92f8445Sssszwic  s0_flush := io.flush || from_bpu_s0_flush || s1_flush
1107052722fSJay
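  // s0 can leave only when s1, both ITLB request ports and the meta SRAM read port can all accept the
  // request in this cycle, since the request is forwarded to them combinationally from s0.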
111*415fcbe2Sxu_zh  private val s0_can_go = s1_ready && toITLB(0).ready && toITLB(1).ready && toMeta.ready
1122c9f4a9fSxu_zh  io.req.ready := s0_can_go
1137052722fSJay
114b92f8445Sssszwic  s0_fire := s0_valid && s0_can_go && !s0_flush
115cb6e5d3cSssszwic
116cb6e5d3cSssszwic  /**
117cb6e5d3cSssszwic    ******************************************************************************
118cb6e5d3cSssszwic    * IPrefetch Stage 1
119b92f8445Sssszwic    * - 1. Receive resp from ITLB
120b92f8445Sssszwic    * - 2. Receive resp from IMeta and check
121b92f8445Sssszwic    * - 3. Monitor the requests from missUnit to write to SRAM.
122*415fcbe2Sxu_zh    * - 4. Write wayLookup
123cb6e5d3cSssszwic    ******************************************************************************
124cb6e5d3cSssszwic    */
125*415fcbe2Sxu_zh  private val s1_valid =
126*415fcbe2Sxu_zh    generatePipeControl(lastFire = s0_fire, thisFire = s1_fire, thisFlush = s1_flush, lastFlush = false.B)
127cb6e5d3cSssszwic
128*415fcbe2Sxu_zh  private val s1_req_vaddr        = RegEnable(s0_req_vaddr, 0.U.asTypeOf(s0_req_vaddr), s0_fire)
129*415fcbe2Sxu_zh  private val s1_isSoftPrefetch   = RegEnable(s0_isSoftPrefetch, 0.U.asTypeOf(s0_isSoftPrefetch), s0_fire)
130*415fcbe2Sxu_zh  private val s1_doubleline       = RegEnable(s0_doubleline, 0.U.asTypeOf(s0_doubleline), s0_fire)
131*415fcbe2Sxu_zh  private val s1_req_ftqIdx       = RegEnable(s0_req_ftqIdx, 0.U.asTypeOf(s0_req_ftqIdx), s0_fire)
132*415fcbe2Sxu_zh  private val s1_req_vSetIdx      = VecInit(s1_req_vaddr.map(get_idx))
133*415fcbe2Sxu_zh  private val s1_backendException = RegEnable(s0_backendException, 0.U.asTypeOf(s0_backendException), s0_fire)
1347052722fSJay
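  // s1 FSM states (see the switch(state) block below):
  //   m_idle       - pass-through / wait for a new request
  //   m_itlbResend - an ITLB port missed; keep the request and re-send until the translation returns
  //   m_metaResend - ITLB finished but the meta SRAM read port was busy; re-issue the meta read
  //   m_enqWay     - wait until the request can be enqueued into WayLookup
  //   m_enterS2    - WayLookup enqueued; wait for s2 to become ready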
135*415fcbe2Sxu_zh  private val m_idle :: m_itlbResend :: m_metaResend :: m_enqWay :: m_enterS2 :: Nil = Enum(5)
136*415fcbe2Sxu_zh
137*415fcbe2Sxu_zh  private val state      = RegInit(m_idle)
138*415fcbe2Sxu_zh  private val next_state = WireDefault(state)
139*415fcbe2Sxu_zh  private val s0_fire_r  = RegNext(s0_fire)
140b92f8445Sssszwic  dontTouch(state)
141b92f8445Sssszwic  dontTouch(next_state)
142b92f8445Sssszwic  state := next_state
1437052722fSJay
144b92f8445Sssszwic  /**
145b92f8445Sssszwic    ******************************************************************************
146b92f8445Sssszwic    * resend itlb req if miss
147b92f8445Sssszwic    ******************************************************************************
148b92f8445Sssszwic    */
149*415fcbe2Sxu_zh  private val s1_wait_itlb = RegInit(VecInit(Seq.fill(PortNumber)(false.B)))
150b92f8445Sssszwic  (0 until PortNumber).foreach { i =>
151b92f8445Sssszwic    when(s1_flush) {
152b92f8445Sssszwic      s1_wait_itlb(i) := false.B
153b92f8445Sssszwic    }.elsewhen(RegNext(s0_fire) && fromITLB(i).bits.miss) {
154b92f8445Sssszwic      s1_wait_itlb(i) := true.B
155b92f8445Sssszwic    }.elsewhen(s1_wait_itlb(i) && !fromITLB(i).bits.miss) {
156b92f8445Sssszwic      s1_wait_itlb(i) := false.B
157b92f8445Sssszwic    }
158b92f8445Sssszwic  }
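  // s1_need_itlb: ports whose translation is still missing and must be re-sent to the ITLB;
  // tlb_valid_pulse: single-cycle pulse when a port's translation becomes available;
  // tlb_valid_latch: holds that pulse until s1 fires (or is flushed), since s1 may stall for several cycles.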
159*415fcbe2Sxu_zh  private val s1_need_itlb = VecInit(Seq(
160cf7d6b7aSMuzi    (RegNext(s0_fire) || s1_wait_itlb(0)) && fromITLB(0).bits.miss,
161cf7d6b7aSMuzi    (RegNext(s0_fire) || s1_wait_itlb(1)) && fromITLB(1).bits.miss && s1_doubleline
162cf7d6b7aSMuzi  ))
163*415fcbe2Sxu_zh  private val tlb_valid_pulse = VecInit(Seq(
164cf7d6b7aSMuzi    (RegNext(s0_fire) || s1_wait_itlb(0)) && !fromITLB(0).bits.miss,
165cf7d6b7aSMuzi    (RegNext(s0_fire) || s1_wait_itlb(1)) && !fromITLB(1).bits.miss && s1_doubleline
166cf7d6b7aSMuzi  ))
167*415fcbe2Sxu_zh  private val tlb_valid_latch =
168cf7d6b7aSMuzi    VecInit((0 until PortNumber).map(i => ValidHoldBypass(tlb_valid_pulse(i), s1_fire, flush = s1_flush)))
169*415fcbe2Sxu_zh  private val itlb_finish = tlb_valid_latch(0) && (!s1_doubleline || tlb_valid_latch(1))
1707052722fSJay
171*415fcbe2Sxu_zh  (0 until PortNumber).foreach { i =>
172b92f8445Sssszwic    toITLB(i).valid             := s1_need_itlb(i) || (s0_valid && (if (i == 0) true.B else s0_doubleline))
173b92f8445Sssszwic    toITLB(i).bits              := DontCare
174b92f8445Sssszwic    toITLB(i).bits.size         := 3.U
175b92f8445Sssszwic    toITLB(i).bits.vaddr        := Mux(s1_need_itlb(i), s1_req_vaddr(i), s0_req_vaddr(i))
176b92f8445Sssszwic    toITLB(i).bits.debug.pc     := Mux(s1_need_itlb(i), s1_req_vaddr(i), s0_req_vaddr(i))
177b92f8445Sssszwic    toITLB(i).bits.cmd          := TlbCmd.exec
178b92f8445Sssszwic    toITLB(i).bits.no_translate := false.B
179b92f8445Sssszwic  }
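  // ITLB responses are always accepted; a miss is handled by re-sending the request (m_itlbResend)
  // rather than by back-pressure.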
180b92f8445Sssszwic  fromITLB.foreach(_.ready := true.B)
181b92f8445Sssszwic  io.itlb.foreach(_.req_kill := false.B)
1827052722fSJay
183b92f8445Sssszwic  /**
184b92f8445Sssszwic    ******************************************************************************
185b92f8445Sssszwic    * Receive resp from ITLB
186b92f8445Sssszwic    ******************************************************************************
187b92f8445Sssszwic    */
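  // Hold-bypass for the translated paddr: use the ITLB response directly in the cycle it arrives
  // (tlb_valid_pulse) and a holding register afterwards, because s1 may stay valid for multiple cycles.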
188*415fcbe2Sxu_zh  private val s1_req_paddr_wire = VecInit(fromITLB.map(_.bits.paddr(0)))
189*415fcbe2Sxu_zh  private val s1_req_paddr_reg = VecInit((0 until PortNumber).map { i =>
19088895b11Sxu_zh    RegEnable(s1_req_paddr_wire(i), 0.U(PAddrBits.W), tlb_valid_pulse(i))
191*415fcbe2Sxu_zh  })
192*415fcbe2Sxu_zh  private val s1_req_paddr = VecInit((0 until PortNumber).map { i =>
19388895b11Sxu_zh    Mux(tlb_valid_pulse(i), s1_req_paddr_wire(i), s1_req_paddr_reg(i))
194*415fcbe2Sxu_zh  })
195*415fcbe2Sxu_zh  private val s1_req_gpaddr_tmp = VecInit((0 until PortNumber).map { i =>
196cf7d6b7aSMuzi    ResultHoldBypass(
197cf7d6b7aSMuzi      valid = tlb_valid_pulse(i),
198dd980d61SXu, Zefan      // NOTE: we don't use GPAddrBits or XLEN here, refer to ICacheMainPipe.scala L43-48 and PR#3795
199dd980d61SXu, Zefan      init = 0.U(PAddrBitsMax.W),
200cf7d6b7aSMuzi      data = fromITLB(i).bits.gpaddr(0)
201cf7d6b7aSMuzi    )
202*415fcbe2Sxu_zh  })
203*415fcbe2Sxu_zh  private val s1_req_isForVSnonLeafPTE_tmp = VecInit((0 until PortNumber).map { i =>
204cf7d6b7aSMuzi    ResultHoldBypass(
205cf7d6b7aSMuzi      valid = tlb_valid_pulse(i),
206cf7d6b7aSMuzi      init = 0.U.asTypeOf(fromITLB(i).bits.isForVSnonLeafPTE),
207cf7d6b7aSMuzi      data = fromITLB(i).bits.isForVSnonLeafPTE
208cf7d6b7aSMuzi    )
209*415fcbe2Sxu_zh  })
210*415fcbe2Sxu_zh  private val s1_itlb_exception = VecInit((0 until PortNumber).map { i =>
211cf7d6b7aSMuzi    ResultHoldBypass(
212cf7d6b7aSMuzi      valid = tlb_valid_pulse(i),
213cf7d6b7aSMuzi      init = 0.U(ExceptionType.width.W),
214cf7d6b7aSMuzi      data = ExceptionType.fromTlbResp(fromITLB(i).bits)
215cf7d6b7aSMuzi    )
216*415fcbe2Sxu_zh  })
217*415fcbe2Sxu_zh  private val s1_itlb_pbmt = VecInit((0 until PortNumber).map { i =>
218cf7d6b7aSMuzi    ResultHoldBypass(
219cf7d6b7aSMuzi      valid = tlb_valid_pulse(i),
220cf7d6b7aSMuzi      init = 0.U.asTypeOf(fromITLB(i).bits.pbmt(0)),
221cf7d6b7aSMuzi      data = fromITLB(i).bits.pbmt(0)
222cf7d6b7aSMuzi    )
223*415fcbe2Sxu_zh  })
224*415fcbe2Sxu_zh  private val s1_itlb_exception_gpf = VecInit(s1_itlb_exception.map(_ === ExceptionType.gpf))
225b92f8445Sssszwic
22691946104Sxu_zh  /* Select gpaddr with the first gpf
22791946104Sxu_zh   * Note: the backend wants the base guest physical address of a fetch block
22891946104Sxu_zh   *       for port(i), its base gpaddr is actually (gpaddr - i * blocksize)
22991946104Sxu_zh   *       see GPAMem: https://github.com/OpenXiangShan/XiangShan/blob/344cf5d55568dd40cd658a9ee66047a505eeb504/src/main/scala/xiangshan/backend/GPAMem.scala#L33-L34
23091946104Sxu_zh   *       see also: https://github.com/OpenXiangShan/XiangShan/blob/344cf5d55568dd40cd658a9ee66047a505eeb504/src/main/scala/xiangshan/frontend/IFU.scala#L374-L375
23191946104Sxu_zh   */
232*415fcbe2Sxu_zh  private val s1_req_gpaddr = PriorityMuxDefault(
23388895b11Sxu_zh    s1_itlb_exception_gpf zip (0 until PortNumber).map(i => s1_req_gpaddr_tmp(i) - (i << blockOffBits).U),
23491946104Sxu_zh    0.U.asTypeOf(s1_req_gpaddr_tmp(0))
23591946104Sxu_zh  )
23691946104Sxu_zh
237*415fcbe2Sxu_zh  private val s1_req_isForVSnonLeafPTE = PriorityMuxDefault(
238ad415ae0SXiaokun-Pei    s1_itlb_exception_gpf zip s1_req_isForVSnonLeafPTE_tmp,
239ad415ae0SXiaokun-Pei    0.U.asTypeOf(s1_req_isForVSnonLeafPTE_tmp(0))
240ad415ae0SXiaokun-Pei  )
241ad415ae0SXiaokun-Pei
242b92f8445Sssszwic  /**
243b92f8445Sssszwic    ******************************************************************************
244b92f8445Sssszwic    * resend metaArray read req when the itlb miss finishes
245b92f8445Sssszwic    ******************************************************************************
246b92f8445Sssszwic    */
247*415fcbe2Sxu_zh  private val s1_need_meta = ((state === m_itlbResend) && itlb_finish) || (state === m_metaResend)
248b92f8445Sssszwic  toMeta.valid             := s1_need_meta || s0_valid
249b92f8445Sssszwic  toMeta.bits              := DontCare
250b92f8445Sssszwic  toMeta.bits.isDoubleLine := Mux(s1_need_meta, s1_doubleline, s0_doubleline)
251b92f8445Sssszwic
252*415fcbe2Sxu_zh  (0 until PortNumber).foreach { i =>
253b92f8445Sssszwic    toMeta.bits.vSetIdx(i) := Mux(s1_need_meta, s1_req_vSetIdx(i), s0_req_vSetIdx(i))
254cb6e5d3cSssszwic  }
255cb6e5d3cSssszwic
256cb6e5d3cSssszwic  /**
257cb6e5d3cSssszwic    ******************************************************************************
258b92f8445Sssszwic    * Receive resp from IMeta and check
259cb6e5d3cSssszwic    ******************************************************************************
260cb6e5d3cSssszwic    */
261*415fcbe2Sxu_zh  private val s1_req_ptags = VecInit(s1_req_paddr.map(get_phy_tag))
262cb6e5d3cSssszwic
263*415fcbe2Sxu_zh  private val s1_meta_ptags  = fromMeta.tags
264*415fcbe2Sxu_zh  private val s1_meta_valids = fromMeta.entryValid
2659bba777eSssszwic
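  // Compare the physical tag of each port against all nWays tags of its set and return a per-port
  // way mask (at most one bit set unless the meta array is corrupted).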
266*415fcbe2Sxu_zh  private def getWaymask(paddrs: Vec[UInt]): Vec[UInt] = {
26788895b11Sxu_zh    val ptags = paddrs.map(get_phy_tag)
268cf7d6b7aSMuzi    val tag_eq_vec =
269cf7d6b7aSMuzi      VecInit((0 until PortNumber).map(p => VecInit((0 until nWays).map(w => s1_meta_ptags(p)(w) === ptags(p)))))
270*415fcbe2Sxu_zh    val tag_match_vec = VecInit((0 until PortNumber).map { k =>
271cf7d6b7aSMuzi      VecInit(tag_eq_vec(k).zipWithIndex.map { case (way_tag_eq, w) => way_tag_eq && s1_meta_valids(k)(w) })
272*415fcbe2Sxu_zh    })
273b92f8445Sssszwic    val waymasks = VecInit(tag_match_vec.map(_.asUInt))
274b92f8445Sssszwic    waymasks
275cb6e5d3cSssszwic  }
2769bba777eSssszwic
277*415fcbe2Sxu_zh  private val s1_SRAM_waymasks = VecInit((0 until PortNumber).map { port =>
278*415fcbe2Sxu_zh    Mux(tlb_valid_pulse(port), getWaymask(s1_req_paddr_wire)(port), getWaymask(s1_req_paddr_reg)(port))
2795ce94708Sxu_zh  })
280b92f8445Sssszwic
2818966a895Sxu_zh  // select ecc code
2828966a895Sxu_zh  /* NOTE:
2838966a895Sxu_zh   * When ECC check fails, s1_waymasks may be corrupted, so this selected meta_codes may be wrong.
2848966a895Sxu_zh   * However, we can guarantee that the request sent to the l2 cache and the response to the IFU are both correct,
2858966a895Sxu_zh   * since the probability of an abnormal bit flip is very small. Assuming at most 1 bit is wrong:
2868966a895Sxu_zh   * 1. miss -> fake hit: The wrong bit in s1_waymasks was set to true.B, thus selects the wrong meta_codes,
2878966a895Sxu_zh   *                      but we can detect this by checking whether `encodeMetaECC(req_ptags) === meta_codes`.
2888966a895Sxu_zh   * 2. hit -> fake multi-hit: In normal situation, multi-hit never happens, so multi-hit indicates ECC failure,
2898966a895Sxu_zh   *                           we can detect this by checking whether `PopCount(waymasks) <= 1.U`,
2908966a895Sxu_zh   *                           and meta_codes is not important in this situation.
2918966a895Sxu_zh   * 3. hit -> fake miss: We can't detect this, but we can (pre)fetch the correct data from L2 cache, so it's not a problem.
292*415fcbe2Sxu_zh   * 4. hit -> hit / miss -> miss: ECC failure happens in an irrelevant way, so we don't care about it this time.
2938966a895Sxu_zh   */
294*415fcbe2Sxu_zh  private val s1_SRAM_meta_codes = VecInit((0 until PortNumber).map { port =>
2955ce94708Sxu_zh    Mux1H(s1_SRAM_waymasks(port), fromMeta.codes(port))
2968966a895Sxu_zh  })
2978966a895Sxu_zh
298b92f8445Sssszwic  /**
299b92f8445Sssszwic    ******************************************************************************
3005ce94708Sxu_zh    * update waymasks and meta_codes according to MSHR update data
3015ce94708Sxu_zh    ******************************************************************************
3025ce94708Sxu_zh    */
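  // Two cases when the MSHR writes into the same set:
  //  - same ptag: the requested line has just been refilled into fromMSHR.bits.waymask, so adopt that
  //    mask (the request now hits on the refilled way) and recompute the meta code;
  //  - same way but different ptag: the way we hit on is being replaced, so clear the mask
  //    (the request becomes a miss).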
303*415fcbe2Sxu_zh  private def updateMetaInfo(mask: UInt, vSetIdx: UInt, ptag: UInt, code: UInt): (UInt, UInt) = {
3045ce94708Sxu_zh    require(mask.getWidth == nWays)
3055ce94708Sxu_zh    val new_mask  = WireInit(mask)
3065ce94708Sxu_zh    val new_code  = WireInit(code)
3075ce94708Sxu_zh    val valid     = fromMSHR.valid && !fromMSHR.bits.corrupt
3085ce94708Sxu_zh    val vset_same = fromMSHR.bits.vSetIdx === vSetIdx
3095ce94708Sxu_zh    val ptag_same = getPhyTagFromBlk(fromMSHR.bits.blkPaddr) === ptag
3105ce94708Sxu_zh    val way_same  = fromMSHR.bits.waymask === mask
3115ce94708Sxu_zh    when(valid && vset_same) {
3125ce94708Sxu_zh      when(ptag_same) {
3135ce94708Sxu_zh        new_mask := fromMSHR.bits.waymask
3145ce94708Sxu_zh        // also update meta_codes
3155ce94708Sxu_zh        // we have getPhyTagFromBlk(fromMSHR.bits.blkPaddr) === ptag, so we can use ptag directly for better timing
3165ce94708Sxu_zh        new_code := encodeMetaECC(ptag)
3175ce94708Sxu_zh      }.elsewhen(way_same) {
3185ce94708Sxu_zh        new_mask := 0.U
319*415fcbe2Sxu_zh        // we don't care about new_code, since it's not used for a missed request
3205ce94708Sxu_zh      }
3215ce94708Sxu_zh    }
3225ce94708Sxu_zh    (new_mask, new_code)
3235ce94708Sxu_zh  }
3245ce94708Sxu_zh
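  // The meta SRAM responds one cycle after a read is accepted: either the read issued at s0_fire
  // or a re-issued read (s1_need_meta && toMeta.ready).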
325*415fcbe2Sxu_zh  private val s1_SRAM_valid   = s0_fire_r || RegNext(s1_need_meta && toMeta.ready)
326*415fcbe2Sxu_zh  private val s1_MSHR_valid   = fromMSHR.valid && !fromMSHR.bits.corrupt
327*415fcbe2Sxu_zh  private val s1_waymasks     = WireInit(VecInit(Seq.fill(PortNumber)(0.U(nWays.W))))
328*415fcbe2Sxu_zh  private val s1_waymasks_r   = RegEnable(s1_waymasks, 0.U.asTypeOf(s1_waymasks), s1_SRAM_valid || s1_MSHR_valid)
329*415fcbe2Sxu_zh  private val s1_meta_codes   = WireInit(VecInit(Seq.fill(PortNumber)(0.U(ICacheMetaCodeBits.W))))
330*415fcbe2Sxu_zh  private val s1_meta_codes_r = RegEnable(s1_meta_codes, 0.U.asTypeOf(s1_meta_codes), s1_SRAM_valid || s1_MSHR_valid)
3315ce94708Sxu_zh
3325ce94708Sxu_zh  // update waymasks and meta_codes
3335ce94708Sxu_zh  (0 until PortNumber).foreach { i =>
3345ce94708Sxu_zh    val old_waymask    = Mux(s1_SRAM_valid, s1_SRAM_waymasks(i), s1_waymasks_r(i))
3355ce94708Sxu_zh    val old_meta_codes = Mux(s1_SRAM_valid, s1_SRAM_meta_codes(i), s1_meta_codes_r(i))
336*415fcbe2Sxu_zh    val new_info       = updateMetaInfo(old_waymask, s1_req_vSetIdx(i), s1_req_ptags(i), old_meta_codes)
3375ce94708Sxu_zh    s1_waymasks(i)   := new_info._1
3385ce94708Sxu_zh    s1_meta_codes(i) := new_info._2
3395ce94708Sxu_zh  }
3405ce94708Sxu_zh
3415ce94708Sxu_zh  /**
3425ce94708Sxu_zh    ******************************************************************************
343*415fcbe2Sxu_zh    * send enqueue req to WayLookup
344b92f8445Sssszwic    ******************************************************************************
345b92f8445Sssszwic    */
346b92f8445Sssszwic  // Disallow enqueuing wayLookup when SRAM write occurs.
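  // Enqueue as soon as the translation is finished (m_idle) or when retrying in m_enqWay, but not while
  // the MSHR is updating the meta array (fromMSHR.valid), so that WayLookup does not record a waymask
  // that is being updated in the same cycle; soft prefetches are not enqueued since no IFU fetch consumes them.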
3472c9f4a9fSxu_zh  toWayLookup.valid := ((state === m_enqWay) || ((state === m_idle) && itlb_finish)) &&
3482c9f4a9fSxu_zh    !s1_flush && !fromMSHR.valid && !s1_isSoftPrefetch // do not enqueue soft prefetch
349b92f8445Sssszwic  toWayLookup.bits.vSetIdx           := s1_req_vSetIdx
350b92f8445Sssszwic  toWayLookup.bits.waymask           := s1_waymasks
351b92f8445Sssszwic  toWayLookup.bits.ptag              := s1_req_ptags
352b92f8445Sssszwic  toWayLookup.bits.gpaddr            := s1_req_gpaddr
353ad415ae0SXiaokun-Pei  toWayLookup.bits.isForVSnonLeafPTE := s1_req_isForVSnonLeafPTE
3548966a895Sxu_zh  toWayLookup.bits.meta_codes        := s1_meta_codes
3551a5af821Sxu_zh  (0 until PortNumber).foreach { i =>
356*415fcbe2Sxu_zh    // the exception in the first line is always valid; in the second line it is valid only for a doubleline request
357*415fcbe2Sxu_zh    val excpValid = if (i == 0) true.B else s1_doubleline
358*415fcbe2Sxu_zh    // Send s1_itlb_exception to WayLookup (instead of s1_exception_out) for better timing.
359*415fcbe2Sxu_zh    // Will check pmp again in mainPipe
36088895b11Sxu_zh    toWayLookup.bits.itlb_exception(i) := Mux(excpValid, s1_itlb_exception(i), ExceptionType.none)
361002c10a4SYanqin Li    toWayLookup.bits.itlb_pbmt(i)      := Mux(excpValid, s1_itlb_pbmt(i), Pbmt.pma)
3621a5af821Sxu_zh  }
363b92f8445Sssszwic
364*415fcbe2Sxu_zh  private val s1_waymasks_vec = s1_waymasks.map(_.asTypeOf(Vec(nWays, Bool())))
365b92f8445Sssszwic  when(toWayLookup.fire) {
366cf7d6b7aSMuzi    assert(
367cf7d6b7aSMuzi      PopCount(s1_waymasks_vec(0)) <= 1.U && (PopCount(s1_waymasks_vec(1)) <= 1.U || !s1_doubleline),
368*415fcbe2Sxu_zh      "Multi-hit:\nport0: count=%d ptag=0x%x vSet=0x%x vaddr=0x%x\nport1: count=%d ptag=0x%x vSet=0x%x vaddr=0x%x",
369cf7d6b7aSMuzi      PopCount(s1_waymasks_vec(0)) > 1.U,
370cf7d6b7aSMuzi      s1_req_ptags(0),
371cf7d6b7aSMuzi      get_idx(s1_req_vaddr(0)),
372cf7d6b7aSMuzi      s1_req_vaddr(0),
373cf7d6b7aSMuzi      PopCount(s1_waymasks_vec(1)) > 1.U && s1_doubleline,
374cf7d6b7aSMuzi      s1_req_ptags(1),
375cf7d6b7aSMuzi      get_idx(s1_req_vaddr(1)),
376cf7d6b7aSMuzi      s1_req_vaddr(1)
377cf7d6b7aSMuzi    )
378b92f8445Sssszwic  }
379b92f8445Sssszwic
380b92f8445Sssszwic  /**
381b92f8445Sssszwic    ******************************************************************************
382b92f8445Sssszwic    * PMP check
383b92f8445Sssszwic    ******************************************************************************
384b92f8445Sssszwic    */
38588895b11Sxu_zh  toPMP.zipWithIndex.foreach { case (p, i) =>
38688895b11Sxu_zh    // if itlb has exception, paddr can be invalid, therefore pmp check can be skipped
387dd02bc3fSxu_zh    p.valid     := s1_valid // !ExceptionType.hasException(s1_itlb_exception(i))
388b92f8445Sssszwic    p.bits.addr := s1_req_paddr(i)
389*415fcbe2Sxu_zh    p.bits.size := 3.U
390b92f8445Sssszwic    p.bits.cmd  := TlbCmd.exec
391b92f8445Sssszwic  }
392*415fcbe2Sxu_zh  private val s1_pmp_exception = VecInit(fromPMP.map(ExceptionType.fromPMPResp))
393*415fcbe2Sxu_zh  private val s1_pmp_mmio      = VecInit(fromPMP.map(_.mmio))
39488895b11Sxu_zh
3958966a895Sxu_zh  // merge s1 backend/itlb/pmp exceptions; the backend exception has the highest priority, then itlb, then pmp
3968966a895Sxu_zh  // for timing consideration, meta_corrupt is not merged, and it will NOT cancel prefetch
397*415fcbe2Sxu_zh  private val s1_exception_out = ExceptionType.merge(
398fbdb359dSMuzi    s1_backendException,
399f80535c3Sxu_zh    s1_itlb_exception,
4008966a895Sxu_zh    s1_pmp_exception
401f80535c3Sxu_zh  )
402b92f8445Sssszwic
403002c10a4SYanqin Li  // merge pmp mmio and itlb pbmt
404*415fcbe2Sxu_zh  private val s1_mmio = VecInit((s1_pmp_mmio zip s1_itlb_pbmt).map { case (mmio, pbmt) =>
405002c10a4SYanqin Li    mmio || Pbmt.isUncache(pbmt)
406002c10a4SYanqin Li  })
407002c10a4SYanqin Li
408b92f8445Sssszwic  /**
409b92f8445Sssszwic    ******************************************************************************
410b92f8445Sssszwic    * state machine
411b92f8445Sssszwic    ******************************************************************************
412b92f8445Sssszwic    */
413b92f8445Sssszwic
414b92f8445Sssszwic  switch(state) {
415b92f8445Sssszwic    is(m_idle) {
4162c9f4a9fSxu_zh      when(s1_valid) {
4172c9f4a9fSxu_zh        when(!itlb_finish) {
418b92f8445Sssszwic          next_state := m_itlbResend
4198c57174eSxu_zh        }.elsewhen(!toWayLookup.fire) { // itlb_finish
420b92f8445Sssszwic          next_state := m_enqWay
4218c57174eSxu_zh        }.elsewhen(!s2_ready) { // itlb_finish && toWayLookup.fire
422b92f8445Sssszwic          next_state := m_enterS2
4232c9f4a9fSxu_zh        } // .otherwise { next_state := m_idle }
4242c9f4a9fSxu_zh      }   // .otherwise { next_state := m_idle }  // !s1_valid
425b92f8445Sssszwic    }
426b92f8445Sssszwic    is(m_itlbResend) {
4272c9f4a9fSxu_zh      when(itlb_finish) {
4282c9f4a9fSxu_zh        when(!toMeta.ready) {
429b92f8445Sssszwic          next_state := m_metaResend
4308c57174eSxu_zh        }.otherwise { // toMeta.ready
431b92f8445Sssszwic          next_state := m_enqWay
432b92f8445Sssszwic        }
4332c9f4a9fSxu_zh      } // .otherwise { next_state := m_itlbResend }  // !itlb_finish
434b92f8445Sssszwic    }
435b92f8445Sssszwic    is(m_metaResend) {
436b92f8445Sssszwic      when(toMeta.ready) {
437b92f8445Sssszwic        next_state := m_enqWay
4382c9f4a9fSxu_zh      } // .otherwise { next_state := m_metaResend }  // !toMeta.ready
439b92f8445Sssszwic    }
440b92f8445Sssszwic    is(m_enqWay) {
4418c57174eSxu_zh      when(toWayLookup.fire || s1_isSoftPrefetch) {
4428c57174eSxu_zh        when(!s2_ready) {
443b92f8445Sssszwic          next_state := m_enterS2
4448c57174eSxu_zh        }.otherwise { // s2_ready
445b92f8445Sssszwic          next_state := m_idle
446b92f8445Sssszwic        }
4478c57174eSxu_zh      } // .otherwise { next_state := m_enqWay }
448b92f8445Sssszwic    }
449b92f8445Sssszwic    is(m_enterS2) {
450b92f8445Sssszwic      when(s2_ready) {
451b92f8445Sssszwic        next_state := m_idle
452b92f8445Sssszwic      }
453b92f8445Sssszwic    }
454b92f8445Sssszwic  }
455b92f8445Sssszwic
456b92f8445Sssszwic  when(s1_flush) {
457b92f8445Sssszwic    next_state := m_idle
458b92f8445Sssszwic  }
459b92f8445Sssszwic
460b92f8445Sssszwic  /** Stage 1 control */
4612c9f4a9fSxu_zh  from_bpu_s1_flush := s1_valid && !s1_isSoftPrefetch && io.flushFromBpu.shouldFlushByStage3(s1_req_ftqIdx)
462b92f8445Sssszwic  s1_flush          := io.flush || from_bpu_s1_flush
463b92f8445Sssszwic
464b92f8445Sssszwic  s1_ready := next_state === m_idle
465400391a3Sxu_zh  s1_fire  := (next_state === m_idle) && s1_valid && !s1_flush // used to clear s1_valid & tlb_valid_latch
466*415fcbe2Sxu_zh  private val s1_real_fire = s1_fire && io.csr_pf_enable // the actual "s1 fire": s1 enters s2 only when prefetching is enabled
467b92f8445Sssszwic
468b92f8445Sssszwic  /**
469b92f8445Sssszwic    ******************************************************************************
470b92f8445Sssszwic    * IPrefetch Stage 2
471b92f8445Sssszwic    * - 1. Monitor the requests from missUnit to write to SRAM.
472b92f8445Sssszwic    * - 2. send req to missUnit
473b92f8445Sssszwic    ******************************************************************************
474b92f8445Sssszwic    */
475*415fcbe2Sxu_zh  private val s2_valid =
476cf7d6b7aSMuzi    generatePipeControl(lastFire = s1_real_fire, thisFire = s2_fire, thisFlush = s2_flush, lastFlush = false.B)
477b92f8445Sssszwic
478*415fcbe2Sxu_zh  private val s2_req_vaddr      = RegEnable(s1_req_vaddr, 0.U.asTypeOf(s1_req_vaddr), s1_real_fire)
479*415fcbe2Sxu_zh  private val s2_isSoftPrefetch = RegEnable(s1_isSoftPrefetch, 0.U.asTypeOf(s1_isSoftPrefetch), s1_real_fire)
480*415fcbe2Sxu_zh  private val s2_doubleline     = RegEnable(s1_doubleline, 0.U.asTypeOf(s1_doubleline), s1_real_fire)
481*415fcbe2Sxu_zh  private val s2_req_paddr      = RegEnable(s1_req_paddr, 0.U.asTypeOf(s1_req_paddr), s1_real_fire)
482*415fcbe2Sxu_zh  private val s2_exception =
483cf7d6b7aSMuzi    RegEnable(s1_exception_out, 0.U.asTypeOf(s1_exception_out), s1_real_fire) // includes itlb/pmp exception
484*415fcbe2Sxu_zh  // disabled for timing consideration
485*415fcbe2Sxu_zh// private val s2_exception_in =
486*415fcbe2Sxu_zh//   RegEnable(s1_exception_out, 0.U.asTypeOf(s1_exception_out), s1_real_fire)
487*415fcbe2Sxu_zh  private val s2_mmio     = RegEnable(s1_mmio, 0.U.asTypeOf(s1_mmio), s1_real_fire)
488*415fcbe2Sxu_zh  private val s2_waymasks = RegEnable(s1_waymasks, 0.U.asTypeOf(s1_waymasks), s1_real_fire)
489*415fcbe2Sxu_zh  // disabled for timing consideration
490*415fcbe2Sxu_zh// private val s2_meta_codes   = RegEnable(s1_meta_codes, 0.U.asTypeOf(s1_meta_codes), s1_real_fire)
491b92f8445Sssszwic
492*415fcbe2Sxu_zh  private val s2_req_vSetIdx = s2_req_vaddr.map(get_idx)
493*415fcbe2Sxu_zh  private val s2_req_ptags   = s2_req_paddr.map(get_phy_tag)
494b92f8445Sssszwic
4958966a895Sxu_zh  // disabled for timing consideration
4968966a895Sxu_zh//  // do metaArray ECC check
4978966a895Sxu_zh//  val s2_meta_corrupt = VecInit((s2_req_ptags zip s2_meta_codes zip s2_waymasks).map{ case ((meta, code), waymask) =>
4988966a895Sxu_zh//    val hit_num = PopCount(waymask)
4998966a895Sxu_zh//    // NOTE: if not hit, encodeMetaECC(meta) =/= code can also be true, but we don't care about it
5008966a895Sxu_zh//    (encodeMetaECC(meta) =/= code && hit_num === 1.U) ||  // hit one way, but parity code does not match, ECC failure
501*415fcbe2Sxu_zh//      hit_num > 1.U                                       // hit multi-way, must be an ECC failure
5028966a895Sxu_zh//  })
5038966a895Sxu_zh//
5048966a895Sxu_zh//  // generate exception
5058966a895Sxu_zh//  val s2_meta_exception = VecInit(s2_meta_corrupt.map(ExceptionType.fromECC(io.csr_parity_enable, _)))
5068966a895Sxu_zh//
5078966a895Sxu_zh//  // merge meta exception and itlb/pmp exception
5088966a895Sxu_zh//  val s2_exception = ExceptionType.merge(s2_exception_in, s2_meta_exception)
5098966a895Sxu_zh
510b92f8445Sssszwic  /**
511b92f8445Sssszwic    ******************************************************************************
512b92f8445Sssszwic    * Monitor the requests from missUnit to write to SRAM
513b92f8445Sssszwic    ******************************************************************************
514b92f8445Sssszwic    */
515b808ac73Sxu_zh
516b808ac73Sxu_zh  /* NOTE: If fromMSHR.bits.corrupt, we should set s2_MSHR_hits to false.B, and send prefetch requests again.
517b808ac73Sxu_zh   * This is the opposite of how mainPipe handles fromMSHR.bits.corrupt,
518b808ac73Sxu_zh   *   in which we should set s2_MSHR_hits to true.B, and send error to ifu.
519b808ac73Sxu_zh   */
520*415fcbe2Sxu_zh  private val s2_MSHR_match = VecInit((0 until PortNumber).map { i =>
521b808ac73Sxu_zh    (s2_req_vSetIdx(i) === fromMSHR.bits.vSetIdx) &&
522b92f8445Sssszwic    (s2_req_ptags(i) === getPhyTagFromBlk(fromMSHR.bits.blkPaddr)) &&
523b808ac73Sxu_zh    s2_valid && fromMSHR.valid && !fromMSHR.bits.corrupt
524*415fcbe2Sxu_zh  })
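  // The MSHR response is a single-cycle pulse while s2 may wait several cycles, so latch the hit
  // per port until s2 fires or is flushed.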
525*415fcbe2Sxu_zh  private val s2_MSHR_hits = (0 until PortNumber).map(i => ValidHoldBypass(s2_MSHR_match(i), s2_fire || s2_flush))
526b92f8445Sssszwic
527*415fcbe2Sxu_zh  private val s2_SRAM_hits = s2_waymasks.map(_.orR)
528*415fcbe2Sxu_zh  private val s2_hits      = VecInit((0 until PortNumber).map(i => s2_MSHR_hits(i) || s2_SRAM_hits(i)))
529b808ac73Sxu_zh
530f80535c3Sxu_zh  /* s2_exception includes itlb pf/gpf/af, pmp af and meta corruption (af), none of which should be prefetched
53188895b11Sxu_zh   * mmio should not be prefetched either
532f80535c3Sxu_zh   * also, if a previous port has an exception, the latter port should not be prefetched
53388895b11Sxu_zh   */
534*415fcbe2Sxu_zh  private val s2_miss = VecInit((0 until PortNumber).map { i =>
535b808ac73Sxu_zh    !s2_hits(i) && (if (i == 0) true.B else s2_doubleline) &&
536dd02bc3fSxu_zh    !ExceptionType.hasException(s2_exception.take(i + 1)) &&
53788895b11Sxu_zh    s2_mmio.take(i + 1).map(!_).reduce(_ && _)
538b808ac73Sxu_zh  })
539b92f8445Sssszwic
540b92f8445Sssszwic  /**
541b92f8445Sssszwic    ******************************************************************************
542b92f8445Sssszwic    * send req to missUnit
543b92f8445Sssszwic    ******************************************************************************
544b92f8445Sssszwic    */
545*415fcbe2Sxu_zh  private val toMSHRArbiter = Module(new Arbiter(new ICacheMissReq, PortNumber))
546b92f8445Sssszwic
547b92f8445Sssszwic  // To avoid sending duplicate requests.
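  // has_send(i) is cleared when a new request enters s2 (s1_real_fire) and set once port i's miss request
  // has been accepted by the arbiter, so a port is not re-requested while s2 waits for the other port.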
548*415fcbe2Sxu_zh  private val has_send = RegInit(VecInit(Seq.fill(PortNumber)(false.B)))
549b92f8445Sssszwic  (0 until PortNumber).foreach { i =>
550400391a3Sxu_zh    when(s1_real_fire) {
551b92f8445Sssszwic      has_send(i) := false.B
552b92f8445Sssszwic    }.elsewhen(toMSHRArbiter.io.in(i).fire) {
553b92f8445Sssszwic      has_send(i) := true.B
554b92f8445Sssszwic    }
555b92f8445Sssszwic  }
556b92f8445Sssszwic
557*415fcbe2Sxu_zh  (0 until PortNumber).foreach { i =>
558b92f8445Sssszwic    toMSHRArbiter.io.in(i).valid         := s2_valid && s2_miss(i) && !has_send(i)
559b92f8445Sssszwic    toMSHRArbiter.io.in(i).bits.blkPaddr := getBlkAddr(s2_req_paddr(i))
560b92f8445Sssszwic    toMSHRArbiter.io.in(i).bits.vSetIdx  := s2_req_vSetIdx(i)
561b92f8445Sssszwic  }
562b92f8445Sssszwic
563b92f8445Sssszwic  toMSHR <> toMSHRArbiter.io.out
564b92f8445Sssszwic
565b92f8445Sssszwic  s2_flush := io.flush
566b92f8445Sssszwic
5672196d1caSxu_zh  // toMSHRArbiter.io.in(i).fire is not used here for timing consideration
568*415fcbe2Sxu_zh// private val s2_finish =
569*415fcbe2Sxu_zh//   (0 until PortNumber).map(i => has_send(i) || !s2_miss(i) || toMSHRArbiter.io.in(i).fire).reduce(_ && _)
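  // Consequence: a missing port only counts as finished one cycle after its MSHR request fires
  // (when has_send(i) is set), trading one cycle of latency for better timing.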
570*415fcbe2Sxu_zh  private val s2_finish = (0 until PortNumber).map(i => has_send(i) || !s2_miss(i)).reduce(_ && _)
571b92f8445Sssszwic  s2_ready := s2_finish || !s2_valid
572b92f8445Sssszwic  s2_fire  := s2_valid && s2_finish && !s2_flush
5739bba777eSssszwic
574cb6e5d3cSssszwic  /** PerfAccumulate */
5752c9f4a9fSxu_zh  // the number of bpu flushes
5762c9f4a9fSxu_zh  XSPerfAccumulate("bpu_s0_flush", from_bpu_s0_flush)
5772c9f4a9fSxu_zh  XSPerfAccumulate("bpu_s1_flush", from_bpu_s1_flush)
5782c9f4a9fSxu_zh  // the number of prefetch requests received from the ftq or the backend (software prefetch)
5792c9f4a9fSxu_zh//  XSPerfAccumulate("prefetch_req_receive", io.req.fire)
5802c9f4a9fSxu_zh  XSPerfAccumulate("prefetch_req_receive_hw", io.req.fire && !io.req.bits.isSoftPrefetch)
5812c9f4a9fSxu_zh  XSPerfAccumulate("prefetch_req_receive_sw", io.req.fire && io.req.bits.isSoftPrefetch)
582b92f8445Sssszwic  // the number of prefetch requests sent to missUnit
5832c9f4a9fSxu_zh//  XSPerfAccumulate("prefetch_req_send", toMSHR.fire)
5842c9f4a9fSxu_zh  XSPerfAccumulate("prefetch_req_send_hw", toMSHR.fire && !s2_isSoftPrefetch)
5852c9f4a9fSxu_zh  XSPerfAccumulate("prefetch_req_send_sw", toMSHR.fire && s2_isSoftPrefetch)
586b92f8445Sssszwic  XSPerfAccumulate("to_missUnit_stall", toMSHR.valid && !toMSHR.ready)
587cf7d6b7aSMuzi
588cb6e5d3cSssszwic  /**
589cb6e5d3cSssszwic    * Count the number of requests that are filtered for various reasons.
590cb6e5d3cSssszwic    * The number of discarded prefetches counted by the performance accumulators may be
591*415fcbe2Sxu_zh    * a little larger than the number actually discarded, because a cancelled request
592cb6e5d3cSssszwic    * can have multiple discard reasons at the same time.
593cb6e5d3cSssszwic    */
594b92f8445Sssszwic  // discard prefetch request by itlb exception
595b92f8445Sssszwic  // XSPerfAccumulate("fdip_prefetch_discard_by_tlb_except",  p1_discard && p1_tlb_except)
596b92f8445Sssszwic  // // discard prefetch request by hit icache SRAM
597b92f8445Sssszwic  // XSPerfAccumulate("fdip_prefetch_discard_by_hit_cache",   p2_discard && p1_meta_hit)
598*415fcbe2Sxu_zh  // // discard prefetch request by hit write SRAM
599*415fcbe2Sxu_zh  // XSPerfAccumulate("fdip_prefetch_discard_by_p1_monitor",  p1_discard && p1_monitor_hit)
600b92f8445Sssszwic  // // discard prefetch request by pmp except or mmio
601b92f8445Sssszwic  // XSPerfAccumulate("fdip_prefetch_discard_by_pmp",         p2_discard && p2_pmp_except)
602b92f8445Sssszwic  // // discard prefetch request by hit mainPipe info
603b92f8445Sssszwic  // // XSPerfAccumulate("fdip_prefetch_discard_by_mainPipe",    p2_discard && p2_mainPipe_hit)
6047052722fSJay}