// xref: /XiangShan/src/main/scala/xiangshan/frontend/icache/ICacheMainPipe.scala (revision c3b763d06258ce632f3eb5ffd9ad985607c041fb)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.tilelink.ClientStates
import xiangshan._
import xiangshan.cache.mmu._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

class ICacheMainPipeReq(implicit p: Parameters) extends ICacheBundle
{
  val vaddr  = UInt(VAddrBits.W)
  def vsetIdx = get_idx(vaddr)
}

class ICacheMainPipeResp(implicit p: Parameters) extends ICacheBundle
{
  val vaddr    = UInt(VAddrBits.W)
  val readData = UInt(blockBits.W)
  val paddr    = UInt(PAddrBits.W)
  val tlbExcp  = new Bundle{
    val pageFault = Bool()
    val accessFault = Bool()
    val mmio = Bool()
  }
}

class ICacheMainPipeBundle(implicit p: Parameters) extends ICacheBundle
{
  val req  = Flipped(DecoupledIO(new ICacheMainPipeReq))
  val resp = ValidIO(new ICacheMainPipeResp)
}

class ICacheMetaReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIMeta       = Decoupled(new ICacheReadBundle)
  val fromIMeta     = Input(new ICacheMetaRespBundle)
}

class ICacheDataReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIData       = Decoupled(new ICacheReadBundle)
  val fromIData     = Input(new ICacheDataRespBundle)
}

class ICacheMSHRBundle(implicit p: Parameters) extends ICacheBundle{
  val toMSHR        = Decoupled(new ICacheMissReq)
  val fromMSHR      = Flipped(ValidIO(new ICacheMissResp))
}

class ICachePMPBundle(implicit p: Parameters) extends ICacheBundle{
  val req  = Valid(new PMPReqBundle())
  val resp = Input(new PMPRespBundle())
}

class ICachePerfInfo(implicit p: Parameters) extends ICacheBundle{
  val only_0_hit     = Bool()
  val only_0_miss    = Bool()
  val hit_0_hit_1    = Bool()
  val hit_0_miss_1   = Bool()
  val miss_0_hit_1   = Bool()
  val miss_0_miss_1  = Bool()
  val hit_0_except_1 = Bool()
  val miss_0_except_1 = Bool()
  val except_0       = Bool()
  val bank_hit       = Vec(2,Bool())
  val hit            = Bool()
}

class ICacheMainPipeInterface(implicit p: Parameters) extends ICacheBundle {
  /*** internal interface ***/
  val metaArray   = new ICacheMetaReqBundle
  val dataArray   = new ICacheDataReqBundle
  val mshr        = Vec(PortNumber, new ICacheMSHRBundle)
  val errors      = Output(Vec(PortNumber, new L1CacheErrorInfo))
  /*** outside interface ***/
  val fetch       = Vec(PortNumber, new ICacheMainPipeBundle)
  val pmp         = Vec(PortNumber, new ICachePMPBundle)
  val itlb        = Vec(PortNumber, new TlbRequestIO)
  val respStall   = Input(Bool())
  val perfInfo = Output(new ICachePerfInfo)

  val prefetchEnable = Output(Bool())
  val prefetchDisable = Output(Bool())
  val csr_parity_enable = Input(Bool())

}

class ICacheMainPipe(implicit p: Parameters) extends ICacheModule
{
  val io = IO(new ICacheMainPipeInterface)

  /** Input/Output port */
  val (fromIFU, toIFU)    = (io.fetch.map(_.req), io.fetch.map(_.resp))
  val (toMeta, metaResp)  = (io.metaArray.toIMeta, io.metaArray.fromIMeta)
  val (toData, dataResp)  = (io.dataArray.toIData,  io.dataArray.fromIData)
  val (toMSHR, fromMSHR)  = (io.mshr.map(_.toMSHR), io.mshr.map(_.fromMSHR))
  val (toITLB, fromITLB)  = (io.itlb.map(_.req), io.itlb.map(_.resp))
  val (toPMP,  fromPMP)   = (io.pmp.map(_.req), io.pmp.map(_.resp))
  io.itlb.foreach(_.req_kill := false.B)

  /** pipeline control signal */
  val s1_ready, s2_ready = Wire(Bool())
  val s0_fire,  s1_fire , s2_fire  = Wire(Bool())

  val missSwitchBit = RegInit(false.B)

  /** replacement status register */
  val touch_sets = Seq.fill(2)(Wire(Vec(2, UInt(log2Ceil(nSets/2).W))))
  val touch_ways = Seq.fill(2)(Wire(Vec(2, Valid(UInt(log2Ceil(nWays).W)))) )
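  // Each port keeps two (set, way) touch records for the replacement policy:
  // entry 0 is written on a hit (the matching way), entry 1 on a miss (the chosen victim way).
  // They are connected to the replacers here and driven from stage 2 further below.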

  /**
    ******************************************************************************
    * ICache Stage 0
    * - send req to ITLB and wait for tlb miss fixing
    * - send req to Meta/Data SRAM
    ******************************************************************************
    */

  /** s0 control */
  val s0_valid       = fromIFU.map(_.valid).reduce(_||_)
  val s0_req_vaddr   = VecInit(fromIFU.map(_.bits.vaddr))
  val s0_req_vsetIdx = VecInit(fromIFU.map(_.bits.vsetIdx))
  val s0_only_first  = fromIFU(0).valid && !fromIFU(1).valid
  val s0_double_line = fromIFU(0).valid && fromIFU(1).valid

  val s0_final_valid       = s0_valid
  val s0_final_vaddr   = s0_req_vaddr
  val s0_final_vsetIdx = s0_req_vsetIdx
  val s0_final_only_first  = s0_only_first
  val s0_final_double_line = s0_double_line
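  // At this revision the s0_final_* signals are straight copies of the s0 request
  // (they look like the remnant of a removed tlb_slot path, see the commented-out
  // lines below); they are what feeds the SRAM requests and the s1 registers.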

  /** SRAM request */
  val fetch_req = List(toMeta, toData)
  for(i <- 0 until 2) {
    // fetch_req(i).valid             := (s0_valid || tlb_slot.valid) && !missSwitchBit
    fetch_req(i).valid             := s0_valid && !missSwitchBit
    fetch_req(i).bits.isDoubleLine := s0_final_double_line
    fetch_req(i).bits.vSetIdx      := s0_final_vsetIdx
  }

  /** s0 tlb **/
  toITLB(0).valid         := s0_valid
  toITLB(0).bits.size     := 3.U // TODO: fix the size
  toITLB(0).bits.vaddr    := s0_req_vaddr(0)
  toITLB(0).bits.debug.pc := s0_req_vaddr(0)

  toITLB(1).valid         := s0_valid && s0_double_line
  toITLB(1).bits.size     := 3.U // TODO: fix the size
  toITLB(1).bits.vaddr    := s0_req_vaddr(1)
  toITLB(1).bits.debug.pc := s0_req_vaddr(1)

  toITLB.map{port =>
    port.bits.cmd                 := TlbCmd.exec
    port.bits.debug.robIdx        := DontCare
    port.bits.debug.isFirstIssue  := DontCare
  }

  /** ITLB & ICACHE sync case
   * When the icache is not ready but the itlb is ready, the request still takes the itlb port
   * (the itlb is non-blocking), and the itlb then deasserts ready, so it gets wrongly blocked.
   * Solution: maybe give the itlb a signal telling it whether the request actually takes the slot?
   */

  val itlb_can_go    = toITLB(0).ready && toITLB(1).ready
  val icache_can_go  = fetch_req(0).ready && fetch_req(1).ready
  val pipe_can_go    = !missSwitchBit && s1_ready
  val s0_can_go      = itlb_can_go && icache_can_go && pipe_can_go
  val s0_fetch_fire  = s0_valid && s0_can_go
  s0_fire        := s0_fetch_fire
  toITLB.map{port => port.bits.kill := !icache_can_go || !pipe_can_go}
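  // This kill is the "solution" from the note above: the request is cancelled inside the itlb
  // whenever the SRAMs or the pipeline cannot accept it, so the non-blocking itlb does not
  // stay occupied by a request that never really entered the pipeline.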

  //TODO: fix GTimer() condition
  fromIFU.map(_.ready := s0_can_go) //&& GTimer() > 500.U )

  /**
    ******************************************************************************
    * ICache Stage 1
    * - get tlb resp data (exception info and physical addresses)
    * - get Meta/Data SRAM read responses (latched for pipeline stop)
    * - tag compare/hit check
    ******************************************************************************
    */

  /** s1 control */

  val s1_valid = generatePipeControl(lastFire = s0_fire, thisFire = s1_fire, thisFlush = false.B, lastFlush = false.B)

  val s1_req_vaddr   = RegEnable(s0_final_vaddr, s0_fire)
  val s1_req_vsetIdx = RegEnable(s0_final_vsetIdx, s0_fire)
  val s1_only_first  = RegEnable(s0_final_only_first, s0_fire)
  val s1_double_line = RegEnable(s0_final_double_line, s0_fire)
  //val s1_tlb_miss    = RegEnable(tlb_slot.valid, s0_fire)

  /** tlb response latch for pipeline stop */
  val tlb_back = fromITLB.map(_.fire())
  val tlb_need_back = VecInit((0 until PortNumber).map(i => ValidHold(s0_fire && toITLB(i).fire(), s1_fire, false.B)))
  val tlb_already_recv = RegInit(VecInit(Seq.fill(PortNumber)(false.B)))
  val tlb_ready_recv = VecInit((0 until PortNumber).map(i => RegNext(s0_fire, false.B) || (s1_valid && !tlb_already_recv(i))))
  val tlb_resp_valid = Wire(Vec(2, Bool()))
  for (i <- 0 until PortNumber) {
    tlb_resp_valid(i) := tlb_already_recv(i) || (tlb_ready_recv(i) && tlb_back(i))
    when (tlb_already_recv(i) && s1_fire) {
      tlb_already_recv(i) := false.B
    }
    when (tlb_back(i) && tlb_ready_recv(i) && !s1_fire) {
      tlb_already_recv(i) := true.B
    }
    fromITLB(i).ready := tlb_ready_recv(i)
  }
  assert(RegNext(Cat((0 until PortNumber).map(i => tlb_need_back(i) || !tlb_resp_valid(i))).andR(), true.B),
    "when tlb should not back, tlb should not resp valid")
  assert(RegNext(!s1_valid || Cat(tlb_need_back).orR, true.B), "when s1_valid, need at least one tlb_need_back")
  assert(RegNext(s1_valid || !Cat(tlb_need_back).orR, true.B), "when !s1_valid, all the tlb_need_back should be false")
  assert(RegNext(s1_valid || !Cat(tlb_already_recv).orR, true.B), "when !s1_valid, should not tlb_already_recv")
  assert(RegNext(s1_valid || !Cat(tlb_resp_valid).orR, true.B), "when !s1_valid, should not tlb_resp_valid")
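  // The itlb is non-blocking, so a response can arrive in a cycle where s1 cannot fire.
  // tlb_already_recv remembers that the response was already consumed, and the
  // ResultHoldBypass instances below keep the returned paddr/exception bits stable
  // until s1 finally fires.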

  val tlbRespPAddr = VecInit((0 until PortNumber).map(i => ResultHoldBypass(valid = tlb_back(i), data = fromITLB(i).bits.paddr)))
  val tlbExcpPF = VecInit((0 until PortNumber).map(i => ResultHoldBypass(valid = tlb_back(i), data = fromITLB(i).bits.excp.pf.instr) && tlb_need_back(i)))
  val tlbExcpAF = VecInit((0 until PortNumber).map(i => ResultHoldBypass(valid = tlb_back(i), data = fromITLB(i).bits.excp.af.instr) && tlb_need_back(i)))
  val tlbExcp = VecInit((0 until PortNumber).map(i => tlbExcpAF(i) || tlbExcpPF(i)))

  val tlbRespAllValid = Cat((0 until PortNumber).map(i => !tlb_need_back(i) || tlb_resp_valid(i))).andR
  s1_ready := s2_ready && tlbRespAllValid  || !s1_valid
  s1_fire  := s1_valid && tlbRespAllValid && s2_ready

  /** s1 hit check/tag compare */
  val s1_req_paddr              = tlbRespPAddr
  val s1_req_ptags              = VecInit(s1_req_paddr.map(get_phy_tag(_)))

  val s1_meta_ptags              = ResultHoldBypass(data = metaResp.tags, valid = RegNext(s0_fire))
  val s1_meta_cohs               = ResultHoldBypass(data = metaResp.cohs, valid = RegNext(s0_fire))
  val s1_meta_errors             = ResultHoldBypass(data = metaResp.errors, valid = RegNext(s0_fire))

  val s1_data_cacheline          = ResultHoldBypass(data = dataResp.datas, valid = RegNext(s0_fire))
  val s1_data_errorBits          = ResultHoldBypass(data = dataResp.codes, valid = RegNext(s0_fire))

  val s1_tag_eq_vec        = VecInit((0 until PortNumber).map( p => VecInit((0 until nWays).map( w =>  s1_meta_ptags(p)(w) ===  s1_req_ptags(p) ))))
  val s1_tag_match_vec     = VecInit((0 until PortNumber).map( k => VecInit(s1_tag_eq_vec(k).zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && s1_meta_cohs(k)(w).isValid()})))
  val s1_tag_match         = VecInit(s1_tag_match_vec.map(vector => ParallelOR(vector)))

  val s1_port_hit          = VecInit(Seq(s1_tag_match(0) && s1_valid  && !tlbExcp(0),  s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcp(1) ))
  val s1_bank_miss         = VecInit(Seq(!s1_tag_match(0) && s1_valid && !tlbExcp(0), !s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcp(1) ))
  val s1_hit               = (s1_port_hit(0) && s1_port_hit(1)) || (!s1_double_line && s1_port_hit(0))

  /** choose victim cacheline */
  val replacers       = Seq.fill(PortNumber)(ReplacementPolicy.fromString(cacheParams.replacer,nWays,nSets/PortNumber))
  val s1_victim_oh    = ResultHoldBypass(data = VecInit(replacers.zipWithIndex.map{case (replacer, i) => UIntToOH(replacer.way(s1_req_vsetIdx(i)))}), valid = RegNext(s0_fire))

  val s1_victim_coh   = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_cohs(port))})
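  // One replacer instance per port picks a victim way for a potential refill; the victim's
  // coherence state is read out here and later handed to the miss unit together with the request.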

  assert(PopCount(s1_tag_match_vec(0)) <= 1.U && PopCount(s1_tag_match_vec(1)) <= 1.U, "Multiple hit in main pipe")

  ((replacers zip touch_sets) zip touch_ways).map{case ((r, s),w) => r.access(s,w)}

  val s1_hit_data      =  VecInit(s1_data_cacheline.zipWithIndex.map { case(bank, i) =>
    val port_hit_data = Mux1H(s1_tag_match_vec(i).asUInt, bank)
    port_hit_data
  })

  /** <PERF> replace victim way number */

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_port_hit(0) && OHToUInt(s1_tag_match_vec(0))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_victim_way_" + Integer.toString(w, 10),  s1_fire && !s1_port_hit(0) && OHToUInt(s1_victim_oh(0))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && s1_port_hit(1) && OHToUInt(s1_tag_match_vec(1))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_victim_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && !s1_port_hit(1) && OHToUInt(s1_victim_oh(1))  === w.U)
  }

  /**
    ******************************************************************************
    * ICache Stage 2
    * - send request to MSHR if ICache miss
    * - generate secondary miss status/data registers
    * - response to IFU
    ******************************************************************************
    */

  /** s2 control */
  val s2_fetch_finish = Wire(Bool())

  val s2_valid          = generatePipeControl(lastFire = s1_fire, thisFire = s2_fire, thisFlush = false.B, lastFlush = false.B)
  val s2_miss_available = Wire(Bool())

  s2_ready      := (s2_valid && s2_fetch_finish && !io.respStall) || (!s2_valid && s2_miss_available)
  s2_fire       := s2_valid && s2_fetch_finish && !io.respStall

  /** s2 data */
  val mmio = fromPMP.map(port => port.mmio) // TODO: handle it

  val (s2_req_paddr , s2_req_vaddr)   = (RegEnable(s1_req_paddr, s1_fire), RegEnable(s1_req_vaddr, s1_fire))
  val s2_req_vsetIdx  = RegEnable(s1_req_vsetIdx, s1_fire)
  val s2_req_ptags    = RegEnable(s1_req_ptags, s1_fire)
  val s2_only_first   = RegEnable(s1_only_first, s1_fire)
  val s2_double_line  = RegEnable(s1_double_line, s1_fire)
  val s2_hit          = RegEnable(s1_hit   , s1_fire)
  val s2_port_hit     = RegEnable(s1_port_hit, s1_fire)
  val s2_bank_miss    = RegEnable(s1_bank_miss, s1_fire)
  val s2_waymask      = RegEnable(s1_victim_oh, s1_fire)
  val s2_victim_coh   = RegEnable(s1_victim_coh, s1_fire)
  val s2_tag_match_vec = RegEnable(s1_tag_match_vec, s1_fire)

  assert(RegNext(!s2_valid || s2_req_paddr(0)(11,0) === s2_req_vaddr(0)(11,0), true.B))

  /** these status signals imply that s2 is a secondary miss (no need to resend a miss request) */
  val sec_meet_vec = Wire(Vec(2, Bool()))
  val s2_fixed_hit_vec = VecInit((0 until 2).map(i => s2_port_hit(i) || sec_meet_vec(i)))
  val s2_fixed_hit = (s2_valid && s2_fixed_hit_vec(0) && s2_fixed_hit_vec(1) && s2_double_line) || (s2_valid && s2_fixed_hit_vec(0) && !s2_double_line)
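  // "fixed hit": the port either hit in the SRAMs (s2_port_hit) or its miss is already
  // covered by a miss slot holding the same line (sec_meet_vec), so no new request is needed.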

  val s2_meta_errors    = RegEnable(s1_meta_errors,    s1_fire)
  val s2_data_errorBits = RegEnable(s1_data_errorBits, s1_fire)
  val s2_data_cacheline = RegEnable(s1_data_cacheline, s1_fire)

  val s2_data_errors    = Wire(Vec(PortNumber,Vec(nWays, Bool())))

  (0 until PortNumber).map{ i =>
    val read_datas = s2_data_cacheline(i).asTypeOf(Vec(nWays,Vec(dataCodeUnitNum, UInt(dataCodeUnit.W))))
    val read_codes = s2_data_errorBits(i).asTypeOf(Vec(nWays,Vec(dataCodeUnitNum, UInt(dataCodeBits.W))))
    val data_full_wayBits = VecInit((0 until nWays).map( w =>
                                  VecInit((0 until dataCodeUnitNum).map(u =>
                                        Cat(read_codes(w)(u), read_datas(w)(u))))))
    val data_error_wayBits = VecInit((0 until nWays).map( w =>
                                  VecInit((0 until dataCodeUnitNum).map(u =>
                                       cacheParams.dataCode.decode(data_full_wayBits(w)(u)).error ))))
    if(i == 0){
      (0 until nWays).map{ w =>
        s2_data_errors(i)(w) := RegNext(RegNext(s1_fire)) && RegNext(data_error_wayBits(w)).reduce(_||_)
      }
    } else {
      (0 until nWays).map{ w =>
        s2_data_errors(i)(w) := RegNext(RegNext(s1_fire)) && RegNext(RegNext(s1_double_line)) && RegNext(data_error_wayBits(w)).reduce(_||_)
      }
    }
  }
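  // The loop above re-joins each way's data with its stored check bits and decodes them with
  // cacheParams.dataCode; the per-way error is registered and only qualified two cycles after
  // s1_fire, and port 1 is only checked when the fetch really was a double-line access.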

  val s2_parity_meta_error  = VecInit((0 until PortNumber).map(i => s2_meta_errors(i).reduce(_||_) && io.csr_parity_enable))
  val s2_parity_data_error  = VecInit((0 until PortNumber).map(i => s2_data_errors(i).reduce(_||_) && io.csr_parity_enable))
  val s2_parity_error       = VecInit((0 until PortNumber).map(i => RegNext(s2_parity_meta_error(i)) || s2_parity_data_error(i)))

  for(i <- 0 until PortNumber){
    io.errors(i).valid            := RegNext(s2_parity_error(i) && RegNext(RegNext(s1_fire)))
    io.errors(i).report_to_beu    := RegNext(s2_parity_error(i) && RegNext(RegNext(s1_fire)))
    io.errors(i).paddr            := RegNext(RegNext(s2_req_paddr(i)))
    io.errors(i).source           := DontCare
    io.errors(i).source.tag       := RegNext(RegNext(s2_parity_meta_error(i)))
    io.errors(i).source.data      := RegNext(s2_parity_data_error(i))
    io.errors(i).source.l2        := false.B
    io.errors(i).opType           := DontCare
    io.errors(i).opType.fetch     := true.B
  }
  XSError(s2_parity_error.reduce(_||_) && RegNext(RegNext(s1_fire)), "ICache has parity error in MainPipe!")


  /** exception and pmp logic **/
  //PMP Result
  val s2_tlb_need_back = VecInit((0 until PortNumber).map(i => ValidHold(tlb_need_back(i) && s1_fire, s2_fire, false.B)))
  val pmpExcpAF = Wire(Vec(PortNumber, Bool()))
  pmpExcpAF(0)  := fromPMP(0).instr && s2_tlb_need_back(0)
  pmpExcpAF(1)  := fromPMP(1).instr && s2_double_line && s2_tlb_need_back(1)
  //exception information
  val s2_except_pf = RegEnable(tlbExcpPF, s1_fire)
  val s2_except_af = VecInit(RegEnable(tlbExcpAF, s1_fire).zip(pmpExcpAF).map{
                                  case(tlbAf, pmpAf) => tlbAf || DataHoldBypass(pmpAf, RegNext(s1_fire)).asBool})
  val s2_except    = VecInit((0 until 2).map{i => s2_except_pf(i) || s2_except_af(i)})
  val s2_has_except = s2_valid && (s2_except_af.reduce(_||_) || s2_except_pf.reduce(_||_))
  //MMIO
  val s2_mmio      = DataHoldBypass(io.pmp(0).resp.mmio && !s2_except_af(0) && !s2_except_pf(0), RegNext(s1_fire)).asBool()
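  // MMIO is judged only from the port-0 PMP response and is suppressed when port 0 already
  // carries a page/access fault; DataHoldBypass keeps the flag stable while s2 is stalled.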

  //send physical address to PMP
  io.pmp.zipWithIndex.map { case (p, i) =>
    p.req.valid := s2_valid && !missSwitchBit
    p.req.bits.addr := s2_req_paddr(i)
    p.req.bits.size := 3.U // TODO
    p.req.bits.cmd := TlbCmd.exec
  }

  /*** cacheline miss logic ***/
  val wait_idle :: wait_queue_ready :: wait_send_req  :: wait_two_resp :: wait_0_resp :: wait_1_resp :: wait_one_resp ::wait_finish :: Nil = Enum(8)
  val wait_state = RegInit(wait_idle)
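  // wait_state tracks the interaction with the miss unit for the current s2 request:
  // idle -> queue_ready -> send_req once the needed MSHR ports are ready, then one of the
  // wait_*_resp states until every outstanding refill has returned, and finally wait_finish
  // until s2 fires.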

  val port_miss_fix  = VecInit(Seq(fromMSHR(0).fire() && !s2_port_hit(0),   fromMSHR(1).fire() && s2_double_line && !s2_port_hit(1) ))

  // secondary miss record registers
  class MissSlot(implicit p: Parameters) extends  ICacheBundle {
    val m_vSetIdx   = UInt(idxBits.W)
    val m_pTag      = UInt(tagBits.W)
    val m_data      = UInt(blockBits.W)
    val m_corrupt   = Bool()
  }

  val missSlot    = Seq.fill(2)(RegInit(0.U.asTypeOf(new MissSlot)))
  val m_invalid :: m_valid :: m_refilled :: m_flushed :: m_wait_sec_miss :: m_check_final ::Nil = Enum(6)
  val missStateQueue = RegInit(VecInit(Seq.fill(2)(m_invalid)) )
  val reservedRefillData = Wire(Vec(2, UInt(blockBits.W)))

  s2_miss_available :=  VecInit(missStateQueue.map(entry => entry === m_invalid  || entry === m_wait_sec_miss)).reduce(_&&_)

  val fix_sec_miss     = Wire(Vec(4, Bool()))
  val sec_meet_0_miss = fix_sec_miss(0) || fix_sec_miss(2)
  val sec_meet_1_miss = fix_sec_miss(1) || fix_sec_miss(3)
  sec_meet_vec := VecInit(Seq(sec_meet_0_miss,sec_meet_1_miss ))

  /*** miss/hit pattern: <Control Signal> only raise at the first cycle of s2_valid ***/
  val cacheline_0_hit  = (s2_port_hit(0) || sec_meet_0_miss)
  val cacheline_0_miss = !s2_port_hit(0) && !sec_meet_0_miss

  val cacheline_1_hit  = (s2_port_hit(1) || sec_meet_1_miss)
  val cacheline_1_miss = !s2_port_hit(1) && !sec_meet_1_miss

  val  only_0_miss      = RegNext(s1_fire) && cacheline_0_miss && !s2_double_line && !s2_has_except && !s2_mmio
  val  only_0_hit       = RegNext(s1_fire) && cacheline_0_hit && !s2_double_line && !s2_mmio
  val  hit_0_hit_1      = RegNext(s1_fire) && cacheline_0_hit && cacheline_1_hit  && s2_double_line && !s2_mmio
  val  hit_0_miss_1     = RegNext(s1_fire) && cacheline_0_hit && cacheline_1_miss && s2_double_line  && !s2_has_except && !s2_mmio
  val  miss_0_hit_1     = RegNext(s1_fire) && cacheline_0_miss && cacheline_1_hit && s2_double_line  && !s2_has_except && !s2_mmio
  val  miss_0_miss_1    = RegNext(s1_fire) && cacheline_0_miss && cacheline_1_miss && s2_double_line  && !s2_has_except && !s2_mmio

  val  hit_0_except_1   = RegNext(s1_fire) && s2_double_line &&  !s2_except(0) && s2_except(1)  &&  cacheline_0_hit
  val  miss_0_except_1  = RegNext(s1_fire) && s2_double_line &&  !s2_except(0) && s2_except(1)  &&  cacheline_0_miss
  val  except_0         = RegNext(s1_fire) && s2_except(0)

  def holdReleaseLatch(valid: Bool, release: Bool, flush: Bool): Bool ={
    val bit = RegInit(false.B)
    when(flush)                   { bit := false.B  }
      .elsewhen(valid && !release)  { bit := true.B  }
      .elsewhen(release)            { bit := false.B}
    bit || valid
  }
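  // holdReleaseLatch (defined above) stretches a one-cycle pulse: its output stays high from
  // the cycle `valid` is asserted until `release` (or `flush`) is seen, so a miss/hit pattern
  // computed in the first s2 cycle survives a pipeline stall. The latches below rely on this.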

  /*** miss/hit pattern latch: <Control Signal> latch the miss/hit pattern if the pipeline stops ***/
  val  miss_0_hit_1_latch     =   holdReleaseLatch(valid = miss_0_hit_1,    release = s2_fire,      flush = false.B)
  val  miss_0_miss_1_latch    =   holdReleaseLatch(valid = miss_0_miss_1,   release = s2_fire,      flush = false.B)
  val  only_0_miss_latch      =   holdReleaseLatch(valid = only_0_miss,     release = s2_fire,      flush = false.B)
  val  hit_0_miss_1_latch     =   holdReleaseLatch(valid = hit_0_miss_1,    release = s2_fire,      flush = false.B)

  val  miss_0_except_1_latch  =   holdReleaseLatch(valid = miss_0_except_1, release = s2_fire,      flush = false.B)
  val  except_0_latch         =   holdReleaseLatch(valid = except_0,        release = s2_fire,      flush = false.B)
  val  hit_0_except_1_latch   =   holdReleaseLatch(valid = hit_0_except_1,  release = s2_fire,      flush = false.B)

  val only_0_hit_latch        = holdReleaseLatch(valid = only_0_hit,   release = s2_fire,      flush = false.B)
  val hit_0_hit_1_latch        = holdReleaseLatch(valid = hit_0_hit_1,   release = s2_fire,      flush = false.B)


  /*** secondary miss judgment ***/

  def waitSecondComeIn(missState: UInt): Bool = (missState === m_wait_sec_miss)

  def getMissSituat(slotNum : Int, missNum : Int ) :Bool =  {
    RegNext(s1_fire) && (missSlot(slotNum).m_vSetIdx === s2_req_vsetIdx(missNum)) && (missSlot(slotNum).m_pTag  === s2_req_ptags(missNum)) && !s2_port_hit(missNum)  && waitSecondComeIn(missStateQueue(slotNum)) //&& !s2_mmio
  }
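  // A "secondary miss": the s2 request misses in the SRAMs but matches the set index and
  // physical tag already recorded in a miss slot that is in m_wait_sec_miss, so the refilled
  // data can be reused instead of sending another request to the miss unit.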

  val miss_0_s2_0 =   getMissSituat(slotNum = 0, missNum = 0)
  val miss_0_s2_1 =   getMissSituat(slotNum = 0, missNum = 1)
  val miss_1_s2_0 =   getMissSituat(slotNum = 1, missNum = 0)
  val miss_1_s2_1 =   getMissSituat(slotNum = 1, missNum = 1)

  val miss_0_s2_0_latch =   holdReleaseLatch(valid = miss_0_s2_0,    release = s2_fire,      flush = false.B)
  val miss_0_s2_1_latch =   holdReleaseLatch(valid = miss_0_s2_1,    release = s2_fire,      flush = false.B)
  val miss_1_s2_0_latch =   holdReleaseLatch(valid = miss_1_s2_0,    release = s2_fire,      flush = false.B)
  val miss_1_s2_1_latch =   holdReleaseLatch(valid = miss_1_s2_1,    release = s2_fire,      flush = false.B)


  val slot_0_solve = fix_sec_miss(0) || fix_sec_miss(1)
  val slot_1_solve = fix_sec_miss(2) || fix_sec_miss(3)
  val slot_slove   = VecInit(Seq(slot_0_solve, slot_1_solve))

  fix_sec_miss   := VecInit(Seq(miss_0_s2_0_latch, miss_0_s2_1_latch, miss_1_s2_0_latch, miss_1_s2_1_latch))

  /*** reserved data for secondary miss ***/

  reservedRefillData(0) := DataHoldBypass(data = missSlot(0).m_data, valid = miss_0_s2_0 || miss_0_s2_1)
  reservedRefillData(1) := DataHoldBypass(data = missSlot(1).m_data, valid = miss_1_s2_0 || miss_1_s2_1)

  /*** miss state machine ***/

  switch(wait_state){
    is(wait_idle){
      when(miss_0_except_1_latch){
        wait_state :=  Mux(toMSHR(0).ready, wait_queue_ready ,wait_idle )
      }.elsewhen( only_0_miss_latch  || miss_0_hit_1_latch){
        wait_state :=  Mux(toMSHR(0).ready, wait_queue_ready ,wait_idle )
      }.elsewhen(hit_0_miss_1_latch){
        wait_state :=  Mux(toMSHR(1).ready, wait_queue_ready ,wait_idle )
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := Mux(toMSHR(0).ready && toMSHR(1).ready, wait_queue_ready ,wait_idle)
      }
    }

    is(wait_queue_ready){
      wait_state := wait_send_req
    }

    is(wait_send_req) {
      when(miss_0_except_1_latch || only_0_miss_latch || hit_0_miss_1_latch || miss_0_hit_1_latch){
        wait_state :=  wait_one_resp
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := wait_two_resp
      }
    }

    is(wait_one_resp) {
      when( (miss_0_except_1_latch ||only_0_miss_latch || miss_0_hit_1_latch) && fromMSHR(0).fire()){
        wait_state := wait_finish
      }.elsewhen( hit_0_miss_1_latch && fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_two_resp) {
      when(fromMSHR(0).fire() && fromMSHR(1).fire()){
        wait_state := wait_finish
      }.elsewhen( !fromMSHR(0).fire() && fromMSHR(1).fire() ){
        wait_state := wait_0_resp
      }.elsewhen(fromMSHR(0).fire() && !fromMSHR(1).fire()){
        wait_state := wait_1_resp
      }
    }

    is(wait_0_resp) {
      when(fromMSHR(0).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_1_resp) {
      when(fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_finish) {
      when(s2_fire) { wait_state := wait_idle }
    }
  }


  /*** send request to MissUnit ***/

  (0 until 2).map { i =>
    if(i == 1) toMSHR(i).valid := (hit_0_miss_1_latch || miss_0_miss_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    else       toMSHR(i).valid := (only_0_miss_latch || miss_0_hit_1_latch || miss_0_miss_1_latch || miss_0_except_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    toMSHR(i).bits.paddr    := s2_req_paddr(i)
    toMSHR(i).bits.vaddr    := s2_req_vaddr(i)
    toMSHR(i).bits.waymask  := s2_waymask(i)
    toMSHR(i).bits.coh      := s2_victim_coh(i)


    when(toMSHR(i).fire() && missStateQueue(i) === m_invalid){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }

    when(fromMSHR(i).fire() && missStateQueue(i) === m_valid ){
      missStateQueue(i)         := m_refilled
      missSlot(i).m_data        := fromMSHR(i).bits.data
      missSlot(i).m_corrupt     := fromMSHR(i).bits.corrupt
    }


    when(s2_fire && missStateQueue(i) === m_refilled){
      missStateQueue(i)     := m_wait_sec_miss
    }

    /*** Only the first cycle in m_wait_sec_miss checks whether a secondary miss is met ***/
    when(missStateQueue(i) === m_wait_sec_miss){
      /*** the secondary req has been fixed by this slot and the other port also hits || the secondary req is for another cacheline and hits ***/
      when((slot_slove(i) && s2_fire) || (!slot_slove(i) && s2_fire) ) {
        missStateQueue(i)     := m_invalid
      }
      /*** the secondary req has been fixed by this slot but the other port misses / f3 is not ready || the secondary req is for another cacheline and misses ***/
      .elsewhen((slot_slove(i) && !s2_fire && s2_valid) ||  (s2_valid && !slot_slove(i) && !s2_fire) ){
        missStateQueue(i)     := m_check_final
      }
    }

    when(missStateQueue(i) === m_check_final && toMSHR(i).fire()){
      missStateQueue(i)     :=  m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }.elsewhen(missStateQueue(i) === m_check_final) {
      missStateQueue(i)     :=  m_invalid
    }
  }
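  // The loop above implements the per-slot lifecycle: m_invalid -> m_valid (request accepted by
  // the miss unit) -> m_refilled (data and corrupt bit captured) -> m_wait_sec_miss (waits to see
  // whether the next s2 request is a secondary miss) -> back to m_invalid, or through
  // m_check_final when a new request still has to be sent. m_flushed is declared but unused here.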

  io.prefetchEnable := false.B
  io.prefetchDisable := false.B
  when(toMSHR.map(_.valid).reduce(_||_)){
    missSwitchBit := true.B
    io.prefetchEnable := true.B
  }.elsewhen(missSwitchBit && s2_fetch_finish){
    missSwitchBit := false.B
    io.prefetchDisable := true.B
  }
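  // missSwitchBit is set as soon as a request goes to the miss unit and cleared when the stalled
  // fetch completes; while it is set, new s0 requests and the PMP checks are blocked.
  // prefetchEnable pulses when a miss request is issued, prefetchDisable when the fetch finishes.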


  val miss_all_fix       =  wait_state === wait_finish
  s2_fetch_finish        := ((s2_valid && s2_fixed_hit) || miss_all_fix || hit_0_except_1_latch || except_0_latch || s2_mmio)

  /** update replacement status register: 0 is hit access/ 1 is miss access */
  (touch_ways zip touch_sets).zipWithIndex.map{ case((t_w,t_s), i) =>
    t_s(0)         := s2_req_vsetIdx(i)
    t_w(0).valid   := s2_valid && s2_port_hit(i)
    t_w(0).bits    := OHToUInt(s2_tag_match_vec(i))

    t_s(1)         := s2_req_vsetIdx(i)
    t_w(1).valid   := s2_valid && !s2_port_hit(i)
    t_w(1).bits    := OHToUInt(s2_waymask(i))
  }

  val s2_hit_datas    = RegEnable(s1_hit_data, s1_fire)
  val s2_datas        = Wire(Vec(2, UInt(blockBits.W)))

  s2_datas.zipWithIndex.map{case(bank,i) =>
    if(i == 0) bank := Mux(s2_port_hit(i), s2_hit_datas(i),Mux(miss_0_s2_0_latch,reservedRefillData(0), Mux(miss_1_s2_0_latch,reservedRefillData(1), missSlot(0).m_data)))
    else    bank := Mux(s2_port_hit(i), s2_hit_datas(i),Mux(miss_0_s2_1_latch,reservedRefillData(0), Mux(miss_1_s2_1_latch,reservedRefillData(1), missSlot(1).m_data)))
  }
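  // Final data selection above, per port: the hit way's data, otherwise the copy reserved for a
  // matching secondary miss, otherwise the data freshly refilled into this port's miss slot.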

  /** response to IFU */

  (0 until PortNumber).map{ i =>
    if(i == 0) toIFU(i).valid := s2_fire
    else       toIFU(i).valid := s2_fire && s2_double_line
    toIFU(i).bits.readData  := s2_datas(i)
    toIFU(i).bits.paddr     := s2_req_paddr(i)
    toIFU(i).bits.vaddr     := s2_req_vaddr(i)
    toIFU(i).bits.tlbExcp.pageFault     := s2_except_pf(i)
    toIFU(i).bits.tlbExcp.accessFault   := s2_except_af(i) || missSlot(i).m_corrupt
    toIFU(i).bits.tlbExcp.mmio          := s2_mmio

    when(RegNext(s2_fire && missSlot(i).m_corrupt)){
      io.errors(i).valid            := true.B
      io.errors(i).report_to_beu    := false.B // l2 should have reported it to the bus error unit, no need to do it again
      io.errors(i).paddr            := RegNext(s2_req_paddr(i))
      io.errors(i).source.tag       := false.B
      io.errors(i).source.data      := false.B
      io.errors(i).source.l2        := true.B
    }
  }

  io.perfInfo.only_0_hit    := only_0_hit_latch
  io.perfInfo.only_0_miss   := only_0_miss_latch
  io.perfInfo.hit_0_hit_1   := hit_0_hit_1_latch
  io.perfInfo.hit_0_miss_1  := hit_0_miss_1_latch
  io.perfInfo.miss_0_hit_1  := miss_0_hit_1_latch
  io.perfInfo.miss_0_miss_1 := miss_0_miss_1_latch
  io.perfInfo.hit_0_except_1 := hit_0_except_1_latch
  io.perfInfo.miss_0_except_1 := miss_0_except_1_latch
  io.perfInfo.except_0      := except_0_latch
  io.perfInfo.bank_hit(0)   := only_0_miss_latch  || hit_0_hit_1_latch || hit_0_miss_1_latch || hit_0_except_1_latch
  io.perfInfo.bank_hit(1)   := miss_0_hit_1_latch || hit_0_hit_1_latch
  io.perfInfo.hit           := hit_0_hit_1_latch || only_0_hit_latch || hit_0_except_1_latch || except_0_latch

  /** <PERF> fetch bubble generated by icache miss */

  XSPerfAccumulate("icache_bubble_s2_miss",    s2_valid && !s2_fetch_finish )

}