/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import xiangshan.cache._
import utils._
import utility._
import difftest._


abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
    val paddr   = UInt(PAddrBits.W)
    val vaddr   = UInt(VAddrBits.W)
    val waymask = UInt(nWays.W)

    def getVirSetIdx = get_idx(vaddr)
    def getPhyTag    = get_phy_tag(paddr)
}


class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
    val data    = UInt(blockBits.W)
    val corrupt = Bool()
}

class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle
{
    val req   = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp  = Vec(2, ValidIO(new ICacheMissResp))
    val flush = Input(Bool())
}


class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req  = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    // tilelink channel
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val ongoing_req = ValidIO(UInt(PAddrBits.W))
    val fencei      = Input(Bool())
  })

  /** default value for control signals */
  io.resp             := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready  := true.B
  io.meta_write.bits  := DontCare
  io.data_write.bits  := DontCare

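  /** miss-entry state machine:
   *  s_idle -> s_send_mem_acquire (a request is accepted)
   *         -> s_wait_mem_grant   (Get sent on channel A, beats collected from channel D)
   *         -> s_write_back       (refill written to meta/data SRAM unless flushed by fencei)
   *         -> s_wait_resp        (data returned to the pipeline) -> s_idle
   */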
  val s_idle :: s_send_mem_acquire :: s_wait_mem_grant :: s_write_back :: s_wait_resp :: Nil = Enum(5)
  val state = RegInit(s_idle)

  /** control logic */
  // request register
  val req         = Reg(new ICacheMissReq)
  val req_idx     = req.getVirSetIdx // virtual index
  val req_tag     = req.getPhyTag    // physical tag
  val req_waymask = req.waymask
  val req_corrupt = RegInit(false.B)

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

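  // a fencei arriving while the miss is in flight must not refill the cache:
  // needflush_r latches it until the entry returns to idle; a flushed refill
  // skips the meta/data writes but still responds to the pipeline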
  val needflush_r = RegInit(false.B)
  when (state === s_idle) { needflush_r := false.B }
  when (state =/= s_idle && io.fencei) { needflush_r := true.B }
  val needflush = needflush_r | io.fencei

  // cacheline register
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  io.req.ready         := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_acquire)

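  // while the entry is busy, expose the block-aligned physical address of the
  // in-flight miss (exported as mshr_info by ICacheMissUnit)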
  io.ongoing_req.valid := (state =/= s_idle)
  io.ongoing_req.bits  := addrAlign(req.paddr, blockBytes, PAddrBits)

  // state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        req_corrupt := false.B // clear the corrupt flag for the new request
        state       := s_send_mem_acquire
        req         := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_acquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          // a block is corrupt if any of its grant beats is corrupt
          req_corrupt := req_corrupt || io.mem_grant.bits.corrupt
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_write_back
          }
        }
      }
    }

154
155    is(s_write_back) {
156      state := Mux(io.meta_write.fire() && io.data_write.fire() || needflush, s_wait_resp, s_write_back)
157    }
158
159    is(s_wait_resp) {
160      io.resp.bits.data := respDataReg.asUInt
161      io.resp.bits.corrupt := req_corrupt
162      when(io.resp.fire()) {
163        state := s_idle
164      }
165    }
166  }
167
  /** refill write and meta write */

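  // issue a single uncached TileLink Get for the aligned cache block;
  // the grant returns the block as refillCycles beats on channel D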
  val getBlock = edge.Get(
    fromSource = io.id,
    toAddress  = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize     = (log2Up(cacheParams.blockBytes)).U
  )._2

  io.mem_acquire.bits := getBlock
  // mark the request source as CPU instruction fetch
  io.mem_acquire.bits.user.lift(ReqSourceKey).foreach(_ := MemReqSource.CPUInst.id.U)
  require(nSets <= 256) // icache size should not be more than 128KB

  // resp to ifu
  io.resp.valid := (state === s_wait_resp)

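  // the refill is written into the meta/data arrays only if no fencei
  // arrived while the miss was outstanding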
  io.meta_write.valid := (state === s_write_back && !needflush)
  io.meta_write.bits.generate(tag = req_tag, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back && !needflush)
  io.data_write.bits.generate(data = respDataReg.asUInt,
                              idx  = req_idx,
                              waymask = req_waymask,
                              bankIdx = req_idx(0),
                              paddr = req.paddr)

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())
}


class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle {
    val hartId      = Input(UInt(8.W))
    val req         = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp        = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write  = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write  = DecoupledIO(new ICacheDataWriteBundle)

    val prefetch_req = Flipped(DecoupledIO(new PIQReq))
    val mshr_info    = Vec(totalMSHRNum, ValidIO(UInt(PAddrBits.W)))
    val freePIQEntry = Output(UInt(log2Ceil(nPrefetchEntries).W))

    val fencei = Input(Bool())

    val piq_write_ipbuffer = ValidIO(new IPFBufferWrite)

    val to_main_pipe = Vec(nPrefetchEntries, new PIQToMainPipe)
  })
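  // The miss unit owns the instruction fetch TileLink A/D channels: it hosts
  // PortNumber demand-miss entries and nPrefetchEntries prefetch (PIQ) entries,
  // and arbitrates their meta/data/prefetch-buffer writes.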
  // assign default values to output signals
  io.mem_grant.ready := true.B

  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb     = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))
  val ipf_write_arb  = Module(new Arbiter(new IPFBufferWrite, nPrefetchEntries))

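  // demand-miss entries, one per fetch port; channel D grants are steered to their
  // entry by TileLink source ID (0 until PortNumber here, PortNumber and above for PIQ)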
  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits  := io.req(i).bits
    io.req(i).ready    := entry.io.req.ready

    // entry resp
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i)     <> entry.io.data_write

    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits  := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i)      <> entry.io.resp
    io.mshr_info(i) <> entry.io.ongoing_req
    entry.io.fencei := io.fencei

    entry
  }

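  // a prefetch request is allocated to the lowest-index free PIQ entry
  // (alloc, assigned below via PriorityEncoder)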
  val alloc      = Wire(UInt(log2Ceil(nPrefetchEntries).W))
  val toMainPipe = io.to_main_pipe.map(_.info)

  val prefEntries = (PortNumber until PortNumber + nPrefetchEntries) map { i =>
    val prefetchEntry = Module(new PIQEntry(edge, i))

    prefetchEntry.io.mem_grant.valid := false.B
    prefetchEntry.io.mem_grant.bits  := DontCare
    prefetchEntry.io.fencei          := io.fencei

    ipf_write_arb.io.in(i - PortNumber) <> prefetchEntry.io.piq_write_ipbuffer

    when(io.mem_grant.bits.source === i.U) {
      prefetchEntry.io.mem_grant <> io.mem_grant
    }

    prefetchEntry.io.req.valid := io.prefetch_req.valid && ((i - PortNumber).U === alloc)
    prefetchEntry.io.req.bits  := io.prefetch_req.bits

    prefetchEntry.io.id := i.U

    io.mshr_info(i) := prefetchEntry.io.ongoing_req

    prefetchEntry
  }

  alloc                 := PriorityEncoder(prefEntries.map(_.io.req.ready))
  io.prefetch_req.ready := ParallelOR(prefEntries.map(_.io.req.ready))
  io.freePIQEntry       := PriorityEncoder(prefEntries.map(_.io.req.ready))
  (0 until nPrefetchEntries).foreach(i => toMainPipe(i) <> prefEntries(i).io.prefetch_entry_data)
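  // channel A arbitration: TLArbiter.lowest favours lower-indexed sources,
  // so demand-miss entries take priority over prefetch entries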
  val tl_a_channel = entries.map(_.io.mem_acquire) ++ prefEntries.map(_.io.mem_acquire)
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_channel:_*)

  io.meta_write <> meta_write_arb.io.out
  io.data_write <> refill_arb.io.out

  io.piq_write_ipbuffer.valid := ipf_write_arb.io.out.valid
  io.piq_write_ipbuffer.bits  := ipf_write_arb.io.out.bits
  ipf_write_arb.io.out.ready  := true.B

  XSPerfAccumulate("refill_ipf_num", io.piq_write_ipbuffer.fire)

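  // difftest refill events: instruction-prefetch-buffer refills are reported with
  // cacheid 3, I-cache data refills with cacheid 0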
  if (env.EnableDifftest) {
    val diffipfrefill = Module(new DifftestRefillEvent)
    diffipfrefill.io.clock   := clock
    diffipfrefill.io.coreid  := io.hartId
    diffipfrefill.io.cacheid := 3.U
    diffipfrefill.io.valid   := ipf_write_arb.io.out.valid
    diffipfrefill.io.addr    := ipf_write_arb.io.out.bits.meta.paddr
    diffipfrefill.io.data    := ipf_write_arb.io.out.bits.data.asTypeOf(diffipfrefill.io.data)
  }

  if (env.EnableDifftest) {
    val difftest = Module(new DifftestRefillEvent)
    difftest.io.clock   := clock
    difftest.io.coreid  := io.hartId
    difftest.io.cacheid := 0.U
    difftest.io.valid   := refill_arb.io.out.valid
    difftest.io.addr    := refill_arb.io.out.bits.paddr
    difftest.io.data    := refill_arb.io.out.bits.data.asTypeOf(difftest.io.data)
  }

  (0 until nWays).foreach { w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10),
      entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10),
      entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}