xref: /XiangShan/src/main/scala/xiangshan/frontend/icache/ICacheMissUnit.scala (revision d4112e8865584d276cf40b63ebd417d3e4bcf528)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import huancun.{AliasKey, DirtyKey}
import xiangshan.cache._
import utils._
import utility._
import difftest._


abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
    val paddr   = UInt(PAddrBits.W)
    val vaddr   = UInt(VAddrBits.W)
    val waymask = UInt(nWays.W)

    def getVirSetIdx = get_idx(vaddr)
    def getPhyTag    = get_phy_tag(paddr)
}


class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
    val data     = UInt(blockBits.W)
    val corrupt  = Bool()
}

class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle{
    val req   = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp  = Vec(2, ValidIO(new ICacheMissResp))
    val flush = Input(Bool())
}


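/** One miss entry of the ICache miss unit.
  *
  * Each entry accepts a single miss request from the main pipeline, fetches the
  * missing cache block from the next memory level with a TileLink Get, writes the
  * refilled block into the ICache meta/data SRAMs, and returns the block (plus a
  * corrupt flag) to the requester. The in-flight block address is also exported
  * through toPrefetch for prefetch checking.
  */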
class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req  = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    // TileLink channels
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val toPrefetch = ValidIO(UInt(PAddrBits.W))
  })

  /** default values for control signals */
  io.resp := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

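  /** Miss entry FSM:
    *  - s_idle:             wait for a miss request from the main pipeline
    *  - s_send_mem_acquire: issue a TileLink Get for the whole cache block
    *  - s_wait_mem_grant:   collect the grant beats into respDataReg
    *  - s_write_back:       write the meta and data SRAMs
    *  - s_wait_resp:        return the refilled block, then go back to idle
    */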
  val s_idle :: s_send_mem_acquire :: s_wait_mem_grant :: s_write_back :: s_wait_resp :: Nil = Enum(5)
  val state = RegInit(s_idle)

  /** control logic */
  // request register
  val req         = Reg(new ICacheMissReq)
  val req_idx     = req.getVirSetIdx // virtual set index
  val req_tag     = req.getPhyTag    // physical tag
  val req_waymask = req.waymask
  val req_corrupt = RegInit(false.B)

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

  // cacheline registers
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  io.req.ready         := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_acquire)

  // block-aligned address of the in-flight miss, exported for prefetch checking
  io.toPrefetch.valid := (state =/= s_idle)
  io.toPrefetch.bits  := addrAlign(req.paddr, blockBytes, PAddrBits)

  // state transitions
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        req_corrupt := false.B // clear the corrupt flag left by a previous refill
        state       := s_send_mem_acquire
        req         := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_acquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          // accumulate corrupt over all beats; overwriting it would only keep the last beat's flag
          req_corrupt := req_corrupt || io.mem_grant.bits.corrupt
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_write_back
          }
        }
      }
    }

    is(s_write_back) {
      state := Mux(io.meta_write.fire() && io.data_write.fire(), s_wait_resp, s_write_back)
    }

    is(s_wait_resp) {
      io.resp.bits.data    := respDataReg.asUInt
      io.resp.bits.corrupt := req_corrupt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }


  /** memory acquire (Get) and refill / meta write */

  val getBlock = edge.Get(
    fromSource = io.id,
    toAddress  = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize     = (log2Up(cacheParams.blockBytes)).U
  )._2

  io.mem_acquire.bits := getBlock
  require(nSets <= 256) // icache size should not exceed 128KB

  // response to IFU
  io.resp.valid := state === s_wait_resp

  io.meta_write.valid := (state === s_write_back)
  io.meta_write.bits.generate(tag = req_tag, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back)
  io.data_write.bits.generate(data = respDataReg.asUInt,
                              idx  = req_idx,
                              waymask = req_waymask,
                              bankIdx = req_idx(0),
                              paddr = req.paddr)

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())

}


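/** Top level of the ICache miss unit.
  *
  * It instantiates PortNumber demand-miss entries plus nPrefetchEntries prefetch (PIQ)
  * entries, arbitrates their meta/data write requests and their TileLink A-channel
  * requests, and routes D-channel grants back to the owning entry by source id.
  */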
class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle{
    val hartId      = Input(UInt(8.W))
    val req         = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp        = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

    val meta_write  = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write  = DecoupledIO(new ICacheDataWriteBundle)

    val prefetch_req   = Flipped(DecoupledIO(new PIQReq))
    val prefetch_check = Vec(PortNumber, ValidIO(UInt(PAddrBits.W)))
    val freePIQEntry   = Output(UInt(log2Ceil(nPrefetchEntries).W))

    val fencei = Input(Bool())

    val piq_write_ipbuffer = ValidIO(new IPFBufferWrite)

    val to_main_pipe = Vec(nPrefetchEntries, new PIQToMainPipe)
  })
  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb     = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))
  val ipf_write_arb  = Module(new Arbiter(new IPFBufferWrite, nPrefetchEntries))

  // the grant channel is always accepted here; per-entry valid is gated by source id below
  io.mem_grant.ready := true.B

  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry request
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits  := io.req(i).bits
    io.req(i).ready    := entry.io.req.ready

    // entry meta/data write-back goes through the write arbiters
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i)     <> entry.io.data_write

    // route the D-channel grant to this entry when the source id matches
    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits  := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i) <> entry.io.resp
    io.prefetch_check(i) <> entry.io.toPrefetch

    entry
  }

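  /** Prefetch (PIQ) entries: a new prefetch request is allocated to the lowest-indexed
    * free entry (alloc); their refilled blocks are written into the instruction prefetch
    * buffer through ipf_write_arb rather than into the ICache SRAMs.
    */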
  val alloc = Wire(UInt(log2Ceil(nPrefetchEntries).W))
  val toMainPipe = io.to_main_pipe.map(_.info)

  val prefEntries = (PortNumber until PortNumber + nPrefetchEntries) map { i =>
    val prefetchEntry = Module(new PIQEntry(edge, i))

    prefetchEntry.io.mem_grant.valid := false.B
    prefetchEntry.io.mem_grant.bits  := DontCare
    prefetchEntry.io.fencei := io.fencei

    ipf_write_arb.io.in(i - PortNumber) <> prefetchEntry.io.piq_write_ipbuffer

    when(io.mem_grant.bits.source === i.U) {
      prefetchEntry.io.mem_grant <> io.mem_grant
    }

    prefetchEntry.io.req.valid := io.prefetch_req.valid && ((i - PortNumber).U === alloc)
    prefetchEntry.io.req.bits  := io.prefetch_req.bits

    prefetchEntry.io.id := i.U

    prefetchEntry
  }

  alloc := PriorityEncoder(prefEntries.map(_.io.req.ready))
  io.prefetch_req.ready := ParallelOR(prefEntries.map(_.io.req.ready))
  io.freePIQEntry := PriorityEncoder(prefEntries.map(_.io.req.ready))
  (0 until nPrefetchEntries).foreach(i => toMainPipe(i) <> prefEntries(i).io.prefetch_entry_data)
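
  // arbitrate all entries' TileLink A-channel requests onto mem_acquire;
  // demand-miss entries are listed first, so TLArbiter.lowest favors them over prefetches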
  val tl_a_channel = entries.map(_.io.mem_acquire) ++ prefEntries.map(_.io.mem_acquire)
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_channel: _*)

  io.meta_write <> meta_write_arb.io.out
  io.data_write <> refill_arb.io.out

  io.piq_write_ipbuffer.valid := ipf_write_arb.io.out.valid
  io.piq_write_ipbuffer.bits  := ipf_write_arb.io.out.bits
  ipf_write_arb.io.out.ready  := true.B

  XSPerfAccumulate("refill_ipf_num", io.piq_write_ipbuffer.fire)

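  // report ICache refill events to difftest when co-simulation is enabled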
  if (env.EnableDifftest) {
    val difftest = Module(new DifftestRefillEvent)
    difftest.io.clock   := clock
    difftest.io.coreid  := io.hartId
    difftest.io.cacheid := 0.U
    difftest.io.valid   := refill_arb.io.out.valid
    difftest.io.addr    := refill_arb.io.out.bits.paddr
    difftest.io.data    := refill_arb.io.out.bits.data.asTypeOf(difftest.io.data)
  }

  (0 until nWays).foreach { w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10),
      entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10),
      entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}