// xref: /XiangShan/src/main/scala/xiangshan/mem/sbuffer/Sbuffer.scala (revision 3d3419b91b243f8ce20bb52fd57da1c9ea9a7ec0)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import xiangshan.cache._
import difftest._
import freechips.rocketchip.util._

class SbufferFlushBundle extends Bundle {
  val valid = Output(Bool())
  val empty = Input(Bool())
}

trait HasSbufferConst extends HasXSParameter {
  val EvictCycles = 1 << 20
  val SbufferReplayDelayCycles = 16
  require(isPow2(EvictCycles))
  val EvictCountBits = log2Up(EvictCycles+1)
  val MissqReplayCountBits = log2Up(SbufferReplayDelayCycles) + 1

  val SbufferIndexWidth: Int = log2Up(StoreBufferSize)
  // paddr = ptag + offset
  val CacheLineBytes: Int = CacheLineSize / 8
  val CacheLineWords: Int = CacheLineBytes / DataBytes
  val OffsetWidth: Int = log2Up(CacheLineBytes)
  val WordsWidth: Int = log2Up(CacheLineWords)
  val PTagWidth: Int = PAddrBits - OffsetWidth
  val VTagWidth: Int = VAddrBits - OffsetWidth
  val WordOffsetWidth: Int = PAddrBits - WordsWidth
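
  // Worked example, assuming XiangShan defaults (CacheLineSize = 512 bits,
  // DataBytes = 8, PAddrBits = 36, VAddrBits = 39, StoreBufferSize = 16):
  //   CacheLineBytes = 64, CacheLineWords = 8, OffsetWidth = 6, WordsWidth = 3,
  //   PTagWidth = 30, VTagWidth = 33, SbufferIndexWidth = 4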
}

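// Entry lifecycle, as maintained by the logic in Sbuffer below:
//   invalid --[alloc]--> active (valid, not inflight)
//   active --[sent to dcache]--> inflight
//   inflight --[dcache hit resp]--> invalid
//   inflight --[dcache replay resp]--> inflight + w_timeout (resent after
//   SbufferReplayDelayCycles)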
class SbufferEntryState (implicit p: Parameters) extends SbufferBundle {
  val state_valid    = Bool() // this entry is active
  val state_inflight = Bool() // sbuffer is trying to write this entry to dcache
  val w_timeout = Bool() // received a replay resp; wait before resending the store pipeline req
  val w_sameblock_inflight = Bool() // a dcache req to the same cache block is inflight

  def isInvalid(): Bool = !state_valid
  def isValid(): Bool = state_valid
  def isActive(): Bool = state_valid && !state_inflight
  def isInflight(): Bool = state_inflight
  def isDcacheReqCandidate(): Bool = state_valid && !state_inflight && !w_sameblock_inflight
}

class SbufferBundle(implicit p: Parameters) extends XSBundle with HasSbufferConst

class DataWriteReq(implicit p: Parameters) extends SbufferBundle {
  // universal write mask
  val wvec = UInt(StoreBufferSize.W)
  // 2 cycle update
  val mask = UInt((DataBits/8).W)
  val data = UInt(DataBits.W)
  val wordOffset = UInt(WordOffsetWidth.W)
  val wline = Bool() // write the whole cacheline
  // 1 cycle update
  val cleanMask = Bool() // set the whole line's mask to 0
}

class SbufferData(implicit p: Parameters) extends XSModule with HasSbufferConst {
  val io = IO(new Bundle(){
    val writeReq = Vec(EnsbufferWidth, Flipped(ValidIO(new DataWriteReq)))
    val dataOut = Output(Vec(StoreBufferSize, Vec(CacheLineWords, Vec(DataBytes, UInt(8.W)))))
    val maskOut = Output(Vec(StoreBufferSize, Vec(CacheLineWords, Vec(DataBytes, Bool()))))
  })

  val data = Reg(Vec(StoreBufferSize, Vec(CacheLineWords, Vec(DataBytes, UInt(8.W)))))
  val mask = Reg(Vec(StoreBufferSize, Vec(CacheLineWords, Vec(DataBytes, Bool()))))

  // 2 cycle data / mask update
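  // sbuffer_in_s1 latches the per-line write enable and the req payload into
  // line buffers; sbuffer_in_s2 commits the latched bytes into data / mask,
  // trading one cycle of latency for relaxed timing on the wide
  // (StoreBufferSize x CacheLineBytes) byte-enable fanout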
  for(i <- 0 until EnsbufferWidth) {
    val req = io.writeReq(i)
    for(line <- 0 until StoreBufferSize){
      val sbuffer_in_s1_line_wen = req.valid && req.bits.wvec(line)
      val sbuffer_in_s2_line_wen = RegNext(sbuffer_in_s1_line_wen)
      val line_write_buffer_data = RegEnable(req.bits.data, sbuffer_in_s1_line_wen)
      val line_write_buffer_wline = RegEnable(req.bits.wline, sbuffer_in_s1_line_wen)
      val line_write_buffer_mask = RegEnable(req.bits.mask, sbuffer_in_s1_line_wen)
      val line_write_buffer_offset = RegEnable(req.bits.wordOffset(WordsWidth-1, 0), sbuffer_in_s1_line_wen)
      sbuffer_in_s1_line_wen.suggestName("sbuffer_in_s1_line_wen_"+line)
      sbuffer_in_s2_line_wen.suggestName("sbuffer_in_s2_line_wen_"+line)
      line_write_buffer_data.suggestName("line_write_buffer_data_"+line)
      line_write_buffer_wline.suggestName("line_write_buffer_wline_"+line)
      line_write_buffer_mask.suggestName("line_write_buffer_mask_"+line)
      line_write_buffer_offset.suggestName("line_write_buffer_offset_"+line)
      for(word <- 0 until CacheLineWords){
        for(byte <- 0 until DataBytes){
          val write_byte = sbuffer_in_s2_line_wen && (
            line_write_buffer_mask(byte) && (line_write_buffer_offset === word.U) ||
            line_write_buffer_wline
          )
          when(write_byte){
            data(line)(word)(byte) := line_write_buffer_data(byte*8+7, byte*8)
            mask(line)(word)(byte) := true.B
          }
        }
      }
    }
  }

  // 1 cycle line mask clean
  for(i <- 0 until EnsbufferWidth) {
    val req = io.writeReq(i)
    when(req.valid){
      for(line <- 0 until StoreBufferSize){
        when(
          req.bits.wvec(line) &&
          req.bits.cleanMask
        ){
          for(word <- 0 until CacheLineWords){
            for(byte <- 0 until DataBytes){
              mask(line)(word)(byte) := false.B
              val debug_last_cycle_write_byte = RegNext(req.valid && req.bits.wvec(line) && (
                req.bits.mask(byte) && (req.bits.wordOffset(WordsWidth-1, 0) === word.U) ||
                req.bits.wline
              ))
              assert(!debug_last_cycle_write_byte)
            }
          }
        }
      }
    }
  }

  io.dataOut := data
  io.maskOut := mask
}

class Sbuffer(implicit p: Parameters) extends DCacheModule with HasSbufferConst with HasPerfEvents {
  val io = IO(new Bundle() {
    val hartId = Input(UInt(8.W))
    val in = Vec(EnsbufferWidth, Flipped(Decoupled(new DCacheWordReqWithVaddr)))
    val dcache = Flipped(new DCacheToSbufferIO)
    val forward = Vec(LoadPipelineWidth, Flipped(new LoadForwardQueryIO))
    val sqempty = Input(Bool())
    val flush = Flipped(new SbufferFlushBundle)
    val csrCtrl = Flipped(new CustomCSRCtrlIO)
  })

  val dataModule = Module(new SbufferData)
  dataModule.io.writeReq <> DontCare
  val writeReq = dataModule.io.writeReq

  val ptag = Reg(Vec(StoreBufferSize, UInt(PTagWidth.W)))
  val vtag = Reg(Vec(StoreBufferSize, UInt(VTagWidth.W)))
  val debug_mask = Reg(Vec(StoreBufferSize, Vec(CacheLineWords, Vec(DataBytes, Bool()))))
  val waitInflightMask = Reg(Vec(StoreBufferSize, UInt(StoreBufferSize.W)))
  val data = dataModule.io.dataOut
  val mask = dataModule.io.maskOut
  val stateVec = RegInit(VecInit(Seq.fill(StoreBufferSize)(0.U.asTypeOf(new SbufferEntryState))))
  val cohCount = RegInit(VecInit(Seq.fill(StoreBufferSize)(0.U(EvictCountBits.W))))
  val missqReplayCount = RegInit(VecInit(Seq.fill(StoreBufferSize)(0.U(MissqReplayCountBits.W))))

  val sbuffer_out_s0_fire = Wire(Bool())

  /*
       idle --[flush]       --> drain_all     --[buf empty]  --> idle
            --[buf full]    --> replace       --[not full]   --> idle
            --[uarch drain] --> drain_sbuffer --[sbuf empty] --> idle
  */
  // x_drain_all: drain store queue and sbuffer
  // x_drain_sbuffer: drain sbuffer only, block store queue to sbuffer write
  val x_idle :: x_replace :: x_drain_all :: x_drain_sbuffer :: Nil = Enum(4)
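  // note: with this Enum order, x_drain_all (2) and x_drain_sbuffer (3) are
  // exactly the states with bit 1 set, which is what needDrain below checks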
  def needDrain(state: UInt): Bool =
    state(1)
  val sbuffer_state = RegInit(x_idle)

  // ---------------------- Store Enq Sbuffer ---------------------

  def getPTag(pa: UInt): UInt =
    pa(PAddrBits - 1, PAddrBits - PTagWidth)

  def getVTag(va: UInt): UInt =
    va(VAddrBits - 1, VAddrBits - VTagWidth)

  def getWord(pa: UInt): UInt =
    pa(PAddrBits-1, 3)

  def getWordOffset(pa: UInt): UInt =
    pa(OffsetWidth-1, 3)

  def getAddr(ptag: UInt): UInt =
    Cat(ptag, 0.U((PAddrBits - PTagWidth).W))

  def getByteOffset(offset: UInt): UInt =
    Cat(offset(OffsetWidth - 1, 3), 0.U(3.W))

  def isOneOf(key: UInt, seq: Seq[UInt]): Bool =
    if(seq.isEmpty) false.B else Cat(seq.map(_===key)).orR()

  def widthMap[T <: Data](f: Int => T) = (0 until StoreBufferSize) map f

  // sbuffer entry count

  val plru = new PseudoLRU(StoreBufferSize)
  val accessIdx = Wire(Vec(EnsbufferWidth + 1, Valid(UInt(SbufferIndexWidth.W))))

  val replaceIdx = plru.way
  plru.access(accessIdx)

  //-------------------------cohCount-----------------------------
  // insert and merge: cohCount=0
  // every cycle cohCount+=1
  // if cohCount(EvictCountBits-1)==1, evict
  val cohTimeOutMask = VecInit(widthMap(i => cohCount(i)(EvictCountBits - 1) && stateVec(i).isActive()))
  val (cohTimeOutIdx, cohHasTimeOut) = PriorityEncoderWithFlag(cohTimeOutMask)
  val missqReplayTimeOutMask = VecInit(widthMap(i => missqReplayCount(i)(MissqReplayCountBits - 1) && stateVec(i).w_timeout))
  val (missqReplayTimeOutIdx, missqReplayMayHasTimeOut) = PriorityEncoderWithFlag(missqReplayTimeOutMask)
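  // the registered replay timeout is suppressed if sbuffer_out_s0 fired in
  // the previous cycle, so an entry that was just selected for eviction is
  // not picked again from a stale timeout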
  val missqReplayHasTimeOut = RegNext(missqReplayMayHasTimeOut) && !RegNext(sbuffer_out_s0_fire)
  val missqReplayTimeOutIdxReg = RegEnable(missqReplayTimeOutIdx, missqReplayMayHasTimeOut)

  //-------------------------sbuffer enqueue-----------------------------

  // Now sbuffer enq logic is divided into 3 stages:

  // sbuffer_in_s0:
  // * read data and meta from store queue
  // * store them in a 2-entry FIFO queue

  // sbuffer_in_s1:
  // * read data and meta from the FIFO queue
  // * update sbuffer meta (vtag, ptag, flag)
  // * prevent that line from being sent to dcache (add a block condition)
  // * prepare cacheline level write enable signal, RegNext() data and mask

  // sbuffer_in_s2:
  // * use the cacheline level buffer to update sbuffer data and mask
  // * remove the dcache write block (if there is one)

  val activeMask = VecInit(stateVec.map(s => s.isActive()))
  val drainIdx = PriorityEncoder(activeMask)

  val inflightMask = VecInit(stateVec.map(s => s.isInflight()))

  val inptags = io.in.map(in => getPTag(in.bits.addr))
  val invtags = io.in.map(in => getVTag(in.bits.vaddr))
  val sameTag = Seq.tabulate(io.in.length)(x => Seq.tabulate(io.in.length)(y => inptags(x) === inptags(y)))
  val words = (0 until EnsbufferWidth).map(i => getWord(io.in(i).bits.addr))
  val sameWord = Seq.tabulate(EnsbufferWidth)(x => Seq.tabulate(EnsbufferWidth)(y => words(x) === words(y)))

  // merge condition
  val mergeMask = Wire(Vec(EnsbufferWidth, Vec(StoreBufferSize, Bool())))
  val mergeIdx = mergeMask.map(PriorityEncoder(_)) // avoid using mergeIdx for better timing
  val canMerge = mergeMask.map(ParallelOR(_))
  val mergeVec = mergeMask.map(_.asUInt)

  for(i <- 0 until EnsbufferWidth){
    mergeMask(i) := widthMap(j =>
      inptags(i) === ptag(j) && activeMask(j)
    )
    assert(!(PopCount(mergeMask(i).asUInt) > 1.U && io.in(i).fire()))
  }

  // insert condition
  // firstInsert: the first invalid entry
  // if the first entry can merge, or the second entry has the same ptag as the first entry,
  // secondInsert equals the first invalid entry; otherwise, the second invalid entry
  val invalidMask = VecInit(stateVec.map(s => s.isInvalid()))
  val remInvalidMask = GetRemBits(EnsbufferWidth)(invalidMask.asUInt)

  def getFirstOneOH(input: UInt): UInt = {
    assert(input.getWidth > 1)
    val output = WireInit(VecInit(input.asBools))
    (1 until input.getWidth).map(i => {
      output(i) := !input(i - 1, 0).orR && input(i)
    })
    output.asUInt
  }
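  // e.g. getFirstOneOH("b0110".U) = "b0010".U, getFirstOneOH("b1000".U) = "b1000".U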

  val remRawInsertVec = remInvalidMask.map(getFirstOneOH(_))
  val remRawInsert = remInvalidMask.map(PriorityEncoderWithFlag(_)).unzip
  val (remRawInsertIdx, remCanInsert) = (remRawInsert._1, VecInit(remRawInsert._2))
  val remInsertIdx = VecInit(remRawInsertIdx.zipWithIndex.map { case (raw, idx) =>
    if (EnsbufferWidth > 1) Cat(raw, idx.U(log2Ceil(EnsbufferWidth).W))
    else raw
  }) // slow to generate, for debug only
  val remInsertVec = VecInit(GetRemBits.reverse(EnsbufferWidth)(remRawInsertVec))

  val enbufferSelReg = RegInit(0.U(log2Up(EnsbufferWidth).W))
  if (EnsbufferWidth > 1) when(io.in(0).valid) {
    enbufferSelReg := enbufferSelReg + 1.U
  }

  val insertIdxs = (0 until EnsbufferWidth).map(i =>
    PriorityMuxDefault(if (i == 0) Seq(0.B -> 0.U) else (0 until i).map(j => sameTag(i)(j) -> remInsertIdx(enbufferSelReg + j.U)), remInsertIdx(enbufferSelReg + i.U))
  ) // slow to generate, for debug only
  val insertVecs = (0 until EnsbufferWidth).map(i =>
    PriorityMuxDefault(if (i == 0) Seq(0.B -> 0.U) else (0 until i).map(j => sameTag(i)(j) -> remInsertVec(enbufferSelReg + j.U)), remInsertVec(enbufferSelReg + i.U))
  ) // slow to generate, for debug only
  val canInserts = (0 until EnsbufferWidth).map(i =>
    PriorityMuxDefault(if (i == 0) Seq(0.B -> 0.B) else (0 until i).map(j => sameTag(i)(j) -> remCanInsert(enbufferSelReg + j.U)), remCanInsert(enbufferSelReg + i.U))
  ).map(_ && sbuffer_state =/= x_drain_sbuffer)
  val forward_need_uarch_drain = WireInit(false.B)
  val merge_need_uarch_drain = WireInit(false.B)
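  // do_uarch_drain aligns the two mismatch sources: a forward (vtag/ptag)
  // mismatch is observed one cycle after the forward query, while a merge
  // mismatch is raised in sbuffer_in_s1 and is delayed one extra cycle,
  // presumably so the in-flight s2 data write lands before the drain starts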
  val do_uarch_drain = RegNext(forward_need_uarch_drain) || RegNext(RegNext(merge_need_uarch_drain))
  XSPerfAccumulate("do_uarch_drain", do_uarch_drain)

  (0 until EnsbufferWidth).foreach(i =>
    io.in(i).ready := canInserts(i) && (if (i == 0) 1.B else !sameWord(0)(i) && io.in(i - 1).ready)
  )

  def wordReqToBufLine( // allocate a new line in sbuffer
    req: DCacheWordReq,
    reqptag: UInt,
    reqvtag: UInt,
    insertIdx: UInt,
    insertVec: UInt,
    wordOffset: UInt,
    flushMask: Bool
  ): Unit = {
    assert(UIntToOH(insertIdx) === insertVec)
    val sameBlockInflightMask = genSameBlockInflightMask(reqptag)
    (0 until StoreBufferSize).map(entryIdx => {
      when(insertVec(entryIdx)){
        stateVec(entryIdx).state_valid := true.B
        stateVec(entryIdx).w_sameblock_inflight := sameBlockInflightMask.orR // set w_sameblock_inflight when a line is first allocated
        when(sameBlockInflightMask.orR){
          waitInflightMask(entryIdx) := sameBlockInflightMask
        }
        cohCount(entryIdx) := 0.U
        // missqReplayCount(insertIdx) := 0.U
        ptag(entryIdx) := reqptag
        vtag(entryIdx) := reqvtag // update vtag iff a new sbuffer line is allocated
      }
    })
  }

  def mergeWordReq( // merge write req into an existing line
    req: DCacheWordReq,
    reqptag: UInt,
    reqvtag: UInt,
    mergeIdx: UInt,
    mergeVec: UInt,
    wordOffset: UInt
  ): Unit = {
    assert(UIntToOH(mergeIdx) === mergeVec)
    (0 until StoreBufferSize).map(entryIdx => {
      when(mergeVec(entryIdx)) {
        cohCount(entryIdx) := 0.U
        // missqReplayCount(entryIdx) := 0.U
        // check if vtag is the same, if not, trigger sbuffer flush
        when(reqvtag =/= vtag(entryIdx)) {
          XSDebug("reqvtag =/= sbufvtag req(vtag %x ptag %x) sbuffer(vtag %x ptag %x)\n",
            reqvtag << OffsetWidth,
            reqptag << OffsetWidth,
            vtag(entryIdx) << OffsetWidth,
            ptag(entryIdx) << OffsetWidth
          )
          merge_need_uarch_drain := true.B
        }
      }
    })
  }

  for(((in, wordOffset), i) <- io.in.zip(words).zipWithIndex){
    writeReq(i).valid := in.fire()
    writeReq(i).bits.wordOffset := wordOffset
    writeReq(i).bits.mask := in.bits.mask
    writeReq(i).bits.data := in.bits.data
    writeReq(i).bits.wline := in.bits.wline
    writeReq(i).bits.cleanMask := false.B
    val debug_insertIdx = insertIdxs(i)
    val insertVec = insertVecs(i)
    assert(!((PopCount(insertVec) > 1.U) && in.fire()))
    val insertIdx = OHToUInt(insertVec)
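    // flushMask: no earlier same-cycle req targets the same line, i.e. this
    // req allocates a fresh line, whose stale byte mask must be cleared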
    val flushMask = if(i == 0) true.B else (0 until i).map(j => !sameTag(i)(j)).reduce(_ && _)
    flushMask.suggestName(s"flushMask_${i}")
    accessIdx(i).valid := RegNext(in.fire())
    accessIdx(i).bits := RegNext(Mux(canMerge(i), mergeIdx(i), insertIdx))
    when(in.fire()){
      when(canMerge(i)){
        writeReq(i).bits.wvec := mergeVec(i)
        mergeWordReq(in.bits, inptags(i), invtags(i), mergeIdx(i), mergeVec(i), wordOffset)
        XSDebug(p"merge req $i to line [${mergeIdx(i)}]\n")
      }.otherwise({
        writeReq(i).bits.wvec := insertVec
        writeReq(i).bits.cleanMask := flushMask
        wordReqToBufLine(in.bits, inptags(i), invtags(i), insertIdx, insertVec, wordOffset, flushMask)
        XSDebug(p"insert req $i to line[$insertIdx]\n")
        assert(debug_insertIdx === insertIdx)
      })
    }
  }


  for(i <- 0 until StoreBufferSize){
    XSDebug(stateVec(i).isValid(),
      p"[$i] timeout:${cohCount(i)(EvictCountBits-1)} state:${stateVec(i)}\n"
    )
  }

  for((req, i) <- io.in.zipWithIndex){
    XSDebug(req.fire(),
      p"accept req [$i]: " +
        p"addr:${Hexadecimal(req.bits.addr)} " +
        p"mask:${Binary(req.bits.mask)} " +
        p"data:${Hexadecimal(req.bits.data)}\n"
    )
    XSDebug(req.valid && !req.ready,
      p"req [$i] blocked by sbuffer\n"
    )
  }

  // ---------------------- Send Dcache Req ---------------------

  val sbuffer_empty = Cat(invalidMask).andR()
  val sq_empty = !Cat(io.in.map(_.valid)).orR()
  val empty = sbuffer_empty && sq_empty
  val threshold = RegNext(io.csrCtrl.sbuffer_threshold +& 1.U)
  val validCount = PopCount(activeMask)
  val do_eviction = RegNext(validCount >= threshold || validCount === (StoreBufferSize-1).U, init = false.B)
  require((StoreBufferThreshold + 1) <= StoreBufferSize)

  XSDebug(p"validCount[$validCount]\n")

  io.flush.empty := RegNext(empty && io.sqempty)
  // lru.io.flush := sbuffer_state === x_drain_all && empty
  switch(sbuffer_state){
    is(x_idle){
      when(io.flush.valid){
        sbuffer_state := x_drain_all
      }.elsewhen(do_uarch_drain){
        sbuffer_state := x_drain_sbuffer
      }.elsewhen(do_eviction){
        sbuffer_state := x_replace
      }
    }
    is(x_drain_all){
      when(empty){
        sbuffer_state := x_idle
      }
    }
    is(x_drain_sbuffer){
      when(io.flush.valid){
        sbuffer_state := x_drain_all
      }.elsewhen(sbuffer_empty){
        sbuffer_state := x_idle
      }
    }
    is(x_replace){
      when(io.flush.valid){
        sbuffer_state := x_drain_all
      }.elsewhen(do_uarch_drain){
        sbuffer_state := x_drain_sbuffer
      }.elsewhen(!do_eviction){
        sbuffer_state := x_idle
      }
    }
  }
  XSDebug(p"sbuffer state:${sbuffer_state} do eviction:${do_eviction} empty:${empty}\n")

  def noSameBlockInflight(idx: UInt): Bool = {
    // stateVec(idx) itself must not be s_inflight
    !Cat(widthMap(i => inflightMask(i) && ptag(idx) === ptag(i))).orR()
  }

  def genSameBlockInflightMask(ptag_in: UInt): UInt = {
    val mask = VecInit(widthMap(i => inflightMask(i) && ptag_in === ptag(i))).asUInt // quite slow, use it with care
    assert(!(PopCount(mask) > 1.U))
    mask
  }

  def haveSameBlockInflight(ptag_in: UInt): Bool = {
    genSameBlockInflightMask(ptag_in).orR
  }

  // ---------------------------------------------------------------------------
  // sbuffer to dcache pipeline
  // ---------------------------------------------------------------------------

  // Now sbuffer deq logic is divided into 2 stages (plus an extra resp stage):

  // sbuffer_out_s0:
  // * read data and meta from sbuffer
  // * RegNext() them
  // * set line state to inflight

  // sbuffer_out_s1:
  // * send write req to dcache

  // sbuffer_out_extra:
  // * receive write result from dcache
  // * update line state

  val sbuffer_out_s1_ready = Wire(Bool())

  // ---------------------------------------------------------------------------
  // sbuffer_out_s0
  // ---------------------------------------------------------------------------

  val need_drain = needDrain(sbuffer_state)
  val need_replace = do_eviction || (sbuffer_state === x_replace)
  val sbuffer_out_s0_evictionIdx = Mux(missqReplayHasTimeOut,
    missqReplayTimeOutIdxReg,
    Mux(need_drain,
      drainIdx,
      Mux(cohHasTimeOut, cohTimeOutIdx, replaceIdx)
    )
  )
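  // eviction priority: missq replay timeout > drain > cohCount timeout > plru replace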

  // If there is an inflight dcache req with the same ptag as
  // sbuffer_out_s0_evictionIdx's ptag, the current eviction should be blocked.
  val sbuffer_out_s0_valid = missqReplayHasTimeOut ||
    stateVec(sbuffer_out_s0_evictionIdx).isDcacheReqCandidate() &&
    (need_drain || cohHasTimeOut || need_replace)
  assert(!(
    stateVec(sbuffer_out_s0_evictionIdx).isDcacheReqCandidate &&
    !noSameBlockInflight(sbuffer_out_s0_evictionIdx)
  ))
  val sbuffer_out_s0_cango = sbuffer_out_s1_ready
  sbuffer_out_s0_fire := sbuffer_out_s0_valid && sbuffer_out_s0_cango

  // ---------------------------------------------------------------------------
  // sbuffer_out_s1
  // ---------------------------------------------------------------------------

  // TODO: use EnsbufferWidth
  val shouldWaitWriteFinish = VecInit((0 until StorePipelineWidth).map{i =>
    (RegNext(writeReq(i).bits.wvec).asUInt & UIntToOH(RegNext(sbuffer_out_s0_evictionIdx))).asUInt.orR &&
    RegNext(writeReq(i).valid)
  }).asUInt.orR
  // block dcache write if read / write hazard
  val blockDcacheWrite = shouldWaitWriteFinish

  val sbuffer_out_s1_valid = RegInit(false.B)
  sbuffer_out_s1_ready := io.dcache.req.ready && !blockDcacheWrite || !sbuffer_out_s1_valid
  val sbuffer_out_s1_fire = io.dcache.req.fire()

  // when sbuffer_out_s1_fire, send dcache req stored in pipeline reg to dcache
  when(sbuffer_out_s1_fire){
    sbuffer_out_s1_valid := false.B
  }
  // when sbuffer_out_s0_fire, read dcache req data and store them in a pipeline reg
  when(sbuffer_out_s0_cango){
    sbuffer_out_s1_valid := sbuffer_out_s0_valid
  }
  when(sbuffer_out_s0_fire){
    stateVec(sbuffer_out_s0_evictionIdx).state_inflight := true.B
    stateVec(sbuffer_out_s0_evictionIdx).w_timeout := false.B
    // stateVec(sbuffer_out_s0_evictionIdx).s_pipe_req := true.B
    XSDebug(p"$sbuffer_out_s0_evictionIdx will be sent to Dcache\n")
  }

  XSDebug(p"need drain:$need_drain cohHasTimeOut: $cohHasTimeOut need replace:$need_replace\n")
  XSDebug(p"drainIdx:$drainIdx tIdx:$cohTimeOutIdx replIdx:$replaceIdx " +
    p"blocked:${!noSameBlockInflight(sbuffer_out_s0_evictionIdx)} v:${activeMask(sbuffer_out_s0_evictionIdx)}\n")
  XSDebug(p"sbuffer_out_s0_valid:$sbuffer_out_s0_valid evictIdx:$sbuffer_out_s0_evictionIdx dcache ready:${io.dcache.req.ready}\n")
  // Note: if other dcache reqs in the same block are inflight,
  // the lru update may not be accurate
  accessIdx(EnsbufferWidth).valid := invalidMask(replaceIdx) || (
    need_replace && !need_drain && !cohHasTimeOut && !missqReplayHasTimeOut && sbuffer_out_s0_cango && activeMask(replaceIdx))
  accessIdx(EnsbufferWidth).bits := replaceIdx
  val sbuffer_out_s1_evictionIdx = RegEnable(sbuffer_out_s0_evictionIdx, enable = sbuffer_out_s0_fire)
  val sbuffer_out_s1_evictionPTag = RegEnable(ptag(sbuffer_out_s0_evictionIdx), enable = sbuffer_out_s0_fire)
  val sbuffer_out_s1_evictionVTag = RegEnable(vtag(sbuffer_out_s0_evictionIdx), enable = sbuffer_out_s0_fire)

  io.dcache.req.valid := sbuffer_out_s1_valid && !blockDcacheWrite
  io.dcache.req.bits := DontCare
  io.dcache.req.bits.cmd   := MemoryOpConstants.M_XWR
  io.dcache.req.bits.addr  := getAddr(sbuffer_out_s1_evictionPTag)
  io.dcache.req.bits.vaddr := getAddr(sbuffer_out_s1_evictionVTag)
  io.dcache.req.bits.data  := data(sbuffer_out_s1_evictionIdx).asUInt
  io.dcache.req.bits.mask  := mask(sbuffer_out_s1_evictionIdx).asUInt
  io.dcache.req.bits.id := sbuffer_out_s1_evictionIdx

  when (sbuffer_out_s1_fire) {
    assert(!(io.dcache.req.bits.vaddr === 0.U))
    assert(!(io.dcache.req.bits.addr === 0.U))
  }

  XSDebug(sbuffer_out_s1_fire,
    p"send buf [$sbuffer_out_s1_evictionIdx] to Dcache, req fire\n"
  )

  // update sbuffer status according to dcache resp source

  def id_to_sbuffer_id(id: UInt): UInt = {
    require(id.getWidth >= log2Up(StoreBufferSize))
    id(log2Up(StoreBufferSize)-1, 0)
  }

  // hit resp
  io.dcache.hit_resps.map(resp => {
    val dcache_resp_id = resp.bits.id
    when (resp.fire()) {
      stateVec(dcache_resp_id).state_inflight := false.B
      stateVec(dcache_resp_id).state_valid := false.B
      assert(!resp.bits.replay)
      assert(!resp.bits.miss) // no need to resp on miss; to be optimized
      assert(stateVec(dcache_resp_id).state_inflight === true.B)
    }

    // Updating the w_sameblock_inflight flag is delayed by 1 cycle.
    //
    // When a new req allocates a new line in sbuffer, the sameblock_inflight check
    // ignores the current dcache.hit_resps. Then, in the next cycle, we have plenty
    // of time to check whether the same block is still inflight.
    (0 until StoreBufferSize).map(i => {
      when(
        stateVec(i).w_sameblock_inflight &&
        stateVec(i).state_valid &&
        RegNext(resp.fire()) &&
        waitInflightMask(i) === UIntToOH(RegNext(id_to_sbuffer_id(dcache_resp_id)))
      ){
        stateVec(i).w_sameblock_inflight := false.B
      }
    })
  })


  // replay resp
  val replay_resp_id = io.dcache.replay_resp.bits.id
  when (io.dcache.replay_resp.fire()) {
    missqReplayCount(replay_resp_id) := 0.U
    stateVec(replay_resp_id).w_timeout := true.B
    // waiting for timeout
    assert(io.dcache.replay_resp.bits.replay)
    assert(stateVec(replay_resp_id).state_inflight === true.B)
  }

  // TODO: reuse cohCount
  (0 until StoreBufferSize).map(i => {
    when(stateVec(i).w_timeout && stateVec(i).state_inflight && !missqReplayCount(i)(MissqReplayCountBits-1)) {
      missqReplayCount(i) := missqReplayCount(i) + 1.U
    }
    when(activeMask(i) && !cohTimeOutMask(i)){
      cohCount(i) := cohCount(i)+1.U
    }
  })
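  // both counters saturate once their MSB (the timeout bit) is set, so the
  // timeout condition holds instead of wrapping around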

  if (env.EnableDifftest) {
    // hit resp
    io.dcache.hit_resps.zipWithIndex.map{case (resp, index) => {
      val difftest = Module(new DifftestSbufferEvent)
      val dcache_resp_id = resp.bits.id
      difftest.io.clock := clock
      difftest.io.coreid := io.hartId
      difftest.io.index := index.U
      difftest.io.sbufferResp := RegNext(resp.fire())
      difftest.io.sbufferAddr := RegNext(getAddr(ptag(dcache_resp_id)))
      difftest.io.sbufferData := RegNext(data(dcache_resp_id).asTypeOf(Vec(CacheLineBytes, UInt(8.W))))
      difftest.io.sbufferMask := RegNext(mask(dcache_resp_id).asUInt)
    }}
  }

  // ---------------------- Load Data Forward ---------------------
  val mismatch = Wire(Vec(LoadPipelineWidth, Bool()))
  XSPerfAccumulate("vaddr_match_failed", mismatch.reduce(_ || _))
  for ((forward, i) <- io.forward.zipWithIndex) {
    val vtag_matches = VecInit(widthMap(w => vtag(w) === getVTag(forward.vaddr)))
    val ptag_matches = VecInit(widthMap(w => ptag(w) === getPTag(forward.paddr)))
    val tag_matches = vtag_matches
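    // forwarding matches on vtag, which is available earlier than the paddr;
    // the ptag compare result is only used one cycle later to detect a
    // vtag/ptag disagreement, which then triggers a sbuffer drain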
    val tag_mismatch = RegNext(forward.valid) && VecInit(widthMap(w =>
      RegNext(vtag_matches(w)) =/= RegNext(ptag_matches(w)) && RegNext((activeMask(w) || inflightMask(w)))
    )).asUInt.orR
    mismatch(i) := tag_mismatch
    when (tag_mismatch) {
      XSDebug("forward tag mismatch: pmatch %x vmatch %x vaddr %x paddr %x\n",
        RegNext(ptag_matches.asUInt),
        RegNext(vtag_matches.asUInt),
        RegNext(forward.vaddr),
        RegNext(forward.paddr)
      )
      forward_need_uarch_drain := true.B
    }
    val valid_tag_matches = widthMap(w => tag_matches(w) && activeMask(w))
    val inflight_tag_matches = widthMap(w => tag_matches(w) && inflightMask(w))
    val line_offset_mask = UIntToOH(getWordOffset(forward.paddr))

    val valid_tag_match_reg = valid_tag_matches.map(RegNext(_))
    val inflight_tag_match_reg = inflight_tag_matches.map(RegNext(_))
    val line_offset_reg = RegNext(line_offset_mask)
    val forward_mask_candidate_reg = RegEnable(
      VecInit(mask.map(entry => entry(getWordOffset(forward.paddr)))),
      forward.valid
    )
    val forward_data_candidate_reg = RegEnable(
      VecInit(data.map(entry => entry(getWordOffset(forward.paddr)))),
      forward.valid
    )

    val selectedValidMask = Mux1H(valid_tag_match_reg, forward_mask_candidate_reg)
    val selectedValidData = Mux1H(valid_tag_match_reg, forward_data_candidate_reg)
    selectedValidMask.suggestName("selectedValidMask_"+i)
    selectedValidData.suggestName("selectedValidData_"+i)

    val selectedInflightMask = Mux1H(inflight_tag_match_reg, forward_mask_candidate_reg)
    val selectedInflightData = Mux1H(inflight_tag_match_reg, forward_data_candidate_reg)
    selectedInflightMask.suggestName("selectedInflightMask_"+i)
    selectedInflightData.suggestName("selectedInflightData_"+i)

    // one-cycle-earlier mask generation, used only for forwardMaskFast
    val selectedInflightMaskFast = Mux1H(line_offset_mask, Mux1H(inflight_tag_matches, mask).asTypeOf(Vec(CacheLineWords, Vec(DataBytes, Bool()))))
    val selectedValidMaskFast = Mux1H(line_offset_mask, Mux1H(valid_tag_matches, mask).asTypeOf(Vec(CacheLineWords, Vec(DataBytes, Bool()))))

    forward.dataInvalid := false.B // data in store line merge buffer is always ready
    forward.matchInvalid := tag_mismatch // paddr / vaddr cam result does not match
    for (j <- 0 until DataBytes) {
      forward.forwardMask(j) := false.B
      forward.forwardData(j) := DontCare

      // valid entries have higher priority than inflight entries
      when(selectedInflightMask(j)) {
        forward.forwardMask(j) := true.B
        forward.forwardData(j) := selectedInflightData(j)
      }
      when(selectedValidMask(j)) {
        forward.forwardMask(j) := true.B
        forward.forwardData(j) := selectedValidData(j)
      }

      forward.forwardMaskFast(j) := selectedInflightMaskFast(j) || selectedValidMaskFast(j)
    }
  }

  for (i <- 0 until StoreBufferSize) {
    XSDebug("sbf entry " + i + " : ptag %x vtag %x valid %x active %x inflight %x w_timeout %x\n",
      ptag(i) << OffsetWidth,
      vtag(i) << OffsetWidth,
      stateVec(i).isValid(),
      activeMask(i),
      inflightMask(i),
      stateVec(i).w_timeout
    )
  }

  val perf_valid_entry_count = RegNext(PopCount(VecInit(stateVec.map(s => !s.isInvalid())).asUInt))
  XSPerfHistogram("util", perf_valid_entry_count, true.B, 0, StoreBufferSize, 1)
  XSPerfAccumulate("sbuffer_req_valid", PopCount(VecInit(io.in.map(_.valid)).asUInt))
  XSPerfAccumulate("sbuffer_req_fire", PopCount(VecInit(io.in.map(_.fire())).asUInt))
  XSPerfAccumulate("sbuffer_merge", PopCount(VecInit(io.in.zipWithIndex.map({case (in, i) => in.fire() && canMerge(i)})).asUInt))
  XSPerfAccumulate("sbuffer_newline", PopCount(VecInit(io.in.zipWithIndex.map({case (in, i) => in.fire() && !canMerge(i)})).asUInt))
  XSPerfAccumulate("dcache_req_valid", io.dcache.req.valid)
  XSPerfAccumulate("dcache_req_fire", io.dcache.req.fire())
  XSPerfAccumulate("sbuffer_idle", sbuffer_state === x_idle)
  XSPerfAccumulate("sbuffer_flush", sbuffer_state === x_drain_sbuffer)
  XSPerfAccumulate("sbuffer_replace", sbuffer_state === x_replace)
  (0 until EnsbufferWidth).foreach(i => XSPerfAccumulate(s"canInserts_${i}", canInserts(i)))
  XSPerfAccumulate("mainpipe_resp_valid", io.dcache.main_pipe_hit_resp.fire())
  XSPerfAccumulate("refill_resp_valid", io.dcache.refill_hit_resp.fire())
  XSPerfAccumulate("replay_resp_valid", io.dcache.replay_resp.fire())
  XSPerfAccumulate("coh_timeout", cohHasTimeOut)

  // val (store_latency_sample, store_latency) = TransactionLatencyCounter(io.lsu.req.fire(), io.lsu.resp.fire())
  // XSPerfHistogram("store_latency", store_latency, store_latency_sample, 0, 100, 10)
  // XSPerfAccumulate("store_req", io.lsu.req.fire())

  val perfEvents = Seq(
    ("sbuffer_req_valid ", PopCount(VecInit(io.in.map(_.valid)).asUInt)                                                                ),
    ("sbuffer_req_fire  ", PopCount(VecInit(io.in.map(_.fire())).asUInt)                                                               ),
    ("sbuffer_merge     ", PopCount(VecInit(io.in.zipWithIndex.map({case (in, i) => in.fire() && canMerge(i)})).asUInt)                ),
    ("sbuffer_newline   ", PopCount(VecInit(io.in.zipWithIndex.map({case (in, i) => in.fire() && !canMerge(i)})).asUInt)               ),
    ("dcache_req_valid  ", io.dcache.req.valid                                                                                         ),
    ("dcache_req_fire   ", io.dcache.req.fire()                                                                                        ),
    ("sbuffer_idle      ", sbuffer_state === x_idle                                                                                    ),
    ("sbuffer_flush     ", sbuffer_state === x_drain_sbuffer                                                                           ),
    ("sbuffer_replace   ", sbuffer_state === x_replace                                                                                 ),
    ("mpipe_resp_valid  ", io.dcache.main_pipe_hit_resp.fire()                                                                         ),
    ("refill_resp_valid ", io.dcache.refill_hit_resp.fire()                                                                            ),
    ("replay_resp_valid ", io.dcache.replay_resp.fire()                                                                                ),
    ("coh_timeout       ", cohHasTimeOut                                                                                               ),
    ("sbuffer_1_4_valid ", (perf_valid_entry_count < (StoreBufferSize.U/4.U))                                                          ),
    ("sbuffer_2_4_valid ", (perf_valid_entry_count > (StoreBufferSize.U/4.U)) & (perf_valid_entry_count <= (StoreBufferSize.U/2.U))    ),
    ("sbuffer_3_4_valid ", (perf_valid_entry_count > (StoreBufferSize.U/2.U)) & (perf_valid_entry_count <= (StoreBufferSize.U*3.U/4.U))),
    ("sbuffer_full_valid", (perf_valid_entry_count > (StoreBufferSize.U*3.U/4.U)))
  )
  generatePerfEvent()

}