xref: /XiangShan/src/main/scala/xiangshan/mem/sbuffer/FakeSbuffer.scala (revision 708ceed4afe43fb0ea3a52407e46b2794c573634)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils.{XSDebug, XSInfo}
import xiangshan._
import xiangshan.cache.{DCacheLineIO, DCacheWordReq, MemoryOpConstants}

// Fake store buffer for the XiangShan out-of-order LSU: a minimal stand-in for the real
// sbuffer that buffers a single store at a time and writes it to DCache before accepting
// the next one.
class FakeSbuffer(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    // store requests from the store pipeline
    val in = Vec(StorePipelineWidth, Flipped(Decoupled(new DCacheWordReq)))
    // line-granularity write port to DCache
    val dcache = new DCacheLineIO
    // store-to-load forwarding queries from the load pipelines
    val forward = Vec(LoadPipelineWidth, Flipped(new LoadForwardQueryIO))
  })

  // input port 1 must never be valid while port 0 is idle
  assert(!(io.in(1).valid && !io.in(0).valid))

  // assign default values to signals
  // only one store is handled at a time, so input port 1 is never accepted here
  io.in(1).ready := false.B

  io.dcache.req.valid := false.B
  io.dcache.req.bits := DontCare
  io.dcache.resp.ready := false.B

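  // The fake sbuffer is a simple three-state machine: accept one store in s_invalid,
  // issue it to DCache as a masked full-line write in s_req, then wait for the write
  // response in s_resp before returning to s_invalid.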
  val s_invalid :: s_req :: s_resp :: Nil = Enum(3)

  val state = RegInit(s_invalid)

  // the single buffered store request
  val req = Reg(new DCacheWordReq)

  XSDebug("state: %d\n", state)

  // a new store is accepted only while the buffer is idle
  io.in(0).ready := state === s_invalid

  // align an address to an 8-byte word / a 64-byte cache block
  def word_addr(addr: UInt) = (addr >> 3) << 3
  def block_addr(addr: UInt) = (addr >> 6) << 6

  // --------------------------------------------
  // s_invalid: receive requests
  when (state === s_invalid) {
    when (io.in(0).fire()) {
      req   := io.in(0).bits
      state := s_req
    }
  }

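  // Expand the buffered word-sized store into a full cache-line write: the 64-bit data
  // and 8-bit mask are placed at the word offset selected by req.addr(5,3); all other
  // words of the line keep a zero mask so they are not written.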
  val wdataVec = WireInit(VecInit(Seq.fill(8)(0.U(64.W))))
  val wmaskVec = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))
  wdataVec(req.addr(5,3)) := req.data
  wmaskVec(req.addr(5,3)) := req.mask

  // --------------------------------------------
  // s_req: issue the store to DCache as a masked full-line write
  when (state === s_req) {
    val dcache_req = io.dcache.req
    dcache_req.valid := true.B
    dcache_req.bits.cmd  := MemoryOpConstants.M_XWR
    dcache_req.bits.addr := block_addr(req.addr)
    dcache_req.bits.data := wdataVec.asUInt
    dcache_req.bits.mask := wmaskVec.asUInt
    dcache_req.bits.id   := DontCare

    when (dcache_req.fire()) {
      state := s_resp
    }
  }

  // --------------------------------------------
  // s_resp: wait for the DCache write response, then accept the next store
  when (state === s_resp) {
    io.dcache.resp.ready := true.B
    when (io.dcache.resp.fire()) {
      state := s_invalid
    }
  }

  // store-to-load forwarding: while a store is in flight (state =/= s_invalid), a load
  // that hits the same 8-byte word and overlaps its byte mask gets the matching bytes
  // forwarded from req.data
  for (i <- 0 until LoadPipelineWidth) {
    val addr_match = word_addr(io.forward(i).paddr) === word_addr(req.addr)
    val mask = io.forward(i).mask & req.mask(7, 0)
    val mask_match = mask =/= 0.U
    val need_forward = state =/= s_invalid && addr_match && mask_match

    io.forward(i).forwardMask := Mux(need_forward, VecInit(mask.asBools),
      VecInit(0.U(8.W).asBools))
    // use j for the byte index to avoid shadowing the loop variable i
    io.forward(i).forwardData := VecInit((0 until 8) map { j => req.data((j + 1) * 8 - 1, j * 8) })
  }

  XSInfo(io.in(0).fire(), "ensbuffer addr 0x%x wdata 0x%x mask %b\n", io.in(0).bits.addr, io.in(0).bits.data, io.in(0).bits.mask)
  XSInfo(io.in(1).fire(), "ensbuffer addr 0x%x wdata 0x%x mask %b\n", io.in(1).bits.addr, io.in(1).bits.data, io.in(1).bits.mask)
  XSInfo(io.dcache.req.fire(), "desbuffer addr 0x%x wdata 0x%x mask %b\n", io.dcache.req.bits.addr, io.dcache.req.bits.data, io.dcache.req.bits.mask)
}
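// A minimal wiring sketch (not from this file; every name on the right-hand side is a
// hypothetical placeholder). FakeSbuffer is intended as a drop-in test replacement for
// the real sbuffer, so it would be instantiated and connected along these lines:
//
//   val sbuffer = Module(new FakeSbuffer)
//   sbuffer.io.in <> storeUnits.map(_.io.sbuffer)       // hypothetical store-pipeline ports
//   dcache.io.lsu.store <> sbuffer.io.dcache            // hypothetical DCache store port
//   loadUnits.zip(sbuffer.io.forward).foreach {         // hypothetical load units
//     case (load, fwd) => load.io.sbufferForward <> fwd
//   }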