xref: /XiangShan/src/main/scala/xiangshan/mem/lsqueue/LSQWrapper.scala (revision e283bb5455fe7466a8ad2c9faf7a5b8843fd9b3d)
1package xiangshan.mem
2
3import chisel3._
4import chisel3.util._
5import utils._
6import xiangshan._
7import xiangshan.cache._
8import xiangshan.cache.{DCacheWordIO, DCacheLineIO, TlbRequestIO, MemoryOpConstants}
9import xiangshan.backend.LSUOpType
10import xiangshan.mem._
11import xiangshan.backend.roq.RoqPtr
12
// Query port for the virtual address of a faulting load/store:
// the backend supplies the instruction's lq/sq indices and whether it is a
// store; the LSQ answers with the recorded vaddr (see LsqWrappper, which
// muxes between loadQueue and storeQueue on isStore).
class ExceptionAddrIO extends XSBundle {
  val lsIdx = Input(new LSIdx)       // lq/sq index of the excepting instruction
  val isStore = Input(Bool())        // selects store queue vs load queue vaddr
  val vaddr = Output(UInt(VAddrBits.W)) // vaddr recorded for that entry
}
18
19
// One load/store queue data entry as stored in LSQueueData.
// NOTE: field order defines the hardware bit layout of the Bundle — do not reorder.
class LsqEntry extends XSBundle {
  val vaddr = UInt(VAddrBits.W) // TODO: need opt
  val paddr = UInt(PAddrBits.W)
  val mask = UInt(8.W)          // per-byte valid mask of `data` (XLEN/8 = 8 bytes)
  val data = UInt(XLEN.W)
  val exception = UInt(16.W) // TODO: opt size
  val fwdMask = Vec(8, Bool())     // bytes already obtained via store-to-load forwarding
  val fwdData = Vec(8, UInt(8.W))  // forwarded byte values, merged over refill data on miss refill
}
29
// Result of one store-to-load forwarding lookup: per-byte hit mask plus the
// forwarded byte values (8 bytes = one XLEN word).
class FwdEntry extends XSBundle {
  val mask = Vec(8, Bool())
  val data = Vec(8, UInt(8.W))
}
34
35
// Backing data array shared by the load/store queues.
//
// Holds `size` LsqEntry records and provides:
//   * `nchannel` writeback write ports (io.wb)
//   * one uncache write port that updates only the data field (io.uncache)
//   * a dcache refill port that, per selected entry, merges the refill line
//     with bytes already obtained by forwarding (io.refill)
//   * `nchannel` store-to-load forwarding lookup channels (io.forward), with
//     a one-cycle (RegNext) lookup latency
//   * a combinational full read-out of all entries (io.rdata)
class LSQueueData(size: Int, nchannel: Int) extends XSModule with HasDCacheParameters with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val wb = Vec(nchannel, new Bundle() {
      val wen = Input(Bool())
      val index = Input(UInt(log2Up(size).W))
      val wdata = Input(new LsqEntry)
    })
    val uncache = new Bundle() {
      val wen = Input(Bool())
      val index = Input(UInt(log2Up(size).W))
      val wdata = Input(UInt(XLEN.W))
    }
    val refill = new Bundle() {
      val wen = Input(Vec(size, Bool()))
      val dcache = Input(new DCacheLineResp)
    }
    // needForward(ch)(0/1): bitmasks over entries — see the forwarding range
    // comment below for the two-segment (flag-split) encoding.
    val needForward = Input(Vec(nchannel, Vec(2, UInt(size.W))))
    val forward = Vec(nchannel, Flipped(new LoadForwardQueryIO))
    val rdata = Output(Vec(size, new LsqEntry))

    // val debug = new Bundle() {
    //   val debug_data = Vec(LoadQueueSize, new LsqEntry)
    // }

    // Drive a writeback port from the parent module.
    def wbWrite(channel: Int, index: UInt, wdata: LsqEntry): Unit = {
      require(channel < nchannel && channel >= 0)
      // need extra "this.wb(channel).wen := true.B"
      this.wb(channel).index := index
      this.wb(channel).wdata := wdata
    }

    // Drive the uncache write port (data field only) from the parent module.
    def uncacheWrite(index: UInt, wdata: UInt): Unit = {
      // need extra "this.uncache.wen := true.B"
      this.uncache.index := index
      this.uncache.wdata := wdata
    }

    // Issue a forwarding lookup on the given channel.
    def forwardQuery(channel: Int, paddr: UInt, needForward1: Data, needForward2: Data): Unit = {
      this.needForward(channel)(0) := needForward1
      this.needForward(channel)(1) := needForward2
      this.forward(channel).paddr := paddr
    }

    // def refillWrite(ldIdx: Int): Unit = {
    // }
    // use "this.refill.wen(ldIdx) := true.B" instead
  })

  io := DontCare

  val data = Reg(Vec(size, new LsqEntry))

  // writeback to lq/sq: one independent write port per channel.
  // BUGFIX: this loop was hard-coded as `(0 until 2)`, silently ignoring
  // io.wb(i).wen for channels >= 2 even though io.wb is sized `nchannel`
  // (and wbWrite already requires channel < nchannel). Behavior is unchanged
  // for the current nchannel == 2 instantiations.
  (0 until nchannel).map(i => {
    when(io.wb(i).wen){
      data(io.wb(i).index) := io.wb(i).wdata
    }
  })

  // uncache access result: only the data field is updated
  when(io.uncache.wen){
    data(io.uncache.index).data := io.uncache.wdata
  }

  // refill missed load
  // For each byte, keep the already-forwarded byte (fwdMask set), otherwise
  // take the byte from the dcache refill word.
  def mergeRefillData(refill: UInt, fwd: UInt, fwdMask: UInt): UInt = {
    val res = Wire(Vec(8, UInt(8.W)))
    (0 until 8).foreach(i => {
      res(i) := Mux(fwdMask(i), fwd(8 * (i + 1) - 1, 8 * i), refill(8 * (i + 1) - 1, 8 * i))
    })
    res.asUInt
  }

  // split dcache refill line into DataBits-wide words
  val words = VecInit((0 until blockWords) map { i =>
    io.refill.dcache.data(DataBits * (i + 1) - 1, DataBits * i)
  })

  // For every entry selected by refill.wen, pick its word of the refill line
  // (by paddr) and merge it with the entry's forwarded bytes.
  (0 until size).map(i => {
    when(io.refill.wen(i) ){
      val refillData = words(get_word(data(i).paddr))
      data(i).data := mergeRefillData(refillData, data(i).fwdData.asUInt, data(i).fwdMask.asUInt)
      XSDebug("miss resp: pos %d addr %x data %x + %x(%b)\n", i.U, data(i).paddr, refillData, data(i).fwdData.asUInt, data(i).fwdMask.asUInt)
    }
  })

  // forwarding
  // Compare ringBufferTail (deqPtr) and forward.sqIdx, we have two cases:
  // (1) if they have the same flag, we need to check range(tail, sqIdx)
  // (2) if they have different flags, we need to check range(tail, LoadQueueSize) and range(0, sqIdx)
  // Forward1: Mux(same_flag, range(tail, sqIdx), range(tail, LoadQueueSize))
  // Forward2: Mux(same_flag, 0.U,                   range(0, sqIdx)    )
  // i.e. forward1 is the target entries with the same flag bits and forward2 otherwise
  //
  // entry with larger index should have higher priority since its data is younger

  // FIXME: old fwd logic kept only to assert against the parallel version below;
  // remove when rtl freeze
  (0 until nchannel).map(i => {

    val forwardMask1 = WireInit(VecInit(Seq.fill(8)(false.B)))
    val forwardData1 = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))
    val forwardMask2 = WireInit(VecInit(Seq.fill(8)(false.B)))
    val forwardData2 = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))

    // sequential (last-match-wins) lookup over all entries, per byte
    for (j <- 0 until size) {
      val needCheck = io.forward(i).paddr(PAddrBits - 1, 3) === data(j).paddr(PAddrBits - 1, 3)
      (0 until XLEN / 8).foreach(k => {
        when (needCheck && data(j).mask(k)) {
          when (io.needForward(i)(0)(j)) {
            forwardMask1(k) := true.B
            forwardData1(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
          }
          when (io.needForward(i)(1)(j)) {
            forwardMask2(k) := true.B
            forwardData2(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
          }
          XSDebug(io.needForward(i)(0)(j) || io.needForward(i)(1)(j),
            p"forwarding $k-th byte ${Hexadecimal(data(j).data(8 * (k + 1) - 1, 8 * k))} " +
            p"from ptr $j\n")
        }
      })
    }

    // merge forward lookup results
    // forward2 is younger than forward1 and should have higher priority
    // (registered: reference result is available one cycle after the query)
    val oldFwdResult = Wire(new FwdEntry)
    (0 until XLEN / 8).map(k => {
      oldFwdResult.mask(k) := RegNext(forwardMask1(k) || forwardMask2(k))
      oldFwdResult.data(k) := RegNext(Mux(forwardMask2(k), forwardData2(k), forwardData1(k)))
    })

    // parallel fwd logic: reduction tree over 2*size candidate FwdEntries,
    // where the right operand of each reduction step wins on overlap
    val paddrMatch = Wire(Vec(size, Bool()))
    val matchResultVec = Wire(Vec(size * 2, new FwdEntry))

    def parallelFwd(xs: Seq[Data]): Data = {
      ParallelOperation(xs, (a: Data, b: Data) => {
        val l = a.asTypeOf(new FwdEntry)
        val r = b.asTypeOf(new FwdEntry)
        val res = Wire(new FwdEntry)
        (0 until 8).map(p => {
          res.mask(p) := l.mask(p) || r.mask(p)
          res.data(p) := Mux(r.mask(p), r.data(p), l.data(p))
        })
        res
      })
    }

    for (j <- 0 until size) {
      paddrMatch(j) := io.forward(i).paddr(PAddrBits - 1, 3) === data(j).paddr(PAddrBits - 1, 3)
    }

    for (j <- 0 until size) {
      // match result is registered here (not at the output) so the tree
      // reduction happens in the second cycle, same latency as oldFwdResult
      val needCheck0 = RegNext(paddrMatch(j) && io.needForward(i)(0)(j))
      val needCheck1 = RegNext(paddrMatch(j) && io.needForward(i)(1)(j))
      (0 until XLEN / 8).foreach(k => {
        matchResultVec(j).mask(k) := needCheck0 && data(j).mask(k)
        matchResultVec(j).data(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
        // needForward(i)(1) candidates occupy the upper half, i.e. they sit
        // later in the reduction and therefore win over the lower half
        matchResultVec(size + j).mask(k) := needCheck1 && data(j).mask(k)
        matchResultVec(size + j).data(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
      })
    }

    val parallelFwdResult = parallelFwd(matchResultVec).asTypeOf(new FwdEntry)

    io.forward(i).forwardMask := parallelFwdResult.mask
    io.forward(i).forwardData := parallelFwdResult.data

    // cross-check parallel result against the reference implementation
    when(
      oldFwdResult.mask.asUInt =/= parallelFwdResult.mask.asUInt
    ){
      printf("%d: mask error: right: %b false %b\n", GTimer(), oldFwdResult.mask.asUInt, parallelFwdResult.mask.asUInt)
    }

    for (p <- 0 until 8) {
      when(
        oldFwdResult.data(p) =/= parallelFwdResult.data(p) && oldFwdResult.mask(p)
      ){
        printf("%d: data "+p+" error: right: %x false %x\n", GTimer(), oldFwdResult.data(p), parallelFwdResult.data(p))
      }
    }

  })

  // data read: full combinational read-out of the array
  io.rdata := data
  // io.debug.debug_data := data
}
224
// Inflight miss block request: one cache-block address with a valid flag,
// used to track outstanding miss requests.
class InflightBlockInfo extends XSBundle {
  val block_addr = UInt(PAddrBits.W) // physical address of the block
  val valid = Bool()
}
230
// Dispatch -> LSQ enqueue interface, RenameWidth slots per cycle.
class LsqEnqIO extends XSBundle {
  val canAccept = Output(Bool())                  // LSQ can accept the enqueue group this cycle
  val needAlloc = Vec(RenameWidth, Input(Bool())) // per slot: uop needs an lq/sq entry
  val req = Vec(RenameWidth, Flipped(ValidIO(new MicroOp)))
  val resp = Vec(RenameWidth, Output(new LSIdx))  // allocated lq/sq indices per slot
}
237
// Load / Store Queue Wrapper for XiangShan Out of Order LSU.
// Instantiates LoadQueue and StoreQueue, steers enqueue requests to the
// right queue by commit type, fans shared inputs (redirect, storeIn, commits)
// to both queues, and arbitrates the single uncache (mmio) port between them.
// NOTE(review): class name is misspelled ("Wrappper") — kept as-is since
// renaming would break external instantiations.
class LsqWrappper extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle() {
    val enq = new LsqEnqIO
    val brqRedirect = Input(Valid(new Redirect))
    val loadIn = Vec(LoadPipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val storeIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val sbuffer = Vec(StorePipelineWidth, Decoupled(new DCacheWordReq))
    // NOTE(review): hard-coded 2 — presumably should be LoadPipelineWidth; confirm before changing
    val ldout = Vec(2, DecoupledIO(new ExuOutput)) // writeback int load
    val mmioStout = DecoupledIO(new ExuOutput) // writeback uncached store
    val forward = Vec(LoadPipelineWidth, Flipped(new LoadForwardQueryIO))
    val commits = Flipped(new RoqCommitIO)
    val rollback = Output(Valid(new Redirect)) // memory-violation replay request
    val dcache = new DCacheLineIO
    val uncache = new DCacheWordIO // shared mmio port, arbitrated below
    val roqDeqPtr = Input(new RoqPtr)
    val exceptionAddr = new ExceptionAddrIO
  })

  val loadQueue = Module(new LoadQueue)
  val storeQueue = Module(new StoreQueue)

  // io.enq logic
  // LSQ: send out canAccept when both load queue and store queue are ready
  // Dispatch: send instructions to LSQ only when they are ready
  io.enq.canAccept := loadQueue.io.enq.canAccept && storeQueue.io.enq.canAccept
  loadQueue.io.enq.sqCanAccept := storeQueue.io.enq.canAccept
  storeQueue.io.enq.lqCanAccept := loadQueue.io.enq.canAccept
  for (i <- 0 until RenameWidth) {
    // steer each slot to lq or sq based on its commit type
    val isStore = CommitType.lsInstIsStore(io.enq.req(i).bits.ctrl.commitType)

    loadQueue.io.enq.needAlloc(i) := io.enq.needAlloc(i) && !isStore
    loadQueue.io.enq.req(i).valid  := !isStore && io.enq.req(i).valid
    loadQueue.io.enq.req(i).bits  := io.enq.req(i).bits

    storeQueue.io.enq.needAlloc(i) := io.enq.needAlloc(i) && isStore
    storeQueue.io.enq.req(i).valid :=  isStore && io.enq.req(i).valid
    storeQueue.io.enq.req(i).bits := io.enq.req(i).bits

    io.enq.resp(i).lqIdx := loadQueue.io.enq.resp(i)
    io.enq.resp(i).sqIdx := storeQueue.io.enq.resp(i)
  }

  // load queue wiring
  loadQueue.io.brqRedirect <> io.brqRedirect
  loadQueue.io.loadIn <> io.loadIn
  // storeIn also goes to the load queue (for load-store violation checks)
  loadQueue.io.storeIn <> io.storeIn
  loadQueue.io.ldout <> io.ldout
  loadQueue.io.commits <> io.commits
  loadQueue.io.rollback <> io.rollback
  loadQueue.io.dcache <> io.dcache
  loadQueue.io.roqDeqPtr <> io.roqDeqPtr
  loadQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  loadQueue.io.exceptionAddr.isStore := DontCare

  // store queue wiring
  // storeQueue.io <> DontCare
  storeQueue.io.brqRedirect <> io.brqRedirect
  storeQueue.io.storeIn <> io.storeIn
  storeQueue.io.sbuffer <> io.sbuffer
  storeQueue.io.mmioStout <> io.mmioStout
  storeQueue.io.commits <> io.commits
  storeQueue.io.roqDeqPtr <> io.roqDeqPtr
  storeQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  storeQueue.io.exceptionAddr.isStore := DontCare

  // both queues drive parts of the same forward channel:
  // storeQueue's forwardMask/forwardData assignments must come last
  loadQueue.io.load_s1 <> io.forward
  storeQueue.io.forward <> io.forward // overlap forwardMask & forwardData, DO NOT CHANGE SEQUENCE

  io.exceptionAddr.vaddr := Mux(io.exceptionAddr.isStore, storeQueue.io.exceptionAddr.vaddr, loadQueue.io.exceptionAddr.vaddr)

  // naive uncache arbiter
  // state records which queue owns the in-flight uncache request so the
  // response can be routed back to it
  val s_idle :: s_load :: s_store :: Nil = Enum(3)
  val uncacheState = RegInit(s_idle)

  switch(uncacheState){
    is(s_idle){
      when(io.uncache.req.fire()){
        uncacheState := Mux(loadQueue.io.uncache.req.valid, s_load, s_store)
      }
    }
    is(s_load){
      when(io.uncache.resp.fire()){
        uncacheState := s_idle
      }
    }
    is(s_store){
      when(io.uncache.resp.fire()){
        uncacheState := s_idle
      }
    }
  }

  // default connections first, then override with the winning queue;
  // connection order is load-bearing (last connect wins in Chisel)
  loadQueue.io.uncache := DontCare
  storeQueue.io.uncache := DontCare
  loadQueue.io.uncache.resp.valid := false.B
  storeQueue.io.uncache.resp.valid := false.B
  when(loadQueue.io.uncache.req.valid){
    io.uncache.req <> loadQueue.io.uncache.req
  }.otherwise{
    io.uncache.req <> storeQueue.io.uncache.req
  }
  when(uncacheState === s_load){
    io.uncache.resp <> loadQueue.io.uncache.resp
  }.otherwise{
    io.uncache.resp <> storeQueue.io.uncache.resp
  }

  // the naive arbiter relies on at most one queue requesting/responding at a time
  assert(!(loadQueue.io.uncache.req.valid && storeQueue.io.uncache.req.valid))
  assert(!(loadQueue.io.uncache.resp.valid && storeQueue.io.uncache.resp.valid))
  assert(!((loadQueue.io.uncache.resp.valid || storeQueue.io.uncache.resp.valid) && uncacheState === s_idle))

}
351