xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision c6d439803a044ea209139672b25e35fe8d7f4aa0)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._

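// IO between the load pipeline and the LSQ: s2 load results (loadIn), delayed
// writeback for missed / MMIO loads (ldout), the full-forward hint
// (loadDataForwarded), the late replay hint (needReplayFromRS), and the
// store-to-load forward query datapath (forward).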
class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new PipeLoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
  })

  val s0_uop = io.in.bits.uop
  // val s0_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  // val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))
  val imm12 = WireInit(s0_uop.ctrl.imm(11,0))
  val s0_vaddr_lo = io.in.bits.src(0)(11,0) + Cat(0.U(1.W), imm12)
  val s0_vaddr_hi = Mux(s0_vaddr_lo(12),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12), io.in.bits.src(0)(VAddrBits-1, 12)+1.U),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12)+SignExt(1.U, VAddrBits-12), io.in.bits.src(0)(VAddrBits-1, 12)),
  )
  val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11,0))
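  // The split add above replaces the commented-out full-width add for timing:
  // a 13-bit add settles the low bits early, and the high bits only need a
  // +1 / -1 correction selected by the carry and the sign of imm12.
  // Worked example (illustrative values): src = 0x0FF8, imm12 = 0x010 (+16):
  //   s0_vaddr_lo = 0xFF8 + 0x010 = 0x1008 -> bit 12 set, imm12(11) = 0,
  //   so s0_vaddr_hi = src(VAddrBits-1, 12) + 1 and s0_vaddr = 0x1008,
  //   matching src + SignExt(imm12, VAddrBits).
  // A minimal equivalence sketch one could enable in simulation (not part of
  // this revision):
  // val s0_vaddr_ref = (io.in.bits.src(0) + SignExt(imm12, VAddrBits))(VAddrBits - 1, 0)
  // assert(!io.in.valid || s0_vaddr === s0_vaddr_ref)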
  val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1,0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
  io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd  := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id   := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))
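  // e.g. a word load ("b10") at an address ending in 2 has s0_vaddr(1, 0) =/= 0,
  // so addrAligned is false and loadAddrMisaligned is recorded in the
  // exception vector below.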

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

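  // s0 accepts a new issue only when both the s1 pipeline register and the
  // DCache can take the request; otherwise the reservation station stalls.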
  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
  XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
  XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
}


// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new PipeLoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX field will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio
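  // The speculative DCache access issued in s0 is killed here when the TLB
  // missed, an exception was raised, or the address turned out to be MMIO:
  // in all three cases the cache lookup result must not be used.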

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("tlb_miss", io.in.valid && s1_tlb_miss)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}


// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val rsFeedback = ValidIO(new RSFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_data_invalid = io.lsq.dataInvalid
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_mmio = io.in.bits.mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feedback tlb result to RS
  io.rsFeedback.valid := io.in.valid
  io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception) && !s2_data_invalid
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss,
    Mux(io.lsq.dataInvalid,
      RSFeedbackType.dataInvalid,
      RSFeedbackType.mshrFull
    )
  )
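  // Replay-cause priority (illustrative): a load that both misses the TLB and
  // sees invalid forward data reports tlbMiss; dataInvalid is reported only
  // when translation succeeded; mshrFull covers the remaining case,
  // s2_cache_replay, where the DCache could not handle the miss.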

  // s2_cache_replay is quite slow to generate, so it is sent to the LQ separately
  io.needReplayFromRS := s2_cache_replay

  // merge forward result
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid
  io.lsq := DontCare
  io.sbuffer := DontCare

  // generate XLEN/8 Muxs
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }
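  // Per-byte merge example (illustrative): if the LSQ forwards byte 0 and the
  // sbuffer forwards bytes 0 and 1, byte 0 comes from the LSQ (it holds the
  // younger, not-yet-committed store data) and byte 1 from the sbuffer; bytes
  // forwarded by neither fall through to the DCache response in the data
  // merge below.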

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)
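  // Example (illustrative): a halfword load with s2_paddr(2, 0) = 2 selects
  // rdata(63, 16), placing the accessed halfword at bits (15, 0); rdataHelper
  // (from HasLoadHelper) then sign- or zero-extends it according to the uop's
  // fuOpType (e.g. lh vs. lhu).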

  io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid
  // Instructions will be canceled in the store queue / LSQ,
  // so we do not need to care about flush in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark the load as not-miss and let it write back to the roq (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we can not let
  // io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead. It means the forward logic has prepared all the
  // data needed, and the dcache query is no longer needed.
  // Such instructions will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception
  // io.out.bits.forwardX will be sent to the lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from the dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("dcache_miss", io.in.valid && s2_cache_miss)
  XSPerfAccumulate("full_forward", io.in.valid && fullForward)
  XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerfAccumulate("replay",  io.rsFeedback.valid && !io.rsFeedback.bits.hit)
  XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss)
  XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}
285
286class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper {
287  val io = IO(new Bundle() {
288    val ldin = Flipped(Decoupled(new ExuInput))
289    val ldout = Decoupled(new ExuOutput)
290    val redirect = Flipped(ValidIO(new Redirect))
291    val flush = Input(Bool())
292    val rsFeedback = ValidIO(new RSFeedback)
293    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
294    val isFirstIssue = Input(Bool())
295    val dcache = new DCacheLoadIO
296    val dtlb = new TlbRequestIO()
297    val sbuffer = new LoadForwardQueryIO
298    val lsq = new LoadToLsqIO
    val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx
  load_s0.io.isFirstIssue := io.isFirstIssue

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
  io.rsFeedback.bits := RegNext(load_s2.io.rsFeedback.bits)
  io.rsFeedback.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))
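  // The RS feedback is registered for one cycle: s2_cache_replay is produced
  // late in s2, so hit/replay information reaches the RS in the next cycle,
  // and is suppressed if the load is flushed in the meantime.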
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg
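  // UIntToMask(sqIdx.value, StoreQueueSize) sets the bits below sqIdx.value
  // (assuming the usual utils implementation), which lets the LSQ restrict
  // store-to-load forwarding to store queue entries older than this load.
  // Computing it in s0 and registering it keeps the wide mask off s1's
  // critical path.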

  // // use s2_hit_way to select data received in s1
  // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data))
  // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)

  io.fastUop.valid := io.dcache.s1_hit_way.orR && !io.dcache.s1_disable_fast_wakeup && load_s1.io.in.valid &&
    !load_s1.io.dcacheKill && !io.lsq.forward.dataInvalidFast
  io.fastUop.bits := load_s1.io.out.bits.uop
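  // Early wakeup summary: fastUop fires in s1 only when the DCache already
  // reports a tag hit, fast wakeup is not disabled, and the load is neither
  // killed (TLB miss / exception / MMIO) nor blocked by invalid forward data,
  // so dependent uops can be woken one cycle before the data arrives in s2.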

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs
  // The load queue will be updated at s2 for both hit and miss, int and fp loads
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio

  // An int load, if it hits, will be written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid
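  // Writeback arbitration: loads that hit in s2 write back immediately via
  // hitLoadOut; missed and MMIO loads write back later from the load queue
  // through io.lsq.ldout, which is only granted in cycles without an s2 hit.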

  when(io.ldout.fire()){
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}