xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision f320e0f01bd645f0a3045a8a740e60dd770734a9)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
import xiangshan.cache.mmu.{TlbRequestIO, TlbReq, TlbResp, TlbCmd}

class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new PipeLoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
  })

  val s0_uop = io.in.bits.uop
  // val s0_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  // val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))
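  // To keep address generation off the critical path, the add is split:
  // the low 12 bits (src(11,0) + imm12) are computed first, and the high
  // bits are then patched with the carry-out (bit 12) and the sign of imm12.
  // This reproduces src + SignExt(imm12, VAddrBits):
  //   carry, imm12 < 0     -> hi      (the -1 from sign extension absorbs the carry)
  //   carry, imm12 >= 0    -> hi + 1
  //   no carry, imm12 < 0  -> hi - 1  (SignExt(1.U, ...) is all ones, i.e. -1)
  //   no carry, imm12 >= 0 -> hi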
  val imm12 = WireInit(s0_uop.ctrl.imm(11,0))
  val s0_vaddr_lo = io.in.bits.src(0)(11,0) + Cat(0.U(1.W), imm12)
  val s0_vaddr_hi = Mux(s0_vaddr_lo(12),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12), io.in.bits.src(0)(VAddrBits-1, 12)+1.U),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12)+SignExt(1.U, VAddrBits-12), io.in.bits.src(0)(VAddrBits-1, 12)),
  )
  val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11,0))
  val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1,0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
  io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd  := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id   := DontCare

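  // alignment check: fuOpType(1,0) encodes the access size
  // (b00 = byte, b01 = half, b10 = word, b11 = double), so the
  // corresponding low address bits must be zero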
  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
  XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
  XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
}


// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new PipeLoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX field will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
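  // kill the dcache access issued speculatively in s0 if it missed in the
  // TLB, raised an exception, or targets MMIO space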
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("tlb_miss", io.in.valid && s1_tlb_miss)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}


// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val rsFeedback = ValidIO(new RSFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_data_invalid = io.lsq.dataInvalid
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_mmio = io.in.bits.mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feed back tlb / cache / forward query results to RS; hit means no replay is needed
  io.rsFeedback.valid := io.in.valid
  io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception) && !s2_data_invalid
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss,
    Mux(io.lsq.dataInvalid,
      RSFeedbackType.dataInvalid,
      RSFeedbackType.mshrFull
    )
  )

  // s2_cache_replay is quite slow to generate, send it separately to LQ
  io.needReplayFromRS := s2_cache_replay

  // merge forward result
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

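  // fullForward: every byte requested by the load (s2_mask) is covered by
  // forwarded store data, so the dcache response is not needed at all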
  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid
  io.lsq := DontCare
  io.sbuffer := DontCare

  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
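  // shift the merged 64-bit data right by the byte offset paddr(2,0) so the
  // accessed bytes start at bit 0; rdataHelper then extends the selected
  // bytes (sign or zero extension) according to the load type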
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)

  io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid
  // The instruction will be canceled in the store queue / LSQ,
  // so we do not need to handle flush in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when exception occurs, set it to not miss and let it write back to roq (via int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we cannot let
  // io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead: it means the forward logic has prepared all the
  // data needed, so the dcache query is no longer necessary.
  // Such instructions will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception
  // io.out.bits.forwardX will be sent to lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("dcache_miss", io.in.valid && s2_cache_miss)
  XSPerfAccumulate("full_forward", io.in.valid && fullForward)
  XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerfAccumulate("replay",  io.rsFeedback.valid && !io.rsFeedback.bits.hit)
  XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss)
  XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}

class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val rsFeedback = ValidIO(new RSFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
    val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx
  load_s0.io.isFirstIssue := io.isFirstIssue

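  // s0 -> s1 pipeline register: the in-flight uop is dropped when it is
  // squashed by a redirect or flush (same for s1 -> s2 below)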
  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
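  // the RS feedback is registered for timing; suppress it when the load in
  // s2 has been flushed by a redirect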
  io.rsFeedback.bits := RegNext(load_s2.io.rsFeedback.bits)
  io.rsFeedback.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg

  // // use s2_hit_way to select data received in s1
  // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data))
  // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)

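  // early (fast) wakeup: a load that hits a dcache way in s1, is not killed
  // (no tlb miss / exception / mmio), and does not wait on store data for
  // forwarding (dataInvalidFast) can wake up its consumers one cycle early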
  io.fastUop.valid := io.dcache.s1_hit_way.orR && !io.dcache.s1_disable_fast_wakeup && load_s1.io.in.valid &&
    !load_s1.io.dcacheKill && !io.lsq.forward.dataInvalidFast
  io.fastUop.bits := load_s1.io.out.bits.uop

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // Current dcache uses MSHRs
  // Load queue will be updated at s2 for both hit and miss, int and fp loads
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio

  // Int load, if hit, will be written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

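  // writeback arbitration: a hit load completing in s2 takes priority over
  // the delayed writeback path from the load queue (missed / mmio loads)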
  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid

  when(io.ldout.fire()){
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}