package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
// import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants, TlbReq, DCacheLoadReq, DCacheWordResp}
import xiangshan.backend.LSUOpType

class LoadToLsqIO extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val forward = new LoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
  })

  val s0_uop = io.in.bits.uop
  val s0_vaddr_old = io.in.bits.src1 + SignExt(ImmUnion.I.toImm32(s0_uop.ctrl.imm), XLEN)
  // Split the address add for timing: add the low 12 bits first, then use
  // bit 12 of the 13-bit low sum as the carry/borrow into the upper bits.
  val imm12 = WireInit(s0_uop.ctrl.imm(11, 0))
  val s0_vaddr_lo = io.in.bits.src1(11, 0) + Cat(0.U(1.W), imm12)
  val s0_vaddr_hi = Mux(imm12(11),
    Mux(s0_vaddr_lo(12), io.in.bits.src1(VAddrBits-1, 12), io.in.bits.src1(VAddrBits-1, 12) + SignExt(1.U, VAddrBits-12)),
    Mux(s0_vaddr_lo(12), io.in.bits.src1(VAddrBits-1, 12) + 1.U, io.in.bits.src1(VAddrBits-1, 12))
  )
  val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11, 0))
  when (io.in.fire() && s0_vaddr(VAddrBits-1, 0) =/= (io.in.bits.src1 + SignExt(ImmUnion.I.toImm32(s0_uop.ctrl.imm), XLEN))(VAddrBits-1, 0)) {
    printf("s0_vaddr %x s0_vaddr_old %x\n", s0_vaddr, s0_vaddr_old(VAddrBits-1, 0))
  }
  val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1, 0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.meta.id := DontCare
  io.dcacheReq.bits.meta.vaddr := s0_vaddr
  io.dcacheReq.bits.meta.paddr := DontCare
  io.dcacheReq.bits.meta.uop := s0_uop
  io.dcacheReq.bits.meta.mmio := false.B
  io.dcacheReq.bits.meta.tlb_miss := false.B
  io.dcacheReq.bits.meta.mask := s0_mask
  io.dcacheReq.bits.meta.replay := false.B

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                    // b
    "b01".U -> (s0_vaddr(0) === 0.U),     // h
    "b10".U -> (s0_vaddr(1, 0) === 0.U),  // w
    "b11".U -> (s0_vaddr(2, 0) === 0.U)   // d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
}
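// The object below is an illustrative, hypothetical sketch (not part of the
// original design) of the split-add trick used in LoadUnit_S0 above, written in
// plain Scala so it can be checked by hand: the low 12 bits of src1 are added
// to the zero-extended 12-bit immediate, and bit 12 of that 13-bit sum selects
// the carry/borrow correction applied to the upper bits.
private object SplitAddExample {
  def splitAdd(src1: Long, imm12: Int): Long = {
    val lo = (src1 & 0xFFF) + (imm12 & 0xFFF)   // 13-bit low sum (s0_vaddr_lo)
    val hiBase = src1 >>> 12
    val immNeg = (imm12 & 0x800) != 0           // imm12(11): immediate sign bit
    val carry = (lo >> 12) & 1                  // carry out of the low add
    val hi =
      if (immNeg) { if (carry == 1) hiBase else hiBase - 1 } // -1 plays the role of SignExt(1.U, ...)
      else        { if (carry == 1) hiBase + 1 else hiBase }
    (hi << 12) | (lo & 0xFFF)
  }
  // e.g. splitAdd(0x1FFEL, 0x004) == 0x2002L and splitAdd(0x2002L, 0xFFC) == 0x1FFEL,
  // matching the full-width add src1 + SignExt(imm12) that the printf in S0 cross-checks.
}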
// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX fields will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld

  io.in.ready := !io.in.valid || io.out.ready

}
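// The object below is an illustrative, hypothetical sketch (not part of the
// original design) of the byte-wise forward merge performed in LoadUnit_S2
// below, on plain Scala types. Each forwarding source replies with a per-byte
// (mask, data) pair; later sources override earlier ones, so passing
// (sbuffer, lsq) in that order gives the lsq priority, matching S2's priority
// rule: the store queue presumably holds stores younger than those already
// drained into the sbuffer.
private object ForwardMergeExample {
  // each source: (mask, data), one Boolean/byte per lane
  def merge(dcacheData: Seq[Int], sources: Seq[(Seq[Boolean], Seq[Int])]): Seq[Int] =
    dcacheData.indices.map { i =>
      sources.foldLeft(dcacheData(i)) { case (byte, (mask, data)) =>
        if (mask(i)) data(i) else byte // a forwarded byte wins over the dcache byte
      }
    }
}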
// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2 extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_mmio = io.in.bits.mmio
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feed the tlb result back to the RS
  io.tlbFeedback.valid := io.in.valid
  io.tlbFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio)
  io.tlbFeedback.bits.roqIdx := s2_uop.roqIdx

  val forwardMask = io.out.bits.forwardMask
  val forwardData = io.out.bits.forwardData
  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdata = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j)))).asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)

  // TODO: ECC check

  io.out.valid := io.in.valid && !s2_tlb_miss && (!s2_cache_replay || s2_mmio)
  // The instruction will be canceled in the store queue / lsq,
  // so we do not need to care about flushes in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark it as not-miss and let it write back to the roq (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we can not let
  //   io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead: it means the forward logic has prepared all
  // the data needed, so the dcache query is no longer necessary.
  // Such instructions will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception

  io.in.ready := io.out.ready || !io.in.valid

  // merge forward results
  // lsq has higher priority than sbuffer
  io.lsq := DontCare
  io.sbuffer := DontCare
  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    when (io.sbuffer.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.sbuffer.forwardData(i)
    }
    when (io.lsq.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.lsq.forwardData(i)
    }
  }

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    io.out.bits.forwardData.asUInt, io.out.bits.forwardMask.asUInt
  )
}
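// Illustrative note (not in the original source): rdataSel in LoadUnit_S2 above
// shifts the merged 64-bit word right by the byte offset s2_paddr(2,0), so the
// addressed byte lands at bit 0; rdataHelper (from HasLoadHelper) then,
// presumably keyed on the uop's fuOpType, sign- or zero-extends the low bits.
// For example, an lh at s2_paddr(2,0) = "b010" selects rdata(63,16) and
// sign-extends bits (15,0) of that slice to XLEN.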
class LoadUnit extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val fpout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect))

  load_s2.io.tlbFeedback <> io.tlbFeedback
  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs;
  // the load queue will be updated at s2 for both hit/miss int/fp loads
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write back to the roq and the writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss
  val refillFpLoad = io.lsq.ldout.bits.uop.ctrl.fpWen

  // An int load, if it hits, will be written back at s2
  val intHitLoadOut = Wire(Valid(new ExuOutput))
  intHitLoadOut.valid := s2_wb_valid && !load_s2.io.out.bits.uop.ctrl.fpWen
  intHitLoadOut.bits.uop := load_s2.io.out.bits.uop
  intHitLoadOut.bits.data := load_s2.io.out.bits.data
  intHitLoadOut.bits.redirectValid := false.B
  intHitLoadOut.bits.redirect := DontCare
  intHitLoadOut.bits.brUpdate := DontCare
  intHitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  intHitLoadOut.bits.debug.isPerfCnt := false.B
  intHitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(intHitLoadOut.valid, intHitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := intHitLoadOut.valid || io.lsq.ldout.valid && !refillFpLoad

  // An fp load, if it hits, will be sent to the recoder at s2, then recoded & written back at s3
  val fpHitLoadOut = Wire(Valid(new ExuOutput))
  fpHitLoadOut.valid := s2_wb_valid && load_s2.io.out.bits.uop.ctrl.fpWen
  fpHitLoadOut.bits := intHitLoadOut.bits

  val fpLoadOut = Wire(Valid(new ExuOutput))
  fpLoadOut.bits := Mux(fpHitLoadOut.valid, fpHitLoadOut.bits, io.lsq.ldout.bits)
  fpLoadOut.valid := fpHitLoadOut.valid || io.lsq.ldout.valid && refillFpLoad

  val fpLoadOutReg = RegNext(fpLoadOut)
  io.fpout.bits := fpLoadOutReg.bits
  io.fpout.bits.data := fpRdataHelper(fpLoadOutReg.bits.uop, fpLoadOutReg.bits.data) // recode
  io.fpout.valid := RegNext(fpLoadOut.valid)

  io.lsq.ldout.ready := Mux(refillFpLoad, !fpHitLoadOut.valid, !intHitLoadOut.valid)

  when (io.ldout.fire()) {
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }

  when (io.fpout.fire()) {
    XSDebug("fpout %x\n", io.fpout.bits.uop.cf.pc)
  }
}
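// A minimal, hypothetical sketch of the PipelineConnect utility used above
// (the real implementation is imported from utils._ and may differ; it is kept
// in a comment here to avoid shadowing it). Each call inserts one stage
// register between two Decoupled interfaces and drops the buffered uop on flush,
// so s0/s1/s2 are separated by exactly one register boundary each:
//
//   object PipelineConnect {
//     def apply[T <: Data](left: DecoupledIO[T], right: DecoupledIO[T],
//                          rightOutFire: Bool, isFlush: Bool): Unit = {
//       val valid = RegInit(false.B)
//       when (rightOutFire) { valid := false.B }             // entry consumed downstream
//       when (left.valid && right.ready) { valid := true.B } // new entry latched
//       when (isFlush) { valid := false.B }                  // redirect kills the entry
//       left.ready := right.ready
//       right.bits := RegEnable(left.bits, left.valid && right.ready)
//       right.valid := valid
//     }
//   }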