1/*************************************************************************************** 2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 3* Copyright (c) 2020-2021 Peng Cheng Laboratory 4* 5* XiangShan is licensed under Mulan PSL v2. 6* You can use this software according to the terms and conditions of the Mulan PSL v2. 7* You may obtain a copy of Mulan PSL v2 at: 8* http://license.coscl.org.cn/MulanPSL2 9* 10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 13* 14* See the Mulan PSL v2 for more details. 15***************************************************************************************/ 16 17package xiangshan.mem 18 19import chipsalliance.rocketchip.config.Parameters 20import chisel3._ 21import chisel3.util._ 22import utils._ 23import xiangshan._ 24import xiangshan.backend.decode.ImmUnion 25import xiangshan.backend.fu.PMPRespBundle 26import xiangshan.cache._ 27import xiangshan.cache.mmu.{TLB, TlbCmd, TlbPtwIO, TlbReq, TlbRequestIO, TlbResp} 28 29class LoadToLsqIO(implicit p: Parameters) extends XSBundle { 30 val loadIn = ValidIO(new LsPipelineBundle) 31 val ldout = Flipped(DecoupledIO(new ExuOutput)) 32 val loadDataForwarded = Output(Bool()) 33 val needReplayFromRS = Output(Bool()) 34 val forward = new PipeLoadForwardQueryIO 35 val loadViolationQuery = new LoadViolationQueryIO 36 val trigger = Flipped(new LqTriggerIO) 37} 38 39class LoadToLoadIO(implicit p: Parameters) extends XSBundle { 40 // load to load fast path is limited to ld (64 bit) used as vaddr src1 only 41 val data = UInt(XLEN.W) 42 val valid = Bool() 43} 44 45class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle { 46 val tdata2 = Input(UInt(64.W)) 47 val matchType = Input(UInt(2.W)) 48 val tEnable = Input(Bool()) // timing is calculated before this 49 val addrHit = Output(Bool()) 50 val lastDataHit = Output(Bool()) 51} 52 53// Load Pipeline Stage 0 54// Generate addr, use addr to query DCache and DTLB 55class LoadUnit_S0(implicit p: Parameters) extends XSModule with HasDCacheParameters{ 56 val io = IO(new Bundle() { 57 val in = Flipped(Decoupled(new ExuInput)) 58 val out = Decoupled(new LsPipelineBundle) 59 val fastpath = Input(Vec(LoadPipelineWidth, new LoadToLoadIO)) 60 val dtlbReq = DecoupledIO(new TlbReq) 61 val dcacheReq = DecoupledIO(new DCacheWordReq) 62 val rsIdx = Input(UInt(log2Up(IssQueSize).W)) 63 val isFirstIssue = Input(Bool()) 64 val loadFastMatch = Input(UInt(exuParameters.LduCnt.W)) 65 }) 66 require(LoadPipelineWidth == exuParameters.LduCnt) 67 68 val s0_uop = io.in.bits.uop 69 val imm12 = WireInit(s0_uop.ctrl.imm(11,0)) 70 71 val s0_vaddr = WireInit(io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)) 72 val s0_mask = WireInit(genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))) 73 74 if (EnableLoadToLoadForward) { 75 // slow vaddr from non-load insts 76 val slowpath_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits) 77 val slowpath_mask = genWmask(slowpath_vaddr, s0_uop.ctrl.fuOpType(1,0)) 78 79 // fast vaddr from load insts 80 val fastpath_vaddrs = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => { 81 io.fastpath(i).data + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits) 82 }))) 83 val fastpath_masks = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => { 84 genWmask(fastpath_vaddrs(i), s0_uop.ctrl.fuOpType(1,0)) 85 }))) 86 val fastpath_vaddr = Mux1H(io.loadFastMatch, fastpath_vaddrs) 87 val fastpath_mask = Mux1H(io.loadFastMatch, fastpath_masks) 88 89 // select vaddr from 2 alus 90 s0_vaddr := Mux(io.loadFastMatch.orR, fastpath_vaddr, slowpath_vaddr) 91 s0_mask := Mux(io.loadFastMatch.orR, fastpath_mask, slowpath_mask) 92 XSPerfAccumulate("load_to_load_forward", io.loadFastMatch.orR && io.in.fire()) 93 } 94 95 val isSoftPrefetch = LSUOpType.isPrefetch(s0_uop.ctrl.fuOpType) 96 val isSoftPrefetchRead = s0_uop.ctrl.fuOpType === LSUOpType.prefetch_r 97 val isSoftPrefetchWrite = s0_uop.ctrl.fuOpType === LSUOpType.prefetch_w 98 99 // query DTLB 100 io.dtlbReq.valid := io.in.valid 101 io.dtlbReq.bits.vaddr := s0_vaddr 102 io.dtlbReq.bits.cmd := TlbCmd.read 103 io.dtlbReq.bits.size := LSUOpType.size(io.in.bits.uop.ctrl.fuOpType) 104 io.dtlbReq.bits.robIdx := s0_uop.robIdx 105 io.dtlbReq.bits.debug.pc := s0_uop.cf.pc 106 io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue 107 108 // query DCache 109 io.dcacheReq.valid := io.in.valid 110 when (isSoftPrefetchRead) { 111 io.dcacheReq.bits.cmd := MemoryOpConstants.M_PFR 112 }.elsewhen (isSoftPrefetchWrite) { 113 io.dcacheReq.bits.cmd := MemoryOpConstants.M_PFW 114 }.otherwise { 115 io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD 116 } 117 io.dcacheReq.bits.addr := s0_vaddr 118 io.dcacheReq.bits.mask := s0_mask 119 io.dcacheReq.bits.data := DontCare 120 when(isSoftPrefetch) { 121 io.dcacheReq.bits.instrtype := SOFT_PREFETCH.U 122 }.otherwise { 123 io.dcacheReq.bits.instrtype := LOAD_SOURCE.U 124 } 125 126 // TODO: update cache meta 127 io.dcacheReq.bits.id := DontCare 128 129 val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List( 130 "b00".U -> true.B, //b 131 "b01".U -> (s0_vaddr(0) === 0.U), //h 132 "b10".U -> (s0_vaddr(1, 0) === 0.U), //w 133 "b11".U -> (s0_vaddr(2, 0) === 0.U) //d 134 )) 135 136 io.out.valid := io.in.valid && io.dcacheReq.ready 137 138 io.out.bits := DontCare 139 io.out.bits.vaddr := s0_vaddr 140 io.out.bits.mask := s0_mask 141 io.out.bits.uop := s0_uop 142 io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned 143 io.out.bits.rsIdx := io.rsIdx 144 io.out.bits.isFirstIssue := io.isFirstIssue 145 io.out.bits.isSoftPrefetch := isSoftPrefetch 146 147 io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready) 148 149 XSDebug(io.dcacheReq.fire(), 150 p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n" 151 ) 152 XSPerfAccumulate("in_valid", io.in.valid) 153 XSPerfAccumulate("in_fire", io.in.fire) 154 XSPerfAccumulate("in_fire_first_issue", io.in.valid && io.isFirstIssue) 155 XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready) 156 XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready) 157 XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12)) 158 XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12)) 159 XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue) 160 XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue) 161} 162 163 164// Load Pipeline Stage 1 165// TLB resp (send paddr to dcache) 166class LoadUnit_S1(implicit p: Parameters) extends XSModule { 167 val io = IO(new Bundle() { 168 val in = Flipped(Decoupled(new LsPipelineBundle)) 169 val out = Decoupled(new LsPipelineBundle) 170 val dtlbResp = Flipped(DecoupledIO(new TlbResp)) 171 val dcachePAddr = Output(UInt(PAddrBits.W)) 172 val dcacheKill = Output(Bool()) 173 val fastUopKill = Output(Bool()) 174 val dcacheBankConflict = Input(Bool()) 175 val fullForwardFast = Output(Bool()) 176 val sbuffer = new LoadForwardQueryIO 177 val lsq = new PipeLoadForwardQueryIO 178 val loadViolationQueryReq = Decoupled(new LoadViolationQueryReq) 179 val rsFeedback = ValidIO(new RSFeedback) 180 val csrCtrl = Flipped(new CustomCSRCtrlIO) 181 val needLdVioCheckRedo = Output(Bool()) 182 }) 183 184 val s1_uop = io.in.bits.uop 185 val s1_paddr = io.dtlbResp.bits.paddr 186 val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR // af & pf exception were modified below. 187 val s1_tlb_miss = io.dtlbResp.bits.miss 188 val s1_mask = io.in.bits.mask 189 val s1_bank_conflict = io.dcacheBankConflict 190 191 io.out.bits := io.in.bits // forwardXX field will be updated in s1 192 193 io.dtlbResp.ready := true.B 194 195 // TOOD: PMA check 196 io.dcachePAddr := s1_paddr 197 //io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio 198 io.dcacheKill := s1_tlb_miss || s1_exception 199 io.fastUopKill := io.dtlbResp.bits.fast_miss || s1_exception 200 201 // load forward query datapath 202 io.sbuffer.valid := io.in.valid && !(s1_exception || s1_tlb_miss) 203 io.sbuffer.vaddr := io.in.bits.vaddr 204 io.sbuffer.paddr := s1_paddr 205 io.sbuffer.uop := s1_uop 206 io.sbuffer.sqIdx := s1_uop.sqIdx 207 io.sbuffer.mask := s1_mask 208 io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it 209 210 io.lsq.valid := io.in.valid && !(s1_exception || s1_tlb_miss) 211 io.lsq.vaddr := io.in.bits.vaddr 212 io.lsq.paddr := s1_paddr 213 io.lsq.uop := s1_uop 214 io.lsq.sqIdx := s1_uop.sqIdx 215 io.lsq.sqIdxMask := DontCare // will be overwritten by sqIdxMask pre-generated in s0 216 io.lsq.mask := s1_mask 217 io.lsq.pc := s1_uop.cf.pc // FIXME: remove it 218 219 // ld-ld violation query 220 io.loadViolationQueryReq.valid := io.in.valid && !(s1_exception || s1_tlb_miss) 221 io.loadViolationQueryReq.bits.paddr := s1_paddr 222 io.loadViolationQueryReq.bits.uop := s1_uop 223 224 // Generate forwardMaskFast to wake up insts earlier 225 val forwardMaskFast = io.lsq.forwardMaskFast.asUInt | io.sbuffer.forwardMaskFast.asUInt 226 io.fullForwardFast := (~forwardMaskFast & s1_mask) === 0.U 227 228 // Generate feedback signal caused by: 229 // * dcache bank conflict 230 // * need redo ld-ld violation check 231 val needLdVioCheckRedo = io.loadViolationQueryReq.valid && 232 !io.loadViolationQueryReq.ready && 233 RegNext(io.csrCtrl.ldld_vio_check) 234 io.needLdVioCheckRedo := needLdVioCheckRedo 235 io.rsFeedback.valid := io.in.valid && (s1_bank_conflict || needLdVioCheckRedo) 236 io.rsFeedback.bits.hit := false.B // we have found s1_bank_conflict / re do ld-ld violation check 237 io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx 238 io.rsFeedback.bits.flushState := io.in.bits.ptwBack 239 io.rsFeedback.bits.sourceType := Mux(s1_bank_conflict, RSFeedbackType.bankConflict, RSFeedbackType.ldVioCheckRedo) 240 io.rsFeedback.bits.dataInvalidSqIdx := DontCare 241 242 // if replay is detected in load_s1, 243 // load inst will be canceled immediately 244 io.out.valid := io.in.valid && !io.rsFeedback.valid 245 io.out.bits.paddr := s1_paddr 246 io.out.bits.tlbMiss := s1_tlb_miss 247 248 // current ori test will cause the case of ldest == 0, below will be modifeid in the future. 249 // af & pf exception were modified 250 io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld 251 io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld 252 253 io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack 254 io.out.bits.rsIdx := io.in.bits.rsIdx 255 256 io.out.bits.isSoftPrefetch := io.in.bits.isSoftPrefetch 257 258 io.in.ready := !io.in.valid || io.out.ready 259 260 XSPerfAccumulate("in_valid", io.in.valid) 261 XSPerfAccumulate("in_fire", io.in.fire) 262 XSPerfAccumulate("in_fire_first_issue", io.in.fire && io.in.bits.isFirstIssue) 263 XSPerfAccumulate("tlb_miss", io.in.fire && s1_tlb_miss) 264 XSPerfAccumulate("tlb_miss_first_issue", io.in.fire && s1_tlb_miss && io.in.bits.isFirstIssue) 265 XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready) 266} 267 268// Load Pipeline Stage 2 269// DCache resp 270class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper { 271 val io = IO(new Bundle() { 272 val in = Flipped(Decoupled(new LsPipelineBundle)) 273 val out = Decoupled(new LsPipelineBundle) 274 val rsFeedback = ValidIO(new RSFeedback) 275 val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp)) 276 val pmpResp = Flipped(new PMPRespBundle()) 277 val lsq = new LoadForwardQueryIO 278 val dataInvalidSqIdx = Input(UInt()) 279 val sbuffer = new LoadForwardQueryIO 280 val dataForwarded = Output(Bool()) 281 val needReplayFromRS = Output(Bool()) 282 val fullForward = Output(Bool()) 283 val fastpath = Output(new LoadToLoadIO) 284 val dcache_kill = Output(Bool()) 285 val loadViolationQueryResp = Flipped(Valid(new LoadViolationQueryResp)) 286 val csrCtrl = Flipped(new CustomCSRCtrlIO) 287 val sentFastUop = Input(Bool()) 288 }) 289 val isSoftPrefetch = io.in.bits.isSoftPrefetch 290 val excep = WireInit(io.in.bits.uop.cf.exceptionVec) 291 excep(loadAccessFault) := io.in.bits.uop.cf.exceptionVec(loadAccessFault) || io.pmpResp.ld 292 when (isSoftPrefetch) { 293 excep := 0.U.asTypeOf(excep.cloneType) 294 } 295 val s2_exception = selectLoad(excep, false).asUInt.orR 296 297 val actually_mmio = io.pmpResp.mmio 298 val s2_uop = io.in.bits.uop 299 val s2_mask = io.in.bits.mask 300 val s2_paddr = io.in.bits.paddr 301 val s2_tlb_miss = io.in.bits.tlbMiss 302 val s2_data_invalid = io.lsq.dataInvalid 303 val s2_mmio = !isSoftPrefetch && actually_mmio && !s2_exception 304 val s2_cache_miss = io.dcacheResp.bits.miss 305 val s2_cache_replay = io.dcacheResp.bits.replay 306 val s2_is_prefetch = io.in.bits.isSoftPrefetch 307 308 // val cnt = RegInit(127.U) 309 // cnt := cnt + io.in.valid.asUInt 310 // val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid || cnt === 0.U 311 312 val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid 313 // assert(!s2_forward_fail) 314 io.dcache_kill := false.B // move pmp resp kill to outside 315 io.dcacheResp.ready := true.B 316 val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio || s2_is_prefetch) 317 assert(!(io.in.valid && (dcacheShouldResp && !io.dcacheResp.valid)), "DCache response got lost") 318 319 // merge forward result 320 // lsq has higher priority than sbuffer 321 val forwardMask = Wire(Vec(8, Bool())) 322 val forwardData = Wire(Vec(8, UInt(8.W))) 323 324 val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid 325 io.lsq := DontCare 326 io.sbuffer := DontCare 327 io.fullForward := fullForward 328 329 // generate XLEN/8 Muxs 330 for (i <- 0 until XLEN / 8) { 331 forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i) 332 forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i)) 333 } 334 335 XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n", 336 s2_uop.cf.pc, 337 io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt, 338 io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt 339 ) 340 341 // data merge 342 val rdataVec = VecInit((0 until XLEN / 8).map(j => 343 Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j)))) 344 val rdata = rdataVec.asUInt 345 val rdataSel = LookupTree(s2_paddr(2, 0), List( 346 "b000".U -> rdata(63, 0), 347 "b001".U -> rdata(63, 8), 348 "b010".U -> rdata(63, 16), 349 "b011".U -> rdata(63, 24), 350 "b100".U -> rdata(63, 32), 351 "b101".U -> rdata(63, 40), 352 "b110".U -> rdata(63, 48), 353 "b111".U -> rdata(63, 56) 354 )) 355 val rdataPartialLoad = rdataHelper(s2_uop, rdataSel) 356 357 io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid 358 // Inst will be canceled in store queue / lsq, 359 // so we do not need to care about flush in load / store unit's out.valid 360 io.out.bits := io.in.bits 361 io.out.bits.data := rdataPartialLoad 362 // when exception occurs, set it to not miss and let it write back to rob (via int port) 363 if (EnableFastForward) { 364 io.out.bits.miss := s2_cache_miss && 365 !s2_exception && 366 !s2_forward_fail && 367 !fullForward && 368 !s2_is_prefetch 369 } else { 370 io.out.bits.miss := s2_cache_miss && 371 !s2_exception && 372 !s2_forward_fail && 373 !s2_is_prefetch 374 } 375 io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception 376 // if forward fail, replay this inst from fetch 377 val forwardFailReplay = s2_forward_fail && !s2_mmio 378 // if ld-ld violation is detected, replay from this inst from fetch 379 val ldldVioReplay = io.loadViolationQueryResp.valid && 380 io.loadViolationQueryResp.bits.have_violation && 381 RegNext(io.csrCtrl.ldld_vio_check) 382 io.out.bits.uop.ctrl.replayInst := forwardFailReplay || ldldVioReplay 383 io.out.bits.mmio := s2_mmio 384 io.out.bits.uop.ctrl.flushPipe := io.in.bits.uop.ctrl.flushPipe || (s2_mmio && io.sentFastUop) 385 io.out.bits.uop.cf.exceptionVec := excep 386 387 // For timing reasons, sometimes we can not let 388 // io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward 389 // We use io.dataForwarded instead. It means forward logic have prepared all data needed, 390 // and dcache query is no longer needed. 391 // Such inst will be writebacked from load queue. 392 io.dataForwarded := s2_cache_miss && fullForward && !s2_exception && !s2_forward_fail 393 // io.out.bits.forwardX will be send to lq 394 io.out.bits.forwardMask := forwardMask 395 // data retbrived from dcache is also included in io.out.bits.forwardData 396 io.out.bits.forwardData := rdataVec 397 398 io.in.ready := io.out.ready || !io.in.valid 399 400 // feedback tlb result to RS 401 io.rsFeedback.valid := io.in.valid 402 when (io.in.bits.isSoftPrefetch) { 403 io.rsFeedback.bits.hit := (!s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception)) 404 }.otherwise { 405 if (EnableFastForward) { 406 io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception || fullForward) && !s2_data_invalid 407 } else { 408 io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception) && !s2_data_invalid 409 } 410 } 411 io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx 412 io.rsFeedback.bits.flushState := io.in.bits.ptwBack 413 io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss, 414 Mux(s2_cache_replay, 415 RSFeedbackType.mshrFull, 416 RSFeedbackType.dataInvalid 417 ) 418 ) 419 io.rsFeedback.bits.dataInvalidSqIdx.value := io.dataInvalidSqIdx 420 io.rsFeedback.bits.dataInvalidSqIdx.flag := DontCare 421 422 // s2_cache_replay is quite slow to generate, send it separately to LQ 423 if (EnableFastForward) { 424 io.needReplayFromRS := s2_cache_replay && !fullForward 425 } else { 426 io.needReplayFromRS := s2_cache_replay 427 } 428 429 // fast load to load forward 430 io.fastpath.valid := io.in.valid // for debug only 431 io.fastpath.data := rdata // raw data 432 433 434 XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n", 435 s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data, 436 forwardData.asUInt, forwardMask.asUInt 437 ) 438 439 XSPerfAccumulate("in_valid", io.in.valid) 440 XSPerfAccumulate("in_fire", io.in.fire) 441 XSPerfAccumulate("in_fire_first_issue", io.in.fire && io.in.bits.isFirstIssue) 442 XSPerfAccumulate("dcache_miss", io.in.fire && s2_cache_miss) 443 XSPerfAccumulate("dcache_miss_first_issue", io.in.fire && s2_cache_miss && io.in.bits.isFirstIssue) 444 XSPerfAccumulate("full_forward", io.in.valid && fullForward) 445 XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward) 446 XSPerfAccumulate("replay", io.rsFeedback.valid && !io.rsFeedback.bits.hit) 447 XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss) 448 XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay) 449 XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready) 450 XSPerfAccumulate("replay_from_fetch_forward", io.out.valid && forwardFailReplay) 451 XSPerfAccumulate("replay_from_fetch_load_vio", io.out.valid && ldldVioReplay) 452} 453 454class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper { 455 val io = IO(new Bundle() { 456 val ldin = Flipped(Decoupled(new ExuInput)) 457 val ldout = Decoupled(new ExuOutput) 458 val redirect = Flipped(ValidIO(new Redirect)) 459 val feedbackSlow = ValidIO(new RSFeedback) 460 val feedbackFast = ValidIO(new RSFeedback) 461 val rsIdx = Input(UInt(log2Up(IssQueSize).W)) 462 val isFirstIssue = Input(Bool()) 463 val dcache = new DCacheLoadIO 464 val sbuffer = new LoadForwardQueryIO 465 val lsq = new LoadToLsqIO 466 val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1 467 val trigger = Vec(3, new LoadUnitTriggerIO) 468 469 val tlb = new TlbRequestIO 470 val pmp = Flipped(new PMPRespBundle()) // arrive same to tlb now 471 472 val fastpathOut = Output(new LoadToLoadIO) 473 val fastpathIn = Input(Vec(LoadPipelineWidth, new LoadToLoadIO)) 474 val loadFastMatch = Input(UInt(exuParameters.LduCnt.W)) 475 476 val csrCtrl = Flipped(new CustomCSRCtrlIO) 477 }) 478 479 val load_s0 = Module(new LoadUnit_S0) 480 val load_s1 = Module(new LoadUnit_S1) 481 val load_s2 = Module(new LoadUnit_S2) 482 483 load_s0.io.in <> io.ldin 484 load_s0.io.dtlbReq <> io.tlb.req 485 load_s0.io.dcacheReq <> io.dcache.req 486 load_s0.io.rsIdx := io.rsIdx 487 load_s0.io.isFirstIssue := io.isFirstIssue 488 load_s0.io.fastpath := io.fastpathIn 489 load_s0.io.loadFastMatch := io.loadFastMatch 490 491 PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.robIdx.needFlush(io.redirect)) 492 493 load_s1.io.dtlbResp <> io.tlb.resp 494 io.dcache.s1_paddr <> load_s1.io.dcachePAddr 495 io.dcache.s1_kill <> load_s1.io.dcacheKill 496 load_s1.io.sbuffer <> io.sbuffer 497 load_s1.io.lsq <> io.lsq.forward 498 load_s1.io.loadViolationQueryReq <> io.lsq.loadViolationQuery.req 499 load_s1.io.dcacheBankConflict <> io.dcache.s1_bank_conflict 500 load_s1.io.csrCtrl <> io.csrCtrl 501 502 PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.robIdx.needFlush(io.redirect)) 503 504 io.dcache.s2_kill := load_s2.io.dcache_kill || (io.pmp.ld || io.pmp.mmio) // to kill mmio resp which are redirected 505 load_s2.io.dcacheResp <> io.dcache.resp 506 load_s2.io.pmpResp <> io.pmp 507 load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData 508 load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask 509 load_s2.io.lsq.forwardMaskFast <> io.lsq.forward.forwardMaskFast // should not be used in load_s2 510 load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid 511 load_s2.io.lsq.matchInvalid <> io.lsq.forward.matchInvalid 512 load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData 513 load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask 514 load_s2.io.sbuffer.forwardMaskFast <> io.sbuffer.forwardMaskFast // should not be used in load_s2 515 load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false 516 load_s2.io.sbuffer.matchInvalid <> io.sbuffer.matchInvalid 517 load_s2.io.dataForwarded <> io.lsq.loadDataForwarded 518 load_s2.io.fastpath <> io.fastpathOut 519 load_s2.io.dataInvalidSqIdx := io.lsq.forward.dataInvalidSqIdx // provide dataInvalidSqIdx to make wakeup faster 520 load_s2.io.loadViolationQueryResp <> io.lsq.loadViolationQuery.resp 521 load_s2.io.csrCtrl <> io.csrCtrl 522 load_s2.io.sentFastUop := RegEnable(io.fastUop.valid, load_s1.io.out.fire()) // RegNext is also ok 523 io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS 524 525 // feedback tlb miss / dcache miss queue full 526 io.feedbackSlow.bits := RegNext(load_s2.io.rsFeedback.bits) 527 io.feedbackSlow.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.robIdx.needFlush(io.redirect)) 528 529 // feedback bank conflict to rs 530 io.feedbackFast.bits := load_s1.io.rsFeedback.bits 531 io.feedbackFast.valid := load_s1.io.rsFeedback.valid 532 // If replay is reported at load_s1, inst will be canceled (will not enter load_s2), 533 // in that case: 534 // * replay should not be reported twice 535 assert(!(RegNext(RegNext(io.feedbackFast.valid)) && io.feedbackSlow.valid)) 536 // * io.fastUop.valid should not be reported 537 assert(!RegNext(io.feedbackFast.valid && io.fastUop.valid)) 538 539 // pre-calcuate sqIdx mask in s0, then send it to lsq in s1 for forwarding 540 val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize)) 541 io.lsq.forward.sqIdxMask := sqIdxMaskReg 542 543 // // use s2_hit_way to select data received in s1 544 // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data)) 545 // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data) 546 547 io.fastUop.valid := io.dcache.s1_hit_way.orR && // dcache hit 548 !io.dcache.s1_disable_fast_wakeup && // load fast wakeup should be disabled when dcache data read is not ready 549 load_s1.io.in.valid && // valid laod request 550 !load_s1.io.fastUopKill && // not mmio or tlb miss 551 !io.lsq.forward.dataInvalidFast && // forward failed 552 !load_s1.io.needLdVioCheckRedo // load-load violation check: load paddr cam struct hazard 553 io.fastUop.bits := load_s1.io.out.bits.uop 554 555 XSDebug(load_s0.io.out.valid, 556 p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " + 557 p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n") 558 XSDebug(load_s1.io.out.valid, 559 p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " + 560 p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n") 561 562 // writeback to LSQ 563 // Current dcache use MSHR 564 // Load queue will be updated at s2 for both hit/miss int/fp load 565 io.lsq.loadIn.valid := load_s2.io.out.valid 566 io.lsq.loadIn.bits := load_s2.io.out.bits 567 568 // write to rob and writeback bus 569 val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio 570 571 // Int load, if hit, will be writebacked at s2 572 val hitLoadOut = Wire(Valid(new ExuOutput)) 573 hitLoadOut.valid := s2_wb_valid 574 hitLoadOut.bits.uop := load_s2.io.out.bits.uop 575 hitLoadOut.bits.data := load_s2.io.out.bits.data 576 hitLoadOut.bits.redirectValid := false.B 577 hitLoadOut.bits.redirect := DontCare 578 hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio 579 hitLoadOut.bits.debug.isPerfCnt := false.B 580 hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr 581 hitLoadOut.bits.debug.vaddr := load_s2.io.out.bits.vaddr 582 hitLoadOut.bits.fflags := DontCare 583 584 load_s2.io.out.ready := true.B 585 586 io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits) 587 io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid 588 589 io.lsq.ldout.ready := !hitLoadOut.valid 590 591 val lastValidData = RegEnable(io.ldout.bits.data, io.ldout.fire()) 592 val hitLoadAddrTriggerHitVec = Wire(Vec(3, Bool())) 593 val lqLoadAddrTriggerHitVec = io.lsq.trigger.lqLoadAddrTriggerHitVec 594 (0 until 3).map{i => { 595 val tdata2 = io.trigger(i).tdata2 596 val matchType = io.trigger(i).matchType 597 val tEnable = io.trigger(i).tEnable 598 hitLoadAddrTriggerHitVec(i) := TriggerCmp(io.ldout.bits.debug.vaddr, tdata2, matchType, tEnable) 599 io.trigger(i).addrHit := Mux(hitLoadOut.valid, hitLoadAddrTriggerHitVec(i), lqLoadAddrTriggerHitVec(i)) 600 io.trigger(i).lastDataHit := TriggerCmp(lastValidData, tdata2, matchType, tEnable) 601 }} 602 io.lsq.trigger.hitLoadAddrTriggerHitVec := hitLoadAddrTriggerHitVec 603 604 val perfinfo = IO(new Bundle(){ 605 val perfEvents = Output(new PerfEventsBundle(12)) 606 }) 607 608 val perfEvents = Seq( 609 ("load_s0_in_fire ", load_s0.io.in.fire() ), 610 ("load_to_load_forward ", load_s0.io.loadFastMatch.orR && load_s0.io.in.fire() ), 611 ("stall_dcache ", load_s0.io.out.valid && load_s0.io.out.ready && !load_s0.io.dcacheReq.ready ), 612 ("addr_spec_success ", load_s0.io.out.fire() && load_s0.io.dtlbReq.bits.vaddr(VAddrBits-1, 12) === load_s0.io.in.bits.src(0)(VAddrBits-1, 12) ), 613 ("addr_spec_failed ", load_s0.io.out.fire() && load_s0.io.dtlbReq.bits.vaddr(VAddrBits-1, 12) =/= load_s0.io.in.bits.src(0)(VAddrBits-1, 12) ), 614 ("load_s1_in_fire ", load_s1.io.in.fire ), 615 ("load_s1_tlb_miss ", load_s1.io.in.fire && load_s1.io.dtlbResp.bits.miss ), 616 ("load_s2_in_fire ", load_s2.io.in.fire ), 617 ("load_s2_dcache_miss ", load_s2.io.in.fire && load_s2.io.dcacheResp.bits.miss ), 618 ("load_s2_replay ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit ), 619 ("load_s2_replay_tlb_miss ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit && load_s2.io.in.bits.tlbMiss ), 620 ("load_s2_replay_cache ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit && !load_s2.io.in.bits.tlbMiss && load_s2.io.dcacheResp.bits.miss), 621 ) 622 623 for (((perf_out,(perf_name,perf)),i) <- perfinfo.perfEvents.perf_events.zip(perfEvents).zipWithIndex) { 624 perf_out.incr_step := RegNext(perf) 625 } 626 627 when(io.ldout.fire()){ 628 XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc) 629 } 630} 631