/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utility._
import utils._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput}
import xiangshan.backend.rob.RobPtr
import xiangshan.cache._
import xiangshan.backend.fu.FenceToSbuffer
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.mem.prefetch.PrefetchReqBundle
import math._

object genWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U, //0001 << addr(2:0)
      "b01".U -> 0x3.U, //0011
      "b10".U -> 0xf.U, //1111
      "b11".U -> 0xff.U //11111111
    )) << addr(2, 0)).asUInt
  }
}

object genVWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U, //0001 << addr(3:0)
      "b01".U -> 0x3.U, //0011
      "b10".U -> 0xf.U, //1111
      "b11".U -> 0xff.U //11111111
    )) << addr(3, 0)).asUInt
  }
}
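
// Illustrative examples (not part of the design): genWmask positions the byte
// mask inside an 8-byte window, genVWmask inside a 16-byte window.
//   genWmask(addr, "b10".U)  with addr(2,0) = 4 -> 0xf  << 4 = 0xf0
//   genVWmask(addr, "b11".U) with addr(3,0) = 8 -> 0xff << 8 = 0xff00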

object genBasemask {
  /**
   *
   * @param addr
   * @param sizeEncode
   * @return Return 16-byte aligned mask.
   *
   * Example:
   *   Address: 0x80000003  Encoding size: 'b11
   *   Return: 0xff
   */
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U,
      "b01".U -> 0x3.U,
      "b10".U -> 0xf.U,
      "b11".U -> 0xff.U
    ))
  }
}

object shiftDataToLow {
  def apply(addr: UInt, data: UInt): UInt = {
    Mux(addr(3), (data >> 64).asUInt, data)
  }
}
object shiftMaskToLow {
  def apply(addr: UInt, mask: UInt): UInt = {
    Mux(addr(3), (mask >> 8).asUInt, mask)
  }
}
object shiftDataToHigh {
  def apply(addr: UInt, data: UInt): UInt = {
    Mux(addr(3), (data << 64).asUInt, data)
  }
}
object shiftMaskToHigh {
  def apply(addr: UInt, mask: UInt): UInt = {
    Mux(addr(3), (mask << 8).asUInt, mask)
  }
}
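
// Note on the shift*To{Low,High} helpers above: they move data/mask between the
// two 64-bit halves of a 16-byte access window, selected by addr(3). For example
// (illustrative), shiftDataToLow(addr, data) yields the upper 64-bit half of
// `data` when addr(3) is set, so the accessed doubleword always ends up in the
// low half.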

class LsPipelineBundle(implicit p: Parameters) extends XSBundle
  with HasDCacheParameters
  with HasVLSUParameters {
  val uop = new DynInst
  val vaddr = UInt(VAddrBits.W)
  // For exception vaddr generation
  val fullva = UInt(XLEN.W)
  val vaNeedExt = Bool()
  val isHyper = Bool()
  val paddr = UInt(PAddrBits.W)
  val gpaddr = UInt(XLEN.W)
  val isForVSnonLeafPTE = Bool()
  // val func = UInt(6.W)
  val mask = UInt((VLEN/8).W)
  val data = UInt((VLEN+1).W)
  val wlineflag = Bool() // store writes the whole cache line

  val miss = Bool()
  val tlbMiss = Bool()
  val ptwBack = Bool()
  val af = Bool()
  val nc = Bool()
  val mmio = Bool()
  val atomic = Bool()
  val hasException = Bool()

  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))

  // prefetch
  val isPrefetch = Bool()
  val isHWPrefetch = Bool()
  def isSWPrefetch = isPrefetch && !isHWPrefetch

  // misalignBuffer
  val isFrmMisAlignBuf = Bool()

  // vector
  val isvec = Bool()
  val isLastElem = Bool()
  val is128bit = Bool()
  val uop_unit_stride_fof = Bool()
  val usSecondInv = Bool()
  val elemIdx = UInt(elemIdxBits.W)
  val alignedType = UInt(alignTypeBits.W)
  val mbIndex = UInt(max(vlmBindexBits, vsmBindexBits).W)
  // val rob_idx_valid = Vec(2, Bool())
  // val inner_idx = Vec(2, UInt(3.W))
  // val rob_idx = Vec(2, new RobPtr)
  val reg_offset = UInt(vOffsetBits.W)
  val elemIdxInsideVd = UInt(elemIdxBits.W)
  // val offset = Vec(2, UInt(4.W))
  val vecActive = Bool() // 1: vector active element or scalar mem operation, 0: inactive vector element
  val is_first_ele = Bool()
  val vecBaseVaddr = UInt(VAddrBits.W)
  val vecVaddrOffset = UInt(VAddrBits.W)
  val vecTriggerMask = UInt((VLEN/8).W)
  // val flowPtr = new VlflowPtr() // VLFlowQueue ptr
  // val sflowPtr = new VsFlowPtr() // VSFlowQueue ptr

  // For debug usage
  val isFirstIssue = Bool()
  val hasROBEntry = Bool()

  // For load replay
  val isLoadReplay = Bool()
  val isFastPath = Bool()
  val isFastReplay = Bool()
  val replayCarry = new ReplayCarry(nWays)

  // For dcache miss load
  val mshrid = UInt(log2Up(cfg.nMissEntries).W)
  val handledByMSHR = Bool()
  val replacementUpdated = Bool()
  val missDbUpdated = Bool()

  val forward_tlDchannel = Bool()
  val dcacheRequireReplay = Bool()
  val delayedLoadError = Bool()
  val lateKill = Bool()
  val feedbacked = Bool()
  val ldCancel = ValidUndirectioned(UInt(log2Ceil(LoadPipelineWidth).W))
  // loadQueueReplay index
  val schedIndex = UInt(log2Up(LoadQueueReplaySize).W)
  // hardware prefetch and fast replay do not need to query the tlb
  val tlbNoQuery = Bool()

  // misalign
  val isMisalign = Bool()
  val isFinalSplit = Bool()
  val misalignWith16Byte = Bool()
  val misalignNeedWakeUp = Bool()
  val updateAddrValid = Bool()
}

class LdPrefetchTrainBundle(implicit p: Parameters) extends LsPipelineBundle {
  val meta_prefetch = UInt(L1PfSourceBits.W)
  val meta_access = Bool()

  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) fullva := RegEnable(input.fullva, enable) else fullva := input.fullva
    if (latch) vaNeedExt := RegEnable(input.vaNeedExt, enable) else vaNeedExt := input.vaNeedExt
    if (latch) isHyper := RegEnable(input.isHyper, enable) else isHyper := input.isHyper
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) isForVSnonLeafPTE := RegEnable(input.isForVSnonLeafPTE, enable) else isForVSnonLeafPTE := input.isForVSnonLeafPTE
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) af := RegEnable(input.af, enable) else af := input.af
    if (latch) nc := RegEnable(input.nc, enable) else nc := input.nc
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFrmMisAlignBuf := RegEnable(input.isFrmMisAlignBuf, enable) else isFrmMisAlignBuf := input.isFrmMisAlignBuf
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) tlbNoQuery := RegEnable(input.tlbNoQuery, enable) else tlbNoQuery := input.tlbNoQuery
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) isLastElem := RegEnable(input.isLastElem, enable) else isLastElem := input.isLastElem
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) is_first_ele := RegEnable(input.is_first_ele, enable) else is_first_ele := input.is_first_ele
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) usSecondInv := RegEnable(input.usSecondInv, enable) else usSecondInv := input.usSecondInv
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) elemIdx := RegEnable(input.elemIdx, enable) else elemIdx := input.elemIdx
    if (latch) alignedType := RegEnable(input.alignedType, enable) else alignedType := input.alignedType
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd
    if (latch) vecBaseVaddr := RegEnable(input.vecBaseVaddr, enable) else vecBaseVaddr := input.vecBaseVaddr
    if (latch) vecVaddrOffset := RegEnable(input.vecVaddrOffset, enable) else vecVaddrOffset := input.vecVaddrOffset
    if (latch) vecTriggerMask := RegEnable(input.vecTriggerMask, enable) else vecTriggerMask := input.vecTriggerMask
    // if (latch) flowPtr := RegEnable(input.flowPtr, enable) else flowPtr := input.flowPtr
    // if (latch) sflowPtr := RegEnable(input.sflowPtr, enable) else sflowPtr := input.sflowPtr

    meta_prefetch := DontCare
    meta_access := DontCare
    forward_tlDchannel := DontCare
    mshrid := DontCare
    replayCarry := DontCare
    atomic := DontCare
    isLoadReplay := DontCare
    isFastPath := DontCare
    isFastReplay := DontCare
    handledByMSHR := DontCare
    replacementUpdated := DontCare
    missDbUpdated := DontCare
    delayedLoadError := DontCare
    lateKill := DontCare
    feedbacked := DontCare
    ldCancel := DontCare
  }

  def asPrefetchReqBundle(): PrefetchReqBundle = {
    val res = Wire(new PrefetchReqBundle)
    res.vaddr := this.vaddr
    res.paddr := this.paddr
    res.pc := this.uop.pc
    res.miss := this.miss
    res.pfHitStream := isFromStream(this.meta_prefetch)

    res
  }
}

class StPrefetchTrainBundle(implicit p: Parameters) extends LdPrefetchTrainBundle {}
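
// A note on the fromLsPipelineBundle copy helpers (LdPrefetchTrainBundle above,
// LqWriteBundle below): with latch = true each field is captured through
// RegEnable(input.field, enable), i.e. the copy is delayed by one cycle and only
// updates while `enable` is asserted; with latch = false the fields are wired
// through combinationally. Fields the destination bundle does not take from the
// pipeline are tied to DontCare.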

class LqWriteBundle(implicit p: Parameters) extends LsPipelineBundle {
  // load inst replay information
  val rep_info = new LoadToLsqReplayIO
  // queue entry data, except flag bits, will be updated if writeQueue is true,
  // valid bit in LqWriteBundle will be ignored
  val data_wen_dup = Vec(6, Bool()) // dirty reg dup

  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) fullva := RegEnable(input.fullva, enable) else fullva := input.fullva
    if (latch) vaNeedExt := RegEnable(input.vaNeedExt, enable) else vaNeedExt := input.vaNeedExt
    if (latch) isHyper := RegEnable(input.isHyper, enable) else isHyper := input.isHyper
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) isForVSnonLeafPTE := RegEnable(input.isForVSnonLeafPTE, enable) else isForVSnonLeafPTE := input.isForVSnonLeafPTE
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) atomic := RegEnable(input.atomic, enable) else atomic := input.atomic
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFrmMisAlignBuf := RegEnable(input.isFrmMisAlignBuf, enable) else isFrmMisAlignBuf := input.isFrmMisAlignBuf
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) isLoadReplay := RegEnable(input.isLoadReplay, enable) else isLoadReplay := input.isLoadReplay
    if (latch) isFastPath := RegEnable(input.isFastPath, enable) else isFastPath := input.isFastPath
    if (latch) isFastReplay := RegEnable(input.isFastReplay, enable) else isFastReplay := input.isFastReplay
    if (latch) mshrid := RegEnable(input.mshrid, enable) else mshrid := input.mshrid
    if (latch) forward_tlDchannel := RegEnable(input.forward_tlDchannel, enable) else forward_tlDchannel := input.forward_tlDchannel
    if (latch) replayCarry := RegEnable(input.replayCarry, enable) else replayCarry := input.replayCarry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) handledByMSHR := RegEnable(input.handledByMSHR, enable) else handledByMSHR := input.handledByMSHR
    if (latch) replacementUpdated := RegEnable(input.replacementUpdated, enable) else replacementUpdated := input.replacementUpdated
    if (latch) missDbUpdated := RegEnable(input.missDbUpdated, enable) else missDbUpdated := input.missDbUpdated
    if (latch) delayedLoadError := RegEnable(input.delayedLoadError, enable) else delayedLoadError := input.delayedLoadError
    if (latch) lateKill := RegEnable(input.lateKill, enable) else lateKill := input.lateKill
    if (latch) feedbacked := RegEnable(input.feedbacked, enable) else feedbacked := input.feedbacked
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd

    rep_info := DontCare
    data_wen_dup := DontCare
  }
}
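
// Note: fromLsPipelineBundle above leaves rep_info and data_wen_dup as DontCare,
// so the producer of an LqWriteBundle is expected to drive them separately
// (an assumption based on the DontCare defaults here, not stated elsewhere in this file).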

class SqWriteBundle(implicit p: Parameters) extends LsPipelineBundle {
  val need_rep = Bool()
}

class LoadForwardQueryIO(implicit p: Parameters) extends XSBundle {
  val vaddr = Output(UInt(VAddrBits.W))
  val paddr = Output(UInt(PAddrBits.W))
  val mask = Output(UInt((VLEN/8).W))
  val uop = Output(new DynInst) // for replay
  val pc = Output(UInt(VAddrBits.W)) // for debug
  val valid = Output(Bool())

  val forwardMaskFast = Input(Vec((VLEN/8), Bool())) // resp to load_s1
  val forwardMask = Input(Vec((VLEN/8), Bool())) // resp to load_s2
  val forwardData = Input(Vec((VLEN/8), UInt(8.W))) // resp to load_s2

  // val lqIdx = Output(UInt(LoadQueueIdxWidth.W))
  val sqIdx = Output(new SqPtr)

  // dataInvalid suggests store-to-load forwarding found that a forward should happen,
  // but the data is not available yet. If dataInvalid, the load inst should be
  // replayed from RS. Feedback type should be RSFeedbackType.dataInvalid.
  val dataInvalid = Input(Bool()) // addr match, but data is not valid for now

  // matchInvalid suggests that in the store-to-load forward logic, the paddr cam
  // result does not match the vaddr cam result. If matchInvalid, a microarchitectural
  // exception should be raised to flush the SQ and committed sbuffer.
  val matchInvalid = Input(Bool()) // resp to load_s2

  // addrInvalid suggests store-to-load forwarding found that a forward should happen,
  // but the address (SSID) is not available yet. If addrInvalid, the load inst should
  // be replayed from RS. Feedback type should be RSFeedbackType.addrInvalid.
  val addrInvalid = Input(Bool())
}

// LoadForwardQueryIO used in load pipeline
//
// Difference between PipeLoadForwardQueryIO and LoadForwardQueryIO:
// PipeIO uses a predecoded sqIdxMask for better forward timing
class PipeLoadForwardQueryIO(implicit p: Parameters) extends LoadForwardQueryIO {
  // val sqIdx = Output(new SqPtr) // for debug, should not be used in pipeline for timing reasons
  // sqIdxMask is calculated in an earlier stage for better timing
  val sqIdxMask = Output(UInt(StoreQueueSize.W))

  // dataInvalid: addr match, but data is not valid for now
  val dataInvalidFast = Input(Bool()) // resp to load_s1
  // val dataInvalid = Input(Bool()) // resp to load_s2
  val dataInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
  val addrInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
}

// Query load queue for ld-ld violation
//
// Req should be sent in load_s1
// Resp will be generated 1 cycle later
//
// Note that the query req may be !ready while dcache is releasing a block
// If that happens, a replay from rs is needed.
class LoadNukeQueryReq(implicit p: Parameters) extends XSBundle { // provide lqIdx
  val uop = new DynInst
  // mask: load's data mask.
  val mask = UInt((VLEN/8).W)

  // paddr: load's paddr.
  val paddr = UInt(PAddrBits.W)
  // data_valid: load data is valid.
  val data_valid = Bool()
  // nc: is NC access
  val is_nc = Bool()
}

class LoadNukeQueryResp(implicit p: Parameters) extends XSBundle {
  // rep_frm_fetch: ld-ld violation detected, replay from fetch.
  val rep_frm_fetch = Bool()
}

class LoadNukeQueryIO(implicit p: Parameters) extends XSBundle {
  val req = Decoupled(new LoadNukeQueryReq)
  val resp = Flipped(Valid(new LoadNukeQueryResp))
  val revoke = Output(Bool())
}

class StoreNukeQueryIO(implicit p: Parameters) extends XSBundle {
  // robIdx: requestor's (a store instruction) rob index for match logic.
  val robIdx = new RobPtr

  // paddr: requestor's (a store instruction) physical address for match logic.
  val paddr = UInt(PAddrBits.W)

  // mask: requestor's (a store instruction) data width mask for match logic.
  val mask = UInt((VLEN/8).W)

  // matchLine: if the store is a 128-bit vector access, the load unit needs to compare the 128-bit vaddr.
  val matchLine = Bool()
}

class StoreMaBufToSqControlIO(implicit p: Parameters) extends XSBundle {
  // from storeMisalignBuffer to storeQueue, controls its sbuffer write
  val toStoreQueue = Output(new XSBundle {
    // This entry crosses a page boundary
    val crossPageWithHit = Bool()
    val crossPageCanDeq = Bool()
    // paddr of the high page
    val paddr = UInt(PAddrBits.W)

    val withSameUop = Bool()
  })
  // from storeQueue to storeMisalignBuffer, provides detailed info of this store
  val toStoreMisalignBuffer = Input(new XSBundle {
    val sqPtr = new SqPtr
    val doDeq = Bool()

    val uop = new DynInst()
  })
}

class StoreMaBufToVecStoreMergeBufferIO(implicit p: Parameters) extends VLSUBundle {
  val mbIndex = Output(UInt(vsmBindexBits.W))
  val flush = Output(Bool())
}

// Store byte valid mask write bundle
//
// Store byte valid mask write to SQ takes 2 cycles
class StoreMaskBundle(implicit p: Parameters) extends XSBundle {
  val sqIdx = new SqPtr
  val mask = UInt((VLEN/8).W)
}

class LoadDataFromDcacheBundle(implicit p: Parameters) extends DCacheBundle {
  // old dcache: optimize data sram read fanout
  // val bankedDcacheData = Vec(DCacheBanks, UInt(64.W))
  // val bank_oh = UInt(DCacheBanks.W)

  // new dcache
  val respDcacheData = UInt(VLEN.W)
  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(4.W) // for data selection

  // forward tilelink D channel
  val forward_D = Bool()
  val forwardData_D = Vec(VLEN/8, UInt(8.W))

  // forward mshr data
  val forward_mshr = Bool()
  val forwardData_mshr = Vec(VLEN/8, UInt(8.W))

  val forward_result_valid = Bool()

  def mergeTLData(): UInt = {
    // merge TL D or MSHR data at load s2
    val dcache_data = respDcacheData
    val use_D = forward_D && forward_result_valid
    val use_mshr = forward_mshr && forward_result_valid
    Mux(
      use_D || use_mshr,
      Mux(
        use_D,
        forwardData_D.asUInt,
        forwardData_mshr.asUInt
      ),
      dcache_data
    )
  }

  def mergeLsqFwdData(dcacheData: UInt): UInt = {
    // merge dcache and lsq forward data at load s3
    val rdataVec = VecInit((0 until VLEN / 8).map(j =>
      Mux(forwardMask(j), forwardData(j), dcacheData(8*(j+1)-1, 8*j))
    ))
    rdataVec.asUInt
  }
}
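
// Data selection order implied by the two merge helpers above: at load s2,
// mergeTLData prefers forwarded TileLink D-channel data, then MSHR data, then
// the dcache response; at load s3, mergeLsqFwdData overlays every byte whose
// forwardMask bit is set (store-to-load forwarded bytes) on top of that result.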

// Load writeback data from load queue (refill)
class LoadDataFromLQBundle(implicit p: Parameters) extends XSBundle {
  val lqData = UInt(64.W) // load queue has merged data
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(3.W) // for data selection

  def mergedData(): UInt = {
    lqData
  }
}

// Bundle for load / store wait wake-up
class MemWaitUpdateReq(implicit p: Parameters) extends XSBundle {
  val robIdx = Vec(backendParams.StaExuCnt, ValidIO(new RobPtr))
  val sqIdx = Vec(backendParams.StdCnt, ValidIO(new SqPtr))
}

object AddPipelineReg {
  class PipelineRegModule[T <: Data](gen: T) extends Module {
    val io = IO(new Bundle() {
      val in = Flipped(DecoupledIO(gen.cloneType))
      val out = DecoupledIO(gen.cloneType)
      val isFlush = Input(Bool())
    })

    val valid = RegInit(false.B)
    valid.suggestName("pipeline_reg_valid")
    when (io.out.fire) { valid := false.B }
    when (io.in.fire) { valid := true.B }
    when (io.isFlush) { valid := false.B }

    io.in.ready := !valid || io.out.ready
    io.out.bits := RegEnable(io.in.bits, io.in.fire)
    io.out.valid := valid //&& !isFlush
  }

  def apply[T <: Data]
  (left: DecoupledIO[T], right: DecoupledIO[T], isFlush: Bool,
   moduleName: Option[String] = None
  ): Unit = {
    val pipelineReg = Module(new PipelineRegModule[T](left.bits.cloneType))
    if (moduleName.nonEmpty) pipelineReg.suggestName(moduleName.get)
    pipelineReg.io.in <> left
    right <> pipelineReg.io.out
    pipelineReg.io.isFlush := isFlush
  }
}
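
// Usage sketch for AddPipelineReg (illustrative only; the signal names below are
// made up and do not appear in this file):
//   val s0_out = Wire(DecoupledIO(new LsPipelineBundle))
//   val s1_in  = Wire(DecoupledIO(new LsPipelineBundle))
//   AddPipelineReg(s0_out, s1_in, isFlush = io.redirect.valid)
// This inserts one valid/ready pipeline register between the two handshakes:
// bits are captured on s0_out.fire, and the staged valid bit is cleared when
// s1_in fires or when isFlush is asserted.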