/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utility._
import utils._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput}
import xiangshan.backend.rob.RobPtr
import xiangshan.cache._
import xiangshan.backend.fu.FenceToSbuffer
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.mem.prefetch.PrefetchReqBundle
import math._

// Generate the byte write mask for a scalar access: expand the size encoding into a
// contiguous byte mask and shift it by the low address bits within a 64-bit beat.
object genWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U,  // 0000_0001 << addr(2:0)
      "b01".U -> 0x3.U,  // 0000_0011
      "b10".U -> 0xf.U,  // 0000_1111
      "b11".U -> 0xff.U  // 1111_1111
    )) << addr(2, 0)).asUInt
  }
}

// Vector variant: the mask is shifted by addr(3:0) so it can cover a full 128-bit (VLEN) beat.
object genVWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U,  // 0000_0001 << addr(3:0)
      "b01".U -> 0x3.U,  // 0000_0011
      "b10".U -> 0xf.U,  // 0000_1111
      "b11".U -> 0xff.U  // 1111_1111
    )) << addr(3, 0)).asUInt
  }
}

// Replicate the store data across the 128-bit datapath according to the access size,
// so the selected bytes line up with the shifted write mask.
object genWdata {
  def apply(data: UInt, sizeEncode: UInt): UInt = {
    LookupTree(sizeEncode, List(
      "b00".U -> Fill(16, data(7, 0)),
      "b01".U -> Fill(8, data(15, 0)),
      "b10".U -> Fill(4, data(31, 0)),
      "b11".U -> Fill(2, data(63, 0))
    ))
  }
}

// Select the 64-bit half of a 128-bit beat indicated by addr(3) and move it to the low half.
object shiftDataToLow {
  def apply(addr: UInt, data: UInt): UInt = {
    Mux(addr(3), (data >> 64).asUInt, data)
  }
}

// Same selection for the byte mask: drop the low 8 mask bits when the access targets the high half.
object shiftMaskToLow {
  def apply(addr: UInt, mask: UInt): UInt = {
    Mux(addr(3), (mask >> 8).asUInt, mask)
  }
}
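
// Illustrative sketch (not part of the original design): a tiny module wiring the helpers
// above together the way a store datapath might, to show how the size encoding, address
// offset, mask and replicated data relate. The module name and port widths are assumptions
// chosen only for this example.
// For instance, with sizeEncode = "b10" (word) and addr(2, 0) = 4, genWmask returns
// "b1111_0000" and genWdata replicates data(31, 0) four times, so bytes 4..7 carry the word.
class GenMaskDataExample extends Module {
  val io = IO(new Bundle {
    val addr       = Input(UInt(16.W))
    val sizeEncode = Input(UInt(2.W))
    val data       = Input(UInt(64.W))
    val wmask      = Output(UInt(16.W))   // scalar mask, shifted by addr(2, 0)
    val vwmask     = Output(UInt(32.W))   // vector mask, shifted by addr(3, 0)
    val wdata      = Output(UInt(128.W))  // data replicated across the 128-bit beat
  })
  io.wmask  := genWmask(io.addr, io.sizeEncode)
  io.vwmask := genVWmask(io.addr, io.sizeEncode)
  io.wdata  := genWdata(io.data, io.sizeEncode)
}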
class LsPipelineBundle(implicit p: Parameters) extends XSBundle
  with HasDCacheParameters
  with HasVLSUParameters {
  val uop = new DynInst
  val vaddr = UInt(VAddrBits.W)
  val paddr = UInt(PAddrBits.W)
  val gpaddr = UInt(GPAddrBits.W)
  // val func = UInt(6.W)
  val mask = UInt((VLEN/8).W)
  val data = UInt((VLEN+1).W)
  val wlineflag = Bool() // store writes the whole cache line

  val miss = Bool()
  val tlbMiss = Bool()
  val ptwBack = Bool()
  val af = Bool()
  val mmio = Bool()
  val atomic = Bool()

  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))

  // prefetch
  val isPrefetch = Bool()
  val isHWPrefetch = Bool()
  def isSWPrefetch = isPrefetch && !isHWPrefetch

  // vector
  val isvec = Bool()
  val isLastElem = Bool()
  val is128bit = Bool()
  val uop_unit_stride_fof = Bool()
  val usSecondInv = Bool()
  val elemIdx = UInt(elemIdxBits.W)
  val alignedType = UInt(alignTypeBits.W)
  val mbIndex = UInt(max(vlmBindexBits, vsmBindexBits).W)
  // val rob_idx_valid = Vec(2, Bool())
  // val inner_idx = Vec(2, UInt(3.W))
  // val rob_idx = Vec(2, new RobPtr)
  val reg_offset = UInt(vOffsetBits.W)
  val elemIdxInsideVd = UInt(elemIdxBits.W)
  // val offset = Vec(2, UInt(4.W))
  val vecActive = Bool() // 1: vector active element or scalar mem operation, 0: vector inactive element
  val is_first_ele = Bool()
  // val flowPtr = new VlflowPtr() // VLFlowQueue ptr
  // val sflowPtr = new VsFlowPtr() // VSFlowQueue ptr

  // For debug usage
  val isFirstIssue = Bool()
  val hasROBEntry = Bool()

  // For load replay
  val isLoadReplay = Bool()
  val isFastPath = Bool()
  val isFastReplay = Bool()
  val replayCarry = new ReplayCarry(nWays)

  // For dcache miss load
  val mshrid = UInt(log2Up(cfg.nMissEntries).W)
  val handledByMSHR = Bool()
  val replacementUpdated = Bool()
  val missDbUpdated = Bool()

  val forward_tlDchannel = Bool()
  val dcacheRequireReplay = Bool()
  val delayedLoadError = Bool()
  val lateKill = Bool()
  val feedbacked = Bool()
  val ldCancel = ValidUndirectioned(UInt(log2Ceil(LoadPipelineWidth).W))
  // loadQueueReplay index.
  val schedIndex = UInt(log2Up(LoadQueueReplaySize).W)
}

class LdPrefetchTrainBundle(implicit p: Parameters) extends LsPipelineBundle {
  val meta_prefetch = UInt(L1PfSourceBits.W)
  val meta_access = Bool()

  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) af := RegEnable(input.af, enable) else af := input.af
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) isLastElem := RegEnable(input.isLastElem, enable) else isLastElem := input.isLastElem
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) is_first_ele := RegEnable(input.is_first_ele, enable) else is_first_ele := input.is_first_ele
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) usSecondInv := RegEnable(input.usSecondInv, enable) else usSecondInv := input.usSecondInv
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) elemIdx := RegEnable(input.elemIdx, enable) else elemIdx := input.elemIdx
    if (latch) alignedType := RegEnable(input.alignedType, enable) else alignedType := input.alignedType
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd
    // if (latch) flowPtr := RegEnable(input.flowPtr, enable) else flowPtr := input.flowPtr
    // if (latch) sflowPtr := RegEnable(input.sflowPtr, enable) else sflowPtr := input.sflowPtr

    meta_prefetch := DontCare
    meta_access := DontCare
    forward_tlDchannel := DontCare
    mshrid := DontCare
    replayCarry := DontCare
    atomic := DontCare
    isLoadReplay := DontCare
    isFastPath := DontCare
    isFastReplay := DontCare
    handledByMSHR := DontCare
    replacementUpdated := DontCare
    missDbUpdated := DontCare
    delayedLoadError := DontCare
    lateKill := DontCare
    feedbacked := DontCare
    ldCancel := DontCare
  }

  def asPrefetchReqBundle(): PrefetchReqBundle = {
    val res = Wire(new PrefetchReqBundle)
    res.vaddr := this.vaddr
    res.paddr := this.paddr
    res.pc := this.uop.pc
    res.miss := this.miss

    res
  }
}
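
// Illustrative sketch (assumption, not the original LoadUnit code): building a prefetch
// training sample from an in-flight LsPipelineBundle. With latch = true the copied fields
// are registered with RegEnable(_, enable), so the sample lines up with the next pipeline
// stage; asPrefetchReqBundle then narrows it to the vaddr/paddr/pc/miss view a prefetcher
// consumes. The module and port names are invented for this example.
class PrefetchTrainExample(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle {
    val s2_valid = Input(Bool())
    val s2_in    = Input(new LsPipelineBundle)
    val train    = Output(new PrefetchReqBundle)
  })

  // Copy the pipeline state into a training bundle, latched into the next stage.
  val trainBundle = Wire(new LdPrefetchTrainBundle)
  trainBundle.fromLsPipelineBundle(io.s2_in, latch = true, enable = io.s2_valid)

  // Hand the narrowed request to the prefetcher.
  io.train := trainBundle.asPrefetchReqBundle()
}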
class StPrefetchTrainBundle(implicit p: Parameters) extends LdPrefetchTrainBundle {}

class LqWriteBundle(implicit p: Parameters) extends LsPipelineBundle {
  // load inst replay information
  val rep_info = new LoadToLsqReplayIO
  // queue entry data, except flag bits, will be updated if writeQueue is true;
  // the valid bit in LqWriteBundle will be ignored
  val data_wen_dup = Vec(6, Bool()) // dirty reg dup

  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) atomic := RegEnable(input.atomic, enable) else atomic := input.atomic
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) isLoadReplay := RegEnable(input.isLoadReplay, enable) else isLoadReplay := input.isLoadReplay
    if (latch) isFastPath := RegEnable(input.isFastPath, enable) else isFastPath := input.isFastPath
    if (latch) isFastReplay := RegEnable(input.isFastReplay, enable) else isFastReplay := input.isFastReplay
    if (latch) mshrid := RegEnable(input.mshrid, enable) else mshrid := input.mshrid
    if (latch) forward_tlDchannel := RegEnable(input.forward_tlDchannel, enable) else forward_tlDchannel := input.forward_tlDchannel
    if (latch) replayCarry := RegEnable(input.replayCarry, enable) else replayCarry := input.replayCarry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) handledByMSHR := RegEnable(input.handledByMSHR, enable) else handledByMSHR := input.handledByMSHR
    if (latch) replacementUpdated := RegEnable(input.replacementUpdated, enable) else replacementUpdated := input.replacementUpdated
    if (latch) missDbUpdated := RegEnable(input.missDbUpdated, enable) else missDbUpdated := input.missDbUpdated
    if (latch) delayedLoadError := RegEnable(input.delayedLoadError, enable) else delayedLoadError := input.delayedLoadError
    if (latch) lateKill := RegEnable(input.lateKill, enable) else lateKill := input.lateKill
    if (latch) feedbacked := RegEnable(input.feedbacked, enable) else feedbacked := input.feedbacked
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd

    rep_info := DontCare
    data_wen_dup := DontCare
  }
}
class LoadForwardQueryIO(implicit p: Parameters) extends XSBundle {
  val vaddr = Output(UInt(VAddrBits.W))
  val paddr = Output(UInt(PAddrBits.W))
  val mask = Output(UInt((VLEN/8).W))
  val uop = Output(new DynInst) // for replay
  val pc = Output(UInt(VAddrBits.W)) // for debug
  val valid = Output(Bool())

  val forwardMaskFast = Input(Vec((VLEN/8), Bool())) // resp to load_s1
  val forwardMask = Input(Vec((VLEN/8), Bool())) // resp to load_s2
  val forwardData = Input(Vec((VLEN/8), UInt(8.W))) // resp to load_s2

  // val lqIdx = Output(UInt(LoadQueueIdxWidth.W))
  val sqIdx = Output(new SqPtr)

  // dataInvalid indicates that store-to-load forwarding should happen, but the store
  // data is not yet available. If dataInvalid, the load inst should be replayed from
  // RS. Feedback type should be RSFeedbackType.dataInvalid.
  val dataInvalid = Input(Bool()) // addr match, but data is not valid for now

  // matchInvalid indicates that, in the store-to-load forward logic, the paddr CAM result
  // does not equal the vaddr CAM result. If matchInvalid, a microarchitectural exception
  // should be raised to flush the SQ and the committed sbuffer.
  val matchInvalid = Input(Bool()) // resp to load_s2

  // addrInvalid indicates that store-to-load forwarding should happen, but the store
  // address (SSID) is not yet available. If addrInvalid, the load inst should be
  // replayed from RS. Feedback type should be RSFeedbackType.addrInvalid.
  val addrInvalid = Input(Bool())
}

// LoadForwardQueryIO used in the load pipeline
//
// Difference between PipeLoadForwardQueryIO and LoadForwardQueryIO:
// the pipeline IO uses a predecoded sqIdxMask for better forward timing
class PipeLoadForwardQueryIO(implicit p: Parameters) extends LoadForwardQueryIO {
  // val sqIdx = Output(new SqPtr) // for debug, should not be used in pipeline for timing reasons
  // sqIdxMask is calculated in an earlier stage for better timing
  val sqIdxMask = Output(UInt(StoreQueueSize.W))

  // dataInvalid: addr match, but data is not valid for now
  val dataInvalidFast = Input(Bool()) // resp to load_s1
  // val dataInvalid = Input(Bool()) // resp to load_s2
  val dataInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
  val addrInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
}
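
// Illustrative sketch (assumption, not the original LoadUnit code): driving a forward
// query from a load pipeline stage and merging the byte-granular response with dcache
// data, in the same per-byte style as LoadDataFromDcacheBundle.mergedData further below.
// The query would go to a forwarding source such as the store queue or sbuffer; all
// signal names other than the bundle fields are invented for this example.
class ForwardMergeExample(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle {
    val s1_valid   = Input(Bool())
    val s1_in      = Input(new LsPipelineBundle)
    val forward    = new LoadForwardQueryIO
    val dcacheData = Input(UInt(VLEN.W))
    val mergedData = Output(UInt(VLEN.W))
  })

  // Request side (sent in load_s1).
  io.forward.valid := io.s1_valid
  io.forward.vaddr := io.s1_in.vaddr
  io.forward.paddr := io.s1_in.paddr
  io.forward.mask  := io.s1_in.mask
  io.forward.uop   := io.s1_in.uop
  io.forward.pc    := io.s1_in.uop.pc
  io.forward.sqIdx := io.s1_in.uop.sqIdx

  // Response side (load_s2): forwarded bytes override the corresponding dcache bytes.
  val bytes = VecInit((0 until VLEN / 8).map { i =>
    Mux(io.forward.forwardMask(i), io.forward.forwardData(i), io.dcacheData(8 * i + 7, 8 * i))
  })
  io.mergedData := bytes.asUInt
}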
// Query load queue for ld-ld violation
//
// Req should be sent in load_s1
// Resp will be generated 1 cycle later
//
// Note that the query req may be !ready, as the dcache is releasing a block
// If that happens, a replay from RS is needed.
class LoadNukeQueryReq(implicit p: Parameters) extends XSBundle { // provide lqIdx
  val uop = new DynInst
  // mask: load's data mask.
  val mask = UInt((VLEN/8).W)

  // paddr: load's paddr.
  val paddr = UInt(PAddrBits.W)
  // data_valid: whether the load data is valid.
  val data_valid = Bool()
}

class LoadNukeQueryResp(implicit p: Parameters) extends XSBundle {
  // rep_frm_fetch: ld-ld violation check succeeded, replay from fetch.
  val rep_frm_fetch = Bool()
}

class LoadNukeQueryIO(implicit p: Parameters) extends XSBundle {
  val req = Decoupled(new LoadNukeQueryReq)
  val resp = Flipped(Valid(new LoadNukeQueryResp))
  val revoke = Output(Bool())
}

class StoreNukeQueryIO(implicit p: Parameters) extends XSBundle {
  // robIdx: requestor's (a store instruction) rob index for match logic.
  val robIdx = new RobPtr

  // paddr: requestor's (a store instruction) physical address for match logic.
  val paddr = UInt(PAddrBits.W)

  // mask: requestor's (a store instruction) data width mask for match logic.
  val mask = UInt((VLEN/8).W)

  // matchLine: if the store is a 128-bit vector store, the load unit needs to compare the 128-bit vaddr.
  val matchLine = Bool()
}
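
// Illustrative sketch (assumption, not the original LoadUnit code): how a load pipeline
// stage might drive the ld-ld violation query described above. The request is sent in
// load_s1; if the queue is not ready (e.g. the dcache is releasing a block), the load is
// marked for replay from RS. The response arrives one cycle later, and rep_frm_fetch asks
// for a replay from fetch. Signal names other than the bundle fields are invented here.
class LoadNukeQueryExample(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle {
    val s1_valid   = Input(Bool())
    val s1_uop     = Input(new DynInst)
    val s1_paddr   = Input(UInt(PAddrBits.W))
    val s1_mask    = Input(UInt((VLEN / 8).W))
    val s1_dataOk  = Input(Bool())
    val query      = new LoadNukeQueryIO
    val needReplay = Output(Bool())   // query not accepted: replay from RS
    val flushFetch = Output(Bool())   // violation detected: replay from fetch
  })

  // Request side (load_s1).
  io.query.req.valid           := io.s1_valid
  io.query.req.bits.uop        := io.s1_uop
  io.query.req.bits.paddr      := io.s1_paddr
  io.query.req.bits.mask       := io.s1_mask
  io.query.req.bits.data_valid := io.s1_dataOk
  io.query.revoke              := false.B

  // Outcomes: an unaccepted request needs an RS replay; a positive response flushes from fetch.
  io.needReplay := io.s1_valid && !io.query.req.ready
  io.flushFetch := io.query.resp.valid && io.query.resp.bits.rep_frm_fetch
}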
// Store byte valid mask write bundle
//
// Store byte valid mask write to SQ takes 2 cycles
class StoreMaskBundle(implicit p: Parameters) extends XSBundle {
  val sqIdx = new SqPtr
  val mask = UInt((VLEN/8).W)
}

class LoadDataFromDcacheBundle(implicit p: Parameters) extends DCacheBundle {
  // old dcache: optimize data sram read fanout
  // val bankedDcacheData = Vec(DCacheBanks, UInt(64.W))
  // val bank_oh = UInt(DCacheBanks.W)

  // new dcache
  val respDcacheData = UInt(VLEN.W)
  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(4.W) // for data selection

  // forward tilelink D channel
  val forward_D = Bool()
  val forwardData_D = Vec(VLEN/8, UInt(8.W))

  // forward mshr data
  val forward_mshr = Bool()
  val forwardData_mshr = Vec(VLEN/8, UInt(8.W))

  val forward_result_valid = Bool()

  def dcacheData(): UInt = {
    // old dcache
    // val dcache_data = Mux1H(bank_oh, bankedDcacheData)
    // new dcache
    val dcache_data = respDcacheData
    val use_D = forward_D && forward_result_valid
    val use_mshr = forward_mshr && forward_result_valid
    Mux(use_D, forwardData_D.asUInt, Mux(use_mshr, forwardData_mshr.asUInt, dcache_data))
  }

  def mergedData(): UInt = {
    val rdataVec = VecInit((0 until VLEN / 8).map(j =>
      Mux(forwardMask(j), forwardData(j), dcacheData()(8*(j+1)-1, 8*j))
    ))
    rdataVec.asUInt
  }
}

// Load writeback data from load queue (refill)
class LoadDataFromLQBundle(implicit p: Parameters) extends XSBundle {
  val lqData = UInt(64.W) // load queue has merged data
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(3.W) // for data selection

  def mergedData(): UInt = {
    lqData
  }
}

// Bundle for load / store wait wakeup
class MemWaitUpdateReq(implicit p: Parameters) extends XSBundle {
  val robIdx = Vec(backendParams.StaExuCnt, ValidIO(new RobPtr))
  val sqIdx = Vec(backendParams.StdCnt, ValidIO(new SqPtr))
}

object AddPipelineReg {
  class PipelineRegModule[T <: Data](gen: T) extends Module {
    val io = IO(new Bundle() {
      val in = Flipped(DecoupledIO(gen.cloneType))
      val out = DecoupledIO(gen.cloneType)
      val isFlush = Input(Bool())
    })

    val valid = RegInit(false.B)
    valid.suggestName("pipeline_reg_valid")
    when (io.out.fire) { valid := false.B }
    when (io.in.fire) { valid := true.B }
    when (io.isFlush) { valid := false.B }

    io.in.ready := !valid || io.out.ready
    io.out.bits := RegEnable(io.in.bits, io.in.fire)
    io.out.valid := valid //&& !isFlush
  }

  def apply[T <: Data](
    left: DecoupledIO[T], right: DecoupledIO[T], isFlush: Bool,
    moduleName: Option[String] = None
  ): Unit = {
    val pipelineReg = Module(new PipelineRegModule[T](left.bits.cloneType))
    if (moduleName.nonEmpty) pipelineReg.suggestName(moduleName.get)
    pipelineReg.io.in <> left
    right <> pipelineReg.io.out
    pipelineReg.io.isFlush := isFlush
  }
}
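
// Illustrative sketch (assumption, not taken from any existing user of this file):
// inserting a one-entry pipeline register between two Decoupled stages with
// AddPipelineReg. The register holds its payload while the consumer back-pressures and
// clears its valid bit on flush. The payload type and module name are invented here.
class AddPipelineRegExample extends Module {
  val io = IO(new Bundle {
    val in      = Flipped(DecoupledIO(UInt(32.W)))
    val out     = DecoupledIO(UInt(32.W))
    val isFlush = Input(Bool())
  })

  // Connects io.in -> (registered stage) -> io.out and names the generated module.
  AddPipelineReg(io.in, io.out, io.isFlush, moduleName = Some("example_pipe_reg"))
}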