/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utility._
import utils._
import xiangshan._
import xiangshan.backend.Bundles.{DynInst, MemExuInput}
import xiangshan.backend.rob.RobPtr
import xiangshan.cache._
import xiangshan.backend.fu.FenceToSbuffer
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.mem.prefetch.PrefetchReqBundle
import math._

// Generate the byte-wise write mask for a scalar access: the size encoding selects
// 1/2/4/8 contiguous byte enables, shifted to the byte offset addr(2, 0) within the
// 64-bit (8-byte) lane.
object genWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U,  // 0000_0001 << addr(2:0)
      "b01".U -> 0x3.U,  // 0000_0011
      "b10".U -> 0xf.U,  // 0000_1111
      "b11".U -> 0xff.U  // 1111_1111
    )) << addr(2, 0)).asUInt
  }
}

// Same as genWmask, but for the 128-bit (16-byte) vector lane: the byte offset is
// taken from addr(3, 0).
object genVWmask {
  def apply(addr: UInt, sizeEncode: UInt): UInt = {
    (LookupTree(sizeEncode, List(
      "b00".U -> 0x1.U,  // 0000_0001 << addr(3:0)
      "b01".U -> 0x3.U,  // 0000_0011
      "b10".U -> 0xf.U,  // 0000_1111
      "b11".U -> 0xff.U  // 1111_1111
    )) << addr(3, 0)).asUInt
  }
}

// Replicate the low bytes of the store data across the 128-bit lane according to the
// access size, so the data lines up with the byte enables generated above.
object genWdata {
  def apply(data: UInt, sizeEncode: UInt): UInt = {
    LookupTree(sizeEncode, List(
      "b00".U -> Fill(16, data(7, 0)),
      "b01".U -> Fill(8, data(15, 0)),
      "b10".U -> Fill(4, data(31, 0)),
      "b11".U -> Fill(2, data(63, 0))
    ))
  }
}

// Move the upper 64-bit half of a 128-bit beat (and its mask) down to the low half
// when the access targets the upper half, i.e. addr(3) is set.
object shiftDataToLow {
  def apply(addr: UInt, data: UInt): UInt = {
    Mux(addr(3), (data >> 64).asUInt, data)
  }
}

object shiftMaskToLow {
  def apply(addr: UInt, mask: UInt): UInt = {
    Mux(addr(3), (mask >> 8).asUInt, mask)
  }
}
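// Illustrative worked example (not part of the original file): for a 32-bit access
// whose vaddr ends in 0b100, the size encoding "b10" selects 0xf and the shift by
// addr(2, 0) = 4 places it over the upper four bytes of the 64-bit lane:
//
//   genWmask("h1004".U, "b10".U)   // => "b1111_0000".U (0xf << 4)
//   genVWmask("h1004".U, "b10".U)  // => the same value here, but shifted within a
//                                  //    16-byte lane using addr(3, 0) = 4
//
// A hypothetical convenience wrapper that picks between the scalar and vector mask
// generators could look like the sketch below; it is illustrative only and not used
// by the code in this file.
object genLaneWmask {
  def apply(addr: UInt, sizeEncode: UInt, is128bit: Bool): UInt =
    Mux(is128bit, genVWmask(addr, sizeEncode), genWmask(addr, sizeEncode))
}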
class LsPipelineBundle(implicit p: Parameters) extends XSBundle
  with HasDCacheParameters
  with HasVLSUParameters {
  val uop = new DynInst
  val vaddr = UInt(VAddrBits.W)
  val paddr = UInt(PAddrBits.W)
  val gpaddr = UInt(GPAddrBits.W)
  // val func = UInt(6.W)
  val mask = UInt((VLEN/8).W)
  val data = UInt((VLEN+1).W)
  val wlineflag = Bool() // store writes the whole cache line

  val miss = Bool()
  val tlbMiss = Bool()
  val ptwBack = Bool()
  val af = Bool()
  val mmio = Bool()
  val atomic = Bool()

  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))

  // prefetch
  val isPrefetch = Bool()
  val isHWPrefetch = Bool()
  def isSWPrefetch = isPrefetch && !isHWPrefetch

  // vector
  val isvec = Bool()
  val isLastElem = Bool()
  val is128bit = Bool()
  val uop_unit_stride_fof = Bool()
  val usSecondInv = Bool()
  val elemIdx = UInt(elemIdxBits.W)
  val alignedType = UInt(alignTypeBits.W)
  val mbIndex = UInt(max(vlmBindexBits, vsmBindexBits).W)
  // val rob_idx_valid = Vec(2, Bool())
  // val inner_idx = Vec(2, UInt(3.W))
  // val rob_idx = Vec(2, new RobPtr)
  val reg_offset = UInt(vOffsetBits.W)
  val elemIdxInsideVd = UInt(elemIdxBits.W)
  // val offset = Vec(2, UInt(4.W))
  val vecActive = Bool() // 1: vector active element or scalar mem operation, 0: vector inactive element
  val is_first_ele = Bool()
  // val flowPtr = new VlflowPtr() // VLFlowQueue ptr
  // val sflowPtr = new VsFlowPtr() // VSFlowQueue ptr

  // For debug usage
  val isFirstIssue = Bool()
  val hasROBEntry = Bool()

  // For load replay
  val isLoadReplay = Bool()
  val isFastPath = Bool()
  val isFastReplay = Bool()
  val replayCarry = new ReplayCarry(nWays)

  // For dcache miss load
  val mshrid = UInt(log2Up(cfg.nMissEntries).W)
  val handledByMSHR = Bool()
  val replacementUpdated = Bool()
  val missDbUpdated = Bool()

  val forward_tlDchannel = Bool()
  val dcacheRequireReplay = Bool()
  val delayedLoadError = Bool()
  val lateKill = Bool()
  val feedbacked = Bool()
  val ldCancel = ValidUndirectioned(UInt(log2Ceil(LoadPipelineWidth).W))
  // loadQueueReplay index.
  val schedIndex = UInt(log2Up(LoadQueueReplaySize).W)
}
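// Illustrative sketch (hypothetical stage-local signal names, not from this file):
// an early pipeline stage that only knows the uop and virtual address yet can
// populate the bundle partially and leave the still-unknown fields as DontCare:
//
//   val s0_out = Wire(new LsPipelineBundle)
//   s0_out := DontCare
//   s0_out.uop          := s0_uop
//   s0_out.vaddr        := s0_vaddr
//   s0_out.mask         := genVWmask(s0_vaddr, s0_uop.fuOpType(1, 0))
//   s0_out.isPrefetch   := false.B
//   s0_out.isHWPrefetch := false.B
//   s0_out.isFirstIssue := s0_isFirstIssue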
class LdPrefetchTrainBundle(implicit p: Parameters) extends LsPipelineBundle {
  val meta_prefetch = UInt(L1PfSourceBits.W)
  val meta_access = Bool()

  // Copy the fields relevant for prefetch training from a pipeline bundle, optionally
  // latching them through RegEnable; fields that training does not use are tied to
  // DontCare at the end.
  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) af := RegEnable(input.af, enable) else af := input.af
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) isLastElem := RegEnable(input.isLastElem, enable) else isLastElem := input.isLastElem
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) is_first_ele := RegEnable(input.is_first_ele, enable) else is_first_ele := input.is_first_ele
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) usSecondInv := RegEnable(input.usSecondInv, enable) else usSecondInv := input.usSecondInv
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) elemIdx := RegEnable(input.elemIdx, enable) else elemIdx := input.elemIdx
    if (latch) alignedType := RegEnable(input.alignedType, enable) else alignedType := input.alignedType
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd
    // if (latch) flowPtr := RegEnable(input.flowPtr, enable) else flowPtr := input.flowPtr
    // if (latch) sflowPtr := RegEnable(input.sflowPtr, enable) else sflowPtr := input.sflowPtr

    meta_prefetch := DontCare
    meta_access := DontCare
    forward_tlDchannel := DontCare
    mshrid := DontCare
    replayCarry := DontCare
    atomic := DontCare
    isLoadReplay := DontCare
    isFastPath := DontCare
    isFastReplay := DontCare
    handledByMSHR := DontCare
    replacementUpdated := DontCare
    missDbUpdated := DontCare
    delayedLoadError := DontCare
    lateKill := DontCare
    feedbacked := DontCare
    ldCancel := DontCare
  }

  def asPrefetchReqBundle(): PrefetchReqBundle = {
    val res = Wire(new PrefetchReqBundle)
    res.vaddr := this.vaddr
    res.paddr := this.paddr
    res.pc := this.uop.pc
    res.miss := this.miss

    res
  }
}

class StPrefetchTrainBundle(implicit p: Parameters) extends LdPrefetchTrainBundle {}
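// The per-field `if (latch) x := RegEnable(input.x, enable) else x := input.x` pattern
// above (and again in LqWriteBundle below) could be factored into a small generic
// helper. The object below is a hypothetical sketch for illustration only; it is not
// referenced anywhere in this file.
object LatchConnect {
  def apply[T <: Data](sink: T, source: T, latch: Boolean, enable: Bool): Unit = {
    // `latch` is an elaboration-time choice: either insert a RegEnable stage gated by
    // `enable`, or connect combinationally.
    if (latch) sink := RegEnable(source, enable) else sink := source
  }
}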
class LqWriteBundle(implicit p: Parameters) extends LsPipelineBundle {
  // load inst replay information
  val rep_info = new LoadToLsqReplayIO
  // queue entry data (except flag bits) will be updated if writeQueue is true;
  // the valid bit in LqWriteBundle will be ignored
  val data_wen_dup = Vec(6, Bool()) // dirty reg dup

  def fromLsPipelineBundle(input: LsPipelineBundle, latch: Boolean = false, enable: Bool = true.B) = {
    if (latch) vaddr := RegEnable(input.vaddr, enable) else vaddr := input.vaddr
    if (latch) paddr := RegEnable(input.paddr, enable) else paddr := input.paddr
    if (latch) gpaddr := RegEnable(input.gpaddr, enable) else gpaddr := input.gpaddr
    if (latch) mask := RegEnable(input.mask, enable) else mask := input.mask
    if (latch) data := RegEnable(input.data, enable) else data := input.data
    if (latch) uop := RegEnable(input.uop, enable) else uop := input.uop
    if (latch) wlineflag := RegEnable(input.wlineflag, enable) else wlineflag := input.wlineflag
    if (latch) miss := RegEnable(input.miss, enable) else miss := input.miss
    if (latch) tlbMiss := RegEnable(input.tlbMiss, enable) else tlbMiss := input.tlbMiss
    if (latch) ptwBack := RegEnable(input.ptwBack, enable) else ptwBack := input.ptwBack
    if (latch) mmio := RegEnable(input.mmio, enable) else mmio := input.mmio
    if (latch) atomic := RegEnable(input.atomic, enable) else atomic := input.atomic
    if (latch) forwardMask := RegEnable(input.forwardMask, enable) else forwardMask := input.forwardMask
    if (latch) forwardData := RegEnable(input.forwardData, enable) else forwardData := input.forwardData
    if (latch) isPrefetch := RegEnable(input.isPrefetch, enable) else isPrefetch := input.isPrefetch
    if (latch) isHWPrefetch := RegEnable(input.isHWPrefetch, enable) else isHWPrefetch := input.isHWPrefetch
    if (latch) isFirstIssue := RegEnable(input.isFirstIssue, enable) else isFirstIssue := input.isFirstIssue
    if (latch) hasROBEntry := RegEnable(input.hasROBEntry, enable) else hasROBEntry := input.hasROBEntry
    if (latch) isLoadReplay := RegEnable(input.isLoadReplay, enable) else isLoadReplay := input.isLoadReplay
    if (latch) isFastPath := RegEnable(input.isFastPath, enable) else isFastPath := input.isFastPath
    if (latch) isFastReplay := RegEnable(input.isFastReplay, enable) else isFastReplay := input.isFastReplay
    if (latch) mshrid := RegEnable(input.mshrid, enable) else mshrid := input.mshrid
    if (latch) forward_tlDchannel := RegEnable(input.forward_tlDchannel, enable) else forward_tlDchannel := input.forward_tlDchannel
    if (latch) replayCarry := RegEnable(input.replayCarry, enable) else replayCarry := input.replayCarry
    if (latch) dcacheRequireReplay := RegEnable(input.dcacheRequireReplay, enable) else dcacheRequireReplay := input.dcacheRequireReplay
    if (latch) schedIndex := RegEnable(input.schedIndex, enable) else schedIndex := input.schedIndex
    if (latch) handledByMSHR := RegEnable(input.handledByMSHR, enable) else handledByMSHR := input.handledByMSHR
    if (latch) replacementUpdated := RegEnable(input.replacementUpdated, enable) else replacementUpdated := input.replacementUpdated
    if (latch) missDbUpdated := RegEnable(input.missDbUpdated, enable) else missDbUpdated := input.missDbUpdated
    if (latch) delayedLoadError := RegEnable(input.delayedLoadError, enable) else delayedLoadError := input.delayedLoadError
    if (latch) lateKill := RegEnable(input.lateKill, enable) else lateKill := input.lateKill
    if (latch) feedbacked := RegEnable(input.feedbacked, enable) else feedbacked := input.feedbacked
    if (latch) isvec := RegEnable(input.isvec, enable) else isvec := input.isvec
    if (latch) is128bit := RegEnable(input.is128bit, enable) else is128bit := input.is128bit
    if (latch) vecActive := RegEnable(input.vecActive, enable) else vecActive := input.vecActive
    if (latch) uop_unit_stride_fof := RegEnable(input.uop_unit_stride_fof, enable) else uop_unit_stride_fof := input.uop_unit_stride_fof
    if (latch) reg_offset := RegEnable(input.reg_offset, enable) else reg_offset := input.reg_offset
    if (latch) mbIndex := RegEnable(input.mbIndex, enable) else mbIndex := input.mbIndex
    if (latch) elemIdxInsideVd := RegEnable(input.elemIdxInsideVd, enable) else elemIdxInsideVd := input.elemIdxInsideVd

    rep_info := DontCare
    data_wen_dup := DontCare
  }
}
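// Illustrative usage sketch (hypothetical stage/signal names, not from this file):
// a load pipeline stage writing back to the load queue could populate the bundle
// from its pipeline state and then fill in the replay information separately:
//
//   val s2_lq_wen = Wire(new LqWriteBundle)
//   s2_lq_wen.fromLsPipelineBundle(s2_in, latch = false)
//   s2_lq_wen.rep_info     := s2_replay_info
//   s2_lq_wen.data_wen_dup := VecInit(Seq.fill(6)(s2_data_wen))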
class LoadForwardQueryIO(implicit p: Parameters) extends XSBundle {
  val vaddr = Output(UInt(VAddrBits.W))
  val paddr = Output(UInt(PAddrBits.W))
  val mask = Output(UInt((VLEN/8).W))
  val uop = Output(new DynInst) // for replay
  val pc = Output(UInt(VAddrBits.W)) // for debug
  val valid = Output(Bool())

  val forwardMaskFast = Input(Vec((VLEN/8), Bool())) // resp to load_s1
  val forwardMask = Input(Vec((VLEN/8), Bool())) // resp to load_s2
  val forwardData = Input(Vec((VLEN/8), UInt(8.W))) // resp to load_s2

  // val lqIdx = Output(UInt(LoadQueueIdxWidth.W))
  val sqIdx = Output(new SqPtr)

  // dataInvalid suggests that store-to-load forwarding found a forward should happen,
  // but the data is not available yet. If dataInvalid, the load inst should be
  // replayed from RS. Feedback type should be RSFeedbackType.dataInvalid.
  val dataInvalid = Input(Bool()) // addr match, but data is not valid for now

  // matchInvalid suggests that, in the store-to-load forward logic, the paddr CAM
  // result does not equal the vaddr CAM result. If matchInvalid, a microarchitectural
  // exception should be raised to flush the SQ and the committed sbuffer.
  val matchInvalid = Input(Bool()) // resp to load_s2

  // addrInvalid suggests that store-to-load forwarding found a forward should happen,
  // but the address (SSID) is not available yet. If addrInvalid, the load inst should
  // be replayed from RS. Feedback type should be RSFeedbackType.addrInvalid.
  val addrInvalid = Input(Bool())
}

// LoadForwardQueryIO used in the load pipeline.
//
// Difference between PipeLoadForwardQueryIO and LoadForwardQueryIO:
// PipeLoadForwardQueryIO uses a predecoded sqIdxMask for better forward timing.
class PipeLoadForwardQueryIO(implicit p: Parameters) extends LoadForwardQueryIO {
  // val sqIdx = Output(new SqPtr) // for debug, should not be used in pipeline for timing reasons
  // sqIdxMask is calculated in an earlier stage for better timing
  val sqIdxMask = Output(UInt(StoreQueueSize.W))

  // dataInvalid: addr match, but data is not valid for now
  val dataInvalidFast = Input(Bool()) // resp to load_s1
  // val dataInvalid = Input(Bool()) // resp to load_s2
  val dataInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
  val addrInvalidSqIdx = Input(new SqPtr) // resp to load_s2, sqIdx
}

// Query the load queue for ld-ld violations.
//
// Req should be sent in load_s1.
// Resp will be generated 1 cycle later.
//
// Note that the query req may be !ready while the dcache is releasing a block.
// If that happens, a replay from RS is needed.
class LoadNukeQueryReq(implicit p: Parameters) extends XSBundle { // provide lqIdx
  val uop = new DynInst
  // mask: load's data mask.
  val mask = UInt((VLEN/8).W)

  // paddr: load's paddr.
  val paddr = UInt(PAddrBits.W)
  // data_valid: load data is valid.
  val data_valid = Bool()
}

class LoadNukeQueryResp(implicit p: Parameters) extends XSBundle {
  // rep_frm_fetch: ld-ld violation check hit, replay from fetch.
  val rep_frm_fetch = Bool()
}

class LoadNukeQueryIO(implicit p: Parameters) extends XSBundle {
  val req = Decoupled(new LoadNukeQueryReq)
  val resp = Flipped(Valid(new LoadNukeQueryResp))
  val revoke = Output(Bool())
}

class StoreNukeQueryIO(implicit p: Parameters) extends XSBundle {
  // robIdx: requestor's (a store instruction) rob index for match logic.
  val robIdx = new RobPtr

  // paddr: requestor's (a store instruction) physical address for match logic.
  val paddr = UInt(PAddrBits.W)

  // mask: requestor's (a store instruction) data width mask for match logic.
  val mask = UInt((VLEN/8).W)

  // matchLine: if the store is a 128-bit vector store, the load unit needs to compare
  // the 128-bit vaddr.
  val matchLine = Bool()
}
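// Illustrative handshake sketch for LoadNukeQueryIO (hypothetical port and stage
// signal names, not from this file): the request is issued in load_s1, the response
// is sampled one cycle later in load_s2, and `revoke` withdraws the query if the
// load is squashed after issuing it:
//
//   io.stld_nuke_query.req.valid           := s1_valid
//   io.stld_nuke_query.req.bits.uop        := s1_in.uop
//   io.stld_nuke_query.req.bits.mask       := s1_in.mask
//   io.stld_nuke_query.req.bits.paddr      := s1_paddr
//   io.stld_nuke_query.req.bits.data_valid := s1_data_valid
//
//   // load_s2: resp arrives 1 cycle after req
//   val s2_rep_frm_fetch = io.stld_nuke_query.resp.valid &&
//                          io.stld_nuke_query.resp.bits.rep_frm_fetch
//   io.stld_nuke_query.revoke := s2_kill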
// Store byte valid mask write bundle.
//
// Writing the store byte valid mask to the SQ takes 2 cycles.
class StoreMaskBundle(implicit p: Parameters) extends XSBundle {
  val sqIdx = new SqPtr
  val mask = UInt((VLEN/8).W)
}

class LoadDataFromDcacheBundle(implicit p: Parameters) extends DCacheBundle {
  // old dcache: optimize data sram read fanout
  // val bankedDcacheData = Vec(DCacheBanks, UInt(64.W))
  // val bank_oh = UInt(DCacheBanks.W)

  // new dcache
  val respDcacheData = UInt(VLEN.W)
  val forwardMask = Vec(VLEN/8, Bool())
  val forwardData = Vec(VLEN/8, UInt(8.W))
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(4.W) // for data selection

  // forward tilelink D channel
  val forward_D = Bool()
  val forwardData_D = Vec(VLEN/8, UInt(8.W))

  // forward mshr data
  val forward_mshr = Bool()
  val forwardData_mshr = Vec(VLEN/8, UInt(8.W))

  val forward_result_valid = Bool()

  // Select the load data source: data forwarded from the TileLink D channel has the
  // highest priority, then data forwarded from the MSHR, then the dcache response.
  def dcacheData(): UInt = {
    // old dcache
    // val dcache_data = Mux1H(bank_oh, bankedDcacheData)
    // new dcache
    val dcache_data = respDcacheData
    val use_D = forward_D && forward_result_valid
    val use_mshr = forward_mshr && forward_result_valid
    Mux(use_D, forwardData_D.asUInt, Mux(use_mshr, forwardData_mshr.asUInt, dcache_data))
  }

  // Byte-wise merge: bytes covered by store-to-load forwarding take forwardData,
  // the remaining bytes come from dcacheData().
  def mergedData(): UInt = {
    val rdataVec = VecInit((0 until VLEN / 8).map(j =>
      Mux(forwardMask(j), forwardData(j), dcacheData()(8*(j+1)-1, 8*j))
    ))
    rdataVec.asUInt
  }
}

// Load writeback data from the load queue (refill)
class LoadDataFromLQBundle(implicit p: Parameters) extends XSBundle {
  val lqData = UInt(64.W) // load queue has merged data
  val uop = new DynInst // for data selection, only fwen and fuOpType are used
  val addrOffset = UInt(3.W) // for data selection

  def mergedData(): UInt = {
    lqData
  }
}

// Bundle for load / store wait wake-up
class MemWaitUpdateReq(implicit p: Parameters) extends XSBundle {
  val robIdx = Vec(backendParams.StaExuCnt, ValidIO(new RobPtr))
  val sqIdx = Vec(backendParams.StdCnt, ValidIO(new SqPtr))
}

object AddPipelineReg {
  class PipelineRegModule[T <: Data](gen: T) extends Module {
    val io = IO(new Bundle() {
      val in = Flipped(DecoupledIO(gen.cloneType))
      val out = DecoupledIO(gen.cloneType)
      val isFlush = Input(Bool())
    })

    val valid = RegInit(false.B)
    valid.suggestName("pipeline_reg_valid")
    // Last connection wins: a flush clears the staged valid bit even if a new beat
    // fires in the same cycle.
    when (io.out.fire) { valid := false.B }
    when (io.in.fire) { valid := true.B }
    when (io.isFlush) { valid := false.B }

    io.in.ready := !valid || io.out.ready
    io.out.bits := RegEnable(io.in.bits, io.in.fire)
    io.out.valid := valid //&& !isFlush
  }

  def apply[T <: Data](
    left: DecoupledIO[T], right: DecoupledIO[T], isFlush: Bool,
    moduleName: Option[String] = None
  ): Unit = {
    val pipelineReg = Module(new PipelineRegModule[T](left.bits.cloneType))
    if (moduleName.nonEmpty) pipelineReg.suggestName(moduleName.get)
    pipelineReg.io.in <> left
    right <> pipelineReg.io.out
    pipelineReg.io.isFlush := isFlush
  }
}
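// Illustrative usage sketch for AddPipelineReg (hypothetical request type and signal
// names, not from this file): insert one pipeline register stage between a producer
// and a consumer, clearing the staged entry on redirect:
//
//   val reqStaged = Wire(DecoupledIO(new SomeReqBundle)) // SomeReqBundle is hypothetical
//   AddPipelineReg(producer.io.req, reqStaged, io.redirect.valid, Some("req_pipe_reg"))
//   consumer.io.req <> reqStaged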