package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache._
import xiangshan.cache.{DCacheWordIO, DCacheLineIO, TlbRequestIO, MemoryOpConstants}
import xiangshan.backend.LSUOpType
import xiangshan.mem._
import xiangshan.backend.roq.RoqPtr

class LQDataEntry extends XSBundle {
  // val vaddr = UInt(VAddrBits.W)
  val paddr = UInt(PAddrBits.W)
  val mask = UInt(8.W)
  val data = UInt(XLEN.W)
  val fwdMask = Vec(8, Bool())
}

// Data module definitions
// These data modules are like SyncDataModuleTemplate, but support CAM-like ops
class LQPaddrModule(numEntries: Int, numRead: Int, numWrite: Int) extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle {
    val raddr = Input(Vec(numRead, UInt(log2Up(numEntries).W)))
    val rdata = Output(Vec(numRead, UInt((PAddrBits).W)))
    val wen = Input(Vec(numWrite, Bool()))
    val waddr = Input(Vec(numWrite, UInt(log2Up(numEntries).W)))
    val wdata = Input(Vec(numWrite, UInt((PAddrBits).W)))
    val violationMdata = Input(Vec(2, UInt((PAddrBits).W)))
    val violationMmask = Output(Vec(2, Vec(numEntries, Bool())))
    val refillMdata = Input(UInt((PAddrBits).W))
    val refillMmask = Output(Vec(numEntries, Bool()))
  })

  val data = Reg(Vec(numEntries, UInt((PAddrBits).W)))

  // read ports (sync read: the read address is registered, data is read out combinationally)
  for (i <- 0 until numRead) {
    io.rdata(i) := data(RegNext(io.raddr(i)))
  }

  // below are the write ports (with priorities)
  for (i <- 0 until numWrite) {
    when (io.wen(i)) {
      data(io.waddr(i)) := io.wdata(i)
    }
  }

  // content addressed match: compare the store paddr (8-byte granularity) against all entries
  for (i <- 0 until 2) {
    for (j <- 0 until numEntries) {
      io.violationMmask(i)(j) := io.violationMdata(i)(PAddrBits-1, 3) === data(j)(PAddrBits-1, 3)
    }
  }

  // content addressed match: compare the refill block address against all entries
  for (j <- 0 until numEntries) {
    io.refillMmask(j) := get_block_addr(io.refillMdata) === get_block_addr(data(j))
  }

  // DataModuleTemplate should not be used when there are any write conflicts
  for (i <- 0 until numWrite) {
    for (j <- i+1 until numWrite) {
      assert(!(io.wen(i) && io.wen(j) && io.waddr(i) === io.waddr(j)))
    }
  }
}

class MaskModule(numEntries: Int, numRead: Int, numWrite: Int) extends XSModule {
  val io = IO(new Bundle {
    val raddr = Input(Vec(numRead, UInt(log2Up(numEntries).W)))
    val rdata = Output(Vec(numRead, UInt(8.W)))
    val wen = Input(Vec(numWrite, Bool()))
    val waddr = Input(Vec(numWrite, UInt(log2Up(numEntries).W)))
    val wdata = Input(Vec(numWrite, UInt(8.W)))
    val violationMdata = Input(Vec(2, UInt((PAddrBits).W)))
    val violationMmask = Output(Vec(2, Vec(numEntries, Bool())))
  })

  val data = Reg(Vec(numEntries, UInt(8.W)))

  // read ports
  for (i <- 0 until numRead) {
    io.rdata(i) := data(RegNext(io.raddr(i)))
  }

  // below are the write ports (with priorities)
  for (i <- 0 until numWrite) {
    when (io.wen(i)) {
      data(io.waddr(i)) := io.wdata(i)
    }
  }

  // content addressed match: any byte overlap between the store mask and the entry's mask
  for (i <- 0 until 2) {
    for (j <- 0 until numEntries) {
      io.violationMmask(i)(j) := (io.violationMdata(i) & data(j)).orR
    }
  }

  // DataModuleTemplate should not be used when there are any write conflicts
  for (i <- 0 until numWrite) {
    for (j <- i+1 until numWrite) {
      assert(!(io.wen(i) && io.wen(j) && io.waddr(i) === io.waddr(j)))
    }
  }
}

// class LQData8Module(numEntries: Int, numRead: Int, numWrite: Int) extends XSModule with HasDCacheParameters {
//   val io = IO(new Bundle {
//     // read
//     val raddr = Input(Vec(numRead, UInt(log2Up(numEntries).W)))
//     val rdata = Output(Vec(numRead, UInt(8.W)))
//     // address indexed write
//     val wen = Input(Vec(numWrite, Bool()))
//     val waddr = Input(Vec(numWrite, UInt(log2Up(numEntries).W)))
//     val wdata = Input(Vec(numWrite, UInt(8.W)))
//     // masked write
//     val mwmask = Input(Vec(blockWords, Vec(numEntries, Bool())))
//     val mwdata = Input(Vec(blockWords, UInt(8.W)))
//   })

//   val data = Reg(Vec(numEntries, UInt(8.W)))

//   // read ports
//   for (i <- 0 until numRead) {
//     io.rdata(i) := data(RegNext(io.raddr(i)))
//   }

//   // below are the write ports (with priorities)
//   for (i <- 0 until numWrite) {
//     when (io.wen(i)) {
//       data(io.waddr(i)) := io.wdata(i)
//     }
//   }

//   // masked write
//   for (j <- 0 until numEntries) {
//     val wen = VecInit((0 until blockWords).map(i => io.mwmask(i)(j))).asUInt.orR
//     when (wen) {
//       data(j) := VecInit((0 until blockWords).map(i => {
//         Mux(io.mwmask(i)(j), io.mwdata(i), 0.U)
//       })).reduce(_ | _)
//     }
//   }

//   // DataModuleTemplate should not be used when there are any write conflicts
//   for (i <- 0 until numWrite) {
//     for (j <- i+1 until numWrite) {
//       assert(!(io.wen(i) && io.wen(j) && io.waddr(i) === io.waddr(j)))
//     }
//   }
// }

class CoredataModule(numEntries: Int, numRead: Int, numWrite: Int) extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle {
    // data io
    // read
    val raddr = Input(Vec(numRead, UInt(log2Up(numEntries).W)))
    val rdata = Output(Vec(numRead, UInt(XLEN.W)))
    // address indexed write
    val wen = Input(Vec(numWrite, Bool()))
    val waddr = Input(Vec(numWrite, UInt(log2Up(numEntries).W)))
    val wdata = Input(Vec(numWrite, UInt(XLEN.W)))
    // masked write
    val mwmask = Input(Vec(numEntries, Bool()))
    val refillData = Input(UInt((cfg.blockBytes * 8).W))

    // fwdMask io
    val fwdMaskWdata = Input(Vec(numWrite, UInt(8.W)))
    val fwdMaskWen = Input(Vec(numWrite, Bool()))
    // fwdMaskWaddr = waddr

    // paddr io
    // the word-index bits of paddr (blockOffBits - wordOffBits bits) need to be stored in CoredataModule for refilling
    val paddrWdata = Input(Vec(numWrite, UInt((PAddrBits).W)))
    val paddrWen = Input(Vec(numWrite, Bool()))
  })

  val data8 = Seq.fill(8)(Module(new MaskedSyncDataModuleTemplate(UInt(8.W), numEntries, numRead, numWrite, numMWrite = blockWords)))
  val fwdMask = Reg(Vec(numEntries, UInt(8.W)))
  val wordIndex = Reg(Vec(numEntries, UInt((blockOffBits - wordOffBits).W)))

  // read ports
  for (i <- 0 until numRead) {
    for (j <- 0 until 8) {
      data8(j).io.raddr(i) := io.raddr(i)
    }
    io.rdata(i) := VecInit((0 until 8).map(j => data8(j).io.rdata(i))).asUInt
  }

  // below are the write ports (with priorities)
  for (i <- 0 until numWrite) {
    // write to data8
    for (j <- 0 until 8) {
      data8(j).io.waddr(i) := io.waddr(i)
      data8(j).io.wdata(i) := io.wdata(i)(8*(j+1)-1, 8*j)
      data8(j).io.wen(i) := io.wen(i)
    }

    // write ctrl info
    when (io.fwdMaskWen(i)) {
      fwdMask(io.waddr(i)) := io.fwdMaskWdata(i)
    }
    when (io.paddrWen(i)) {
      wordIndex(io.waddr(i)) := get_word(io.paddrWdata(i))
    }
  }

  // write refilled data to data8

  // select refill data
  // split dcache refill data into words
  val words = VecInit((0 until blockWords) map { i => io.refillData(DataBits * (i + 1) - 1, DataBits * i) })
  // select refill data according to wordIndex (paddr)
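  // Refill write path: each of the 8 data8 byte banks receives every candidate
  // word of the refilled block on its mwdata ports; the mwmask generated below
  // then selects, per entry, the word indicated by wordIndex and skips bytes
  // already provided by store-to-load forwarding (fwdMask), so forwarded data
  // is never overwritten by the refill.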
  for (i <- 0 until 8) {
    for (j <- 0 until blockWords) {
      data8(i).io.mwdata(j) := words(j)(8*(i+1)-1, 8*i)
    }
  }

  // gen refill wmask
  for (j <- 0 until blockWords) {
    for (k <- 0 until numEntries) {
      val wordMatch = wordIndex(k) === j.U
      for (i <- 0 until 8) {
        data8(i).io.mwmask(j)(k) := wordMatch && io.mwmask(k) && !fwdMask(k)(i)
      }
    }
  }

  // DataModuleTemplate should not be used when there are any write conflicts
  for (i <- 0 until numWrite) {
    for (j <- i+1 until numWrite) {
      assert(!(io.wen(i) && io.wen(j) && io.waddr(i) === io.waddr(j)))
    }
  }
}

class LoadQueueData(size: Int, wbNumRead: Int, wbNumWrite: Int) extends XSModule with HasDCacheParameters with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val wb = new Bundle() {
      val wen = Input(Vec(wbNumWrite, Bool()))
      val waddr = Input(Vec(wbNumWrite, UInt(log2Up(size).W)))
      val wdata = Input(Vec(wbNumWrite, new LQDataEntry))
      val raddr = Input(Vec(wbNumRead, UInt(log2Up(size).W)))
      val rdata = Output(Vec(wbNumRead, new LQDataEntry))
    }
    val uncache = new Bundle() {
      val wen = Input(Bool())
      val waddr = Input(UInt(log2Up(size).W))
      val wdata = Input(UInt(XLEN.W)) // only write back uncache data
      val raddr = Input(UInt(log2Up(size).W))
      val rdata = Output(new LQDataEntry)
    }
    val refill = new Bundle() {
      val valid = Input(Bool())
      val paddr = Input(UInt(PAddrBits.W))
      val data = Input(UInt((cfg.blockBytes * 8).W))
      val refillMask = Input(Vec(size, Bool()))
      val matchMask = Output(Vec(size, Bool()))
    }
    val violation = Vec(StorePipelineWidth, new Bundle() {
      val paddr = Input(UInt(PAddrBits.W))
      val mask = Input(UInt(8.W))
      val violationMask = Output(Vec(size, Bool()))
    })
    val debug = Output(Vec(size, new LQDataEntry))

    def wbWrite(channel: Int, waddr: UInt, wdata: LQDataEntry): Unit = {
      require(channel >= 0 && channel < wbNumWrite)
      // needs an extra "this.wb.wen(channel) := true.B" from the caller
      this.wb.waddr(channel) := waddr
      this.wb.wdata(channel) := wdata
    }

    def uncacheWrite(waddr: UInt, wdata: UInt): Unit = {
      // needs an extra "this.uncache.wen := true.B" from the caller
      this.uncache.waddr := waddr
      this.uncache.wdata := wdata
    }

    // def refillWrite(ldIdx: Int): Unit = {
    // }
    // use "this.refill.refillMask(ldIdx) := true.B" instead
  })

  // val data = Reg(Vec(size, new LQDataEntry))
  // data modules
  val paddrModule = Module(new LQPaddrModule(size, numRead = 3, numWrite = 2))
  val maskModule = Module(new MaskModule(size, numRead = 3, numWrite = 2))
  val coredataModule = Module(new CoredataModule(size, numRead = 3, numWrite = 3))

  // read data
  // read ports 0 -> wbNumRead-1
  (0 until wbNumRead).foreach(i => {
    paddrModule.io.raddr(i) := io.wb.raddr(i)
    maskModule.io.raddr(i) := io.wb.raddr(i)
    coredataModule.io.raddr(i) := io.wb.raddr(i)

    io.wb.rdata(i).paddr := paddrModule.io.rdata(i)
    io.wb.rdata(i).mask := maskModule.io.rdata(i)
    io.wb.rdata(i).data := coredataModule.io.rdata(i)
    io.wb.rdata(i).fwdMask := DontCare
  })

  // read port wbNumRead
  paddrModule.io.raddr(wbNumRead) := io.uncache.raddr
  maskModule.io.raddr(wbNumRead) := io.uncache.raddr
  coredataModule.io.raddr(wbNumRead) := io.uncache.raddr

  io.uncache.rdata.paddr := paddrModule.io.rdata(wbNumRead)
  io.uncache.rdata.mask := maskModule.io.rdata(wbNumRead)
  io.uncache.rdata.data := coredataModule.io.rdata(wbNumRead)
  io.uncache.rdata.fwdMask := DontCare

  // write data
  // write ports 0 -> wbNumWrite-1
  (0 until wbNumWrite).foreach(i => {
    paddrModule.io.wen(i) := false.B
    maskModule.io.wen(i) := false.B
    coredataModule.io.wen(i) := false.B
    coredataModule.io.fwdMaskWen(i) := false.B
    coredataModule.io.paddrWen(i) := false.B

    paddrModule.io.waddr(i) := io.wb.waddr(i)
    maskModule.io.waddr(i) := io.wb.waddr(i)
    coredataModule.io.waddr(i) := io.wb.waddr(i)

    paddrModule.io.wdata(i) := io.wb.wdata(i).paddr
    maskModule.io.wdata(i) := io.wb.wdata(i).mask
    coredataModule.io.wdata(i) := io.wb.wdata(i).data
    coredataModule.io.fwdMaskWdata(i) := io.wb.wdata(i).fwdMask.asUInt
    coredataModule.io.paddrWdata(i) := io.wb.wdata(i).paddr

    when (io.wb.wen(i)) {
      paddrModule.io.wen(i) := true.B
      maskModule.io.wen(i) := true.B
      coredataModule.io.wen(i) := true.B
      coredataModule.io.fwdMaskWen(i) := true.B
      coredataModule.io.paddrWen(i) := true.B
    }
  })

  // write port wbNumWrite: uncache write back (data only)
  // exceptionModule.io.wen(wbNumWrite) := false.B
  coredataModule.io.wen(wbNumWrite) := io.uncache.wen
  coredataModule.io.fwdMaskWen(wbNumWrite) := false.B
  coredataModule.io.paddrWen(wbNumWrite) := false.B

  coredataModule.io.waddr(wbNumWrite) := io.uncache.waddr

  coredataModule.io.fwdMaskWdata(wbNumWrite) := DontCare
  coredataModule.io.paddrWdata(wbNumWrite) := DontCare
  coredataModule.io.wdata(wbNumWrite) := io.uncache.wdata

  // mem access violation check, gen violationMask
  (0 until StorePipelineWidth).foreach(i => {
    paddrModule.io.violationMdata(i) := io.violation(i).paddr
    maskModule.io.violationMdata(i) := io.violation(i).mask
    io.violation(i).violationMask := (paddrModule.io.violationMmask(i).asUInt & maskModule.io.violationMmask(i).asUInt).asBools
    // VecInit((0 until size).map(j => {
    //   val addrMatch = io.violation(i).paddr(PAddrBits - 1, 3) === data(j).paddr(PAddrBits - 1, 3)
    //   val violationVec = (0 until 8).map(k => data(j).mask(k) && io.violation(i).mask(k))
    //   Cat(violationVec).orR() && addrMatch
    // }))
  })

  // refill missed load
  def mergeRefillData(refill: UInt, fwd: UInt, fwdMask: UInt): UInt = {
    val res = Wire(Vec(8, UInt(8.W)))
    (0 until 8).foreach(i => {
      res(i) := Mux(fwdMask(i), fwd(8 * (i + 1) - 1, 8 * i), refill(8 * (i + 1) - 1, 8 * i))
    })
    res.asUInt
  }

  // gen paddr match mask
  paddrModule.io.refillMdata := io.refill.paddr
  io.refill.matchMask := paddrModule.io.refillMmask
  // i.e. matchMask(i) = get_block_addr(entry(i).paddr) === get_block_addr(io.refill.paddr)

  // refill data according to matchMask, refillMask and refill.valid
  coredataModule.io.refillData := io.refill.data
  (0 until size).foreach(i => {
    coredataModule.io.mwmask(i) := io.refill.valid && io.refill.matchMask(i) && io.refill.refillMask(i)
  })

  // debug data read
  io.debug := DontCare
}
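// Usage sketch (illustrative only, not instantiated in this file): how a client
// such as the load queue is expected to drive the write helpers above. The
// parameter names LoadQueueSize and LoadPipelineWidth follow the usual
// XSParameter naming; loadWbEn, loadWbIndex, loadWbData, uncacheRespFire,
// deqPtr and uncacheRespData are hypothetical placeholder signals.
//
//   val dataModule = Module(new LoadQueueData(LoadQueueSize, wbNumRead = LoadPipelineWidth, wbNumWrite = LoadPipelineWidth))
//   for (i <- 0 until LoadPipelineWidth) {
//     // wbWrite only sets waddr/wdata for the channel; wen must be asserted by the caller
//     dataModule.io.wb.wen(i) := false.B
//     when (loadWbEn(i)) {
//       dataModule.io.wbWrite(i, loadWbIndex(i), loadWbData(i))
//       dataModule.io.wb.wen(i) := true.B
//     }
//   }
//
//   // uncache write back: data only, again with wen asserted by the caller
//   dataModule.io.uncache.wen := false.B
//   when (uncacheRespFire) {
//     dataModule.io.uncacheWrite(deqPtr, uncacheRespData)
//     dataModule.io.uncache.wen := true.B
//   }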