/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utility.{XSDebug, XSInfo}
import xiangshan._
import xiangshan.cache.{DCacheLineIO, DCacheWordReq, MemoryOpConstants, DCacheWordReqWithVaddr}

// Fake store buffer for the XiangShan out-of-order LSU.
//
// Note: the fake store buffer is out of date, as the store buffer is now
// used as an extended dcache miss queue for stores.
class FakeSbuffer(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Vec(StorePipelineWidth, Flipped(Decoupled(new DCacheWordReqWithVaddr)))
    val dcache = new DCacheLineIO
    val forward = Vec(LoadPipelineWidth, Flipped(new LoadForwardQueryIO))
  })

  // Port 1 may only be valid when port 0 is valid.
  assert(!(io.in(1).valid && !io.in(0).valid))

  // Assign default values to signals. Only port 0 is ever accepted:
  // this fake buffer handles a single in-flight store at a time.
  io.in(1).ready := false.B

  io.dcache.req.valid := false.B
  io.dcache.req.bits := DontCare
  io.dcache.resp.ready := false.B

  // Simple three-state FSM: idle -> issue dcache write -> wait for response.
  val s_invalid :: s_req :: s_resp :: Nil = Enum(3)

  val state = RegInit(s_invalid)

  // The single buffered store request.
  val req = Reg(new DCacheWordReqWithVaddr)

  XSDebug("state: %d\n", state)

  io.in(0).ready := state === s_invalid

  // Align an address to an 8-byte word / 64-byte cache line boundary.
  def word_addr(addr: UInt) = (addr >> 3) << 3
  def block_addr(addr: UInt) = (addr >> 6) << 6

  // --------------------------------------------
  // s_invalid: receive requests
  when (state === s_invalid) {
    when (io.in(0).fire) {
      req := io.in(0).bits
      state := s_req
    }
  }

  // Scatter the buffered 8-byte word (and its byte mask) into its slot of a
  // 64-byte cache line, selected by addr(5,3); all other slots stay zero.
  val wdataVec = WireInit(VecInit(Seq.fill(8)(0.U(64.W))))
  val wmaskVec = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))
  wdataVec(req.addr(5, 3)) := req.data
  wmaskVec(req.addr(5, 3)) := req.mask

  // --------------------------------------------
  // s_req: issue the masked line write to dcache
  when (state === s_req) {
    val dcache_req = io.dcache.req
    dcache_req.valid := true.B
    dcache_req.bits.cmd := MemoryOpConstants.M_XWR
    dcache_req.bits.addr := block_addr(req.addr)
    dcache_req.bits.data := wdataVec.asUInt
    dcache_req.bits.mask := wmaskVec.asUInt
    dcache_req.bits.id := DontCare

    when (dcache_req.fire) {
      state := s_resp
    }
  }

  // --------------------------------------------
  // s_resp: wait for the dcache write response
  when (state === s_resp) {
    io.dcache.resp.ready := true.B
    when (io.dcache.resp.fire) {
      state := s_invalid
    }
  }

  // Store-to-load forwarding: while a store is in flight, forward the bytes
  // whose word address and byte mask match the load query.
  for (i <- 0 until LoadPipelineWidth) {
    val addr_match = word_addr(io.forward(i).paddr) === word_addr(req.addr)
    val mask = io.forward(i).mask & req.mask(7, 0)
    val mask_match = mask =/= 0.U
    val need_forward = state =/= s_invalid && addr_match && mask_match

    io.forward(i).forwardMask := Mux(need_forward, VecInit(mask.asBools),
      VecInit(0.U(8.W).asBools))
    // Forwarded data is always driven; forwardMask selects the valid bytes.
    io.forward(i).forwardData := VecInit((0 until 8) map { j => req.data((j + 1) * 8 - 1, j * 8) })
  }

  XSInfo(io.in(0).fire, "ensbuffer addr 0x%x wdata 0x%x mask %b\n", io.in(0).bits.addr, io.in(0).bits.data, io.in(0).bits.mask)
  XSInfo(io.in(1).fire, "ensbuffer addr 0x%x wdata 0x%x mask %b\n", io.in(1).bits.addr, io.in(1).bits.data, io.in(1).bits.mask)
  XSInfo(io.dcache.req.fire, "desbuffer addr 0x%x wdata 0x%x mask %b\n", io.dcache.req.bits.addr, io.dcache.req.bits.data, io.dcache.req.bits.mask)
}
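
// --------------------------------------------------------------------------
// Illustrative sketch, not part of the hardware above: a plain-Scala model of
// how FakeSbuffer scatters one 8-byte store into a 64-byte cache line,
// mirroring the wdataVec / wmaskVec assignments and the block_addr alignment.
// The object and helper names below (FakeSbufferScatterExample, lineOffset,
// lineBase, scatterData, scatterMask) are hypothetical and exist only for
// this example.
object FakeSbufferScatterExample {
  // Index of the 8-byte word inside its 64-byte line, i.e. addr(5,3).
  def lineOffset(addr: Long): Int = ((addr >> 3) & 0x7).toInt

  // 64-byte line base address, i.e. block_addr(addr).
  def lineBase(addr: Long): Long = (addr >> 6) << 6

  // Place one 64-bit data word / 8-bit byte mask at its offset in the line.
  def scatterData(addr: Long, data: BigInt): BigInt = data << (64 * lineOffset(addr))
  def scatterMask(addr: Long, mask: Int): BigInt = BigInt(mask & 0xff) << (8 * lineOffset(addr))

  def main(args: Array[String]): Unit = {
    val addr = 0x80001f38L // addr(5,3) = 7, so this is word 7 of its line
    println("lineBase = 0x" + lineBase(addr).toHexString)           // 0x80001f00
    println("offset   = " + lineOffset(addr))                       // 7
    println("mask     = 0x" + scatterMask(addr, 0xff).toString(16)) // 0xff00000000000000
  }
}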