/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache._
import xiangshan.cache.{DCacheWordIO, DCacheLineIO, MemoryOpConstants}
import xiangshan.cache.mmu.{TlbRequestIO}
import xiangshan.mem._
import xiangshan.backend.rob.RobLsqIO

class ExceptionAddrIO(implicit p: Parameters) extends XSBundle {
  val lsIdx = Input(new LSIdx)
  val isStore = Input(Bool())
  val vaddr = Output(UInt(VAddrBits.W))
}

class FwdEntry extends Bundle {
  val validFast = Bool() // validFast is generated in the same cycle as the query
  val valid = Bool()     // valid is generated 1 cycle after the query request
  val data = UInt(8.W)   // data is generated 1 cycle after the query request
}

// inflight miss block reqs
class InflightBlockInfo(implicit p: Parameters) extends XSBundle {
  val block_addr = UInt(PAddrBits.W)
  val valid = Bool()
}

class LsqEnqIO(implicit p: Parameters) extends XSBundle {
  val canAccept = Output(Bool())
  val needAlloc = Vec(exuParameters.LsExuCnt, Input(UInt(2.W)))
  val req = Vec(exuParameters.LsExuCnt, Flipped(ValidIO(new MicroOp)))
  val resp = Vec(exuParameters.LsExuCnt, Output(new LSIdx))
}

// Load / Store Queue Wrapper for XiangShan out-of-order LSU
class LsqWrappper(implicit p: Parameters) extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle() {
    val enq = new LsqEnqIO
    val brqRedirect = Flipped(ValidIO(new Redirect))
    val loadIn = Vec(LoadPipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val storeIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val storeDataIn = Vec(StorePipelineWidth, Flipped(Valid(new StoreDataBundle))) // store data, sent to sq from rs
    val loadDataForwarded = Vec(LoadPipelineWidth, Input(Bool()))
    val needReplayFromRS = Vec(LoadPipelineWidth, Input(Bool()))
    val sbuffer = Vec(StorePipelineWidth, Decoupled(new DCacheWordReqWithVaddr))
    val ldout = Vec(2, DecoupledIO(new ExuOutput)) // writeback int load
    val mmioStout = DecoupledIO(new ExuOutput) // writeback uncached store
    val forward = Vec(LoadPipelineWidth, Flipped(new PipeLoadForwardQueryIO))
    val loadViolationQuery = Vec(LoadPipelineWidth, Flipped(new LoadViolationQueryIO))
    val rob = Flipped(new RobLsqIO)
    val rollback = Output(Valid(new Redirect))
    val dcache = Flipped(ValidIO(new Refill))
    val release = Flipped(ValidIO(new Release))
    val uncache = new DCacheWordIO
    val exceptionAddr = new ExceptionAddrIO
    val sqempty = Output(Bool())
    val issuePtrExt = Output(new SqPtr)
    val sqFull = Output(Bool())
    val lqFull = Output(Bool())
  })

  val loadQueue = Module(new LoadQueue)
  val storeQueue = Module(new StoreQueue)

  // io.enq logic
  // LSQ: send out canAccept when both load queue and store queue are ready
  // Dispatch: send instructions to LSQ only when they are ready
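  // Each dispatch slot carries a 2-bit needAlloc mask: bit 0 requests a load
  // queue entry, bit 1 a store queue entry. Both queues report the allocated
  // index through resp, and each entry also records its counterpart's index
  // (sqIdx in the load queue, lqIdx in the store queue), as wired below.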
  io.enq.canAccept := loadQueue.io.enq.canAccept && storeQueue.io.enq.canAccept
  loadQueue.io.enq.sqCanAccept := storeQueue.io.enq.canAccept
  storeQueue.io.enq.lqCanAccept := loadQueue.io.enq.canAccept
  for (i <- io.enq.req.indices) {
    loadQueue.io.enq.needAlloc(i)      := io.enq.needAlloc(i)(0)
    loadQueue.io.enq.req(i).valid      := io.enq.needAlloc(i)(0) && io.enq.req(i).valid
    loadQueue.io.enq.req(i).bits       := io.enq.req(i).bits
    loadQueue.io.enq.req(i).bits.sqIdx := storeQueue.io.enq.resp(i)

    storeQueue.io.enq.needAlloc(i)      := io.enq.needAlloc(i)(1)
    storeQueue.io.enq.req(i).valid      := io.enq.needAlloc(i)(1) && io.enq.req(i).valid
    storeQueue.io.enq.req(i).bits       := io.enq.req(i).bits
    storeQueue.io.enq.req(i).bits.lqIdx := loadQueue.io.enq.resp(i)

    io.enq.resp(i).lqIdx := loadQueue.io.enq.resp(i)
    io.enq.resp(i).sqIdx := storeQueue.io.enq.resp(i)
  }

  // load queue wiring
  loadQueue.io.brqRedirect <> io.brqRedirect
  loadQueue.io.loadIn <> io.loadIn
  loadQueue.io.storeIn <> io.storeIn
  loadQueue.io.loadDataForwarded <> io.loadDataForwarded
  loadQueue.io.needReplayFromRS <> io.needReplayFromRS
  loadQueue.io.ldout <> io.ldout
  loadQueue.io.rob <> io.rob
  loadQueue.io.rollback <> io.rollback
  loadQueue.io.dcache <> io.dcache
  loadQueue.io.release <> io.release
  loadQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  loadQueue.io.exceptionAddr.isStore := DontCare

  // store queue wiring
  // storeQueue.io <> DontCare
  storeQueue.io.brqRedirect <> io.brqRedirect
  storeQueue.io.storeIn <> io.storeIn
  storeQueue.io.storeDataIn <> io.storeDataIn
  storeQueue.io.sbuffer <> io.sbuffer
  storeQueue.io.mmioStout <> io.mmioStout
  storeQueue.io.rob <> io.rob
  storeQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  storeQueue.io.exceptionAddr.isStore := DontCare
  storeQueue.io.issuePtrExt <> io.issuePtrExt

  loadQueue.io.load_s1 <> io.forward
  storeQueue.io.forward <> io.forward // overlap forwardMask & forwardData, DO NOT CHANGE SEQUENCE

  loadQueue.io.loadViolationQuery <> io.loadViolationQuery

  storeQueue.io.sqempty <> io.sqempty

  io.exceptionAddr.vaddr := Mux(io.exceptionAddr.isStore, storeQueue.io.exceptionAddr.vaddr, loadQueue.io.exceptionAddr.vaddr)

  // naive uncache arbiter
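  // A single DCacheWordIO port is shared between the two queues. The request
  // channel is granted combinationally (the load queue wins when its request
  // is valid, otherwise the store queue drives the port), while pendingstate
  // remembers which queue owns the outstanding request so that the response
  // can be routed back to it. Only one uncached/MMIO access is in flight at a
  // time; the assertions below check that the two queues never contend.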
  val s_idle :: s_load :: s_store :: Nil = Enum(3)
  val pendingstate = RegInit(s_idle)

  switch(pendingstate){
    is(s_idle){
      when(io.uncache.req.fire()){
        pendingstate := Mux(loadQueue.io.uncache.req.valid, s_load, s_store)
      }
    }
    is(s_load){
      when(io.uncache.resp.fire()){
        pendingstate := s_idle
      }
    }
    is(s_store){
      when(io.uncache.resp.fire()){
        pendingstate := s_idle
      }
    }
  }

  loadQueue.io.uncache := DontCare
  storeQueue.io.uncache := DontCare
  loadQueue.io.uncache.resp.valid := false.B
  storeQueue.io.uncache.resp.valid := false.B
  when(loadQueue.io.uncache.req.valid){
    io.uncache.req <> loadQueue.io.uncache.req
  }.otherwise{
    io.uncache.req <> storeQueue.io.uncache.req
  }
  when(pendingstate === s_load){
    io.uncache.resp <> loadQueue.io.uncache.resp
  }.otherwise{
    io.uncache.resp <> storeQueue.io.uncache.resp
  }

  assert(!(loadQueue.io.uncache.req.valid && storeQueue.io.uncache.req.valid))
  assert(!(loadQueue.io.uncache.resp.valid && storeQueue.io.uncache.resp.valid))
  assert(!((loadQueue.io.uncache.resp.valid || storeQueue.io.uncache.resp.valid) && pendingstate === s_idle))

  io.lqFull := loadQueue.io.lqFull
  io.sqFull := storeQueue.io.sqFull

  val ldq_perf = loadQueue.perfEvents.map(_._1).zip(loadQueue.perfinfo.perfEvents.perf_events)
  val stq_perf = storeQueue.perfEvents.map(_._1).zip(storeQueue.perfinfo.perfEvents.perf_events)
  val perfEvents = ldq_perf ++ stq_perf
  val perf_list = storeQueue.perfinfo.perfEvents.perf_events ++ loadQueue.perfinfo.perfEvents.perf_events
  val perfinfo = IO(new Bundle(){
    val perfEvents = Output(new PerfEventsBundle(perf_list.length))
  })
  perfinfo.perfEvents.perf_events := perf_list
}
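// Note on the perf wiring above: perf_list (and hence perfinfo.perfEvents)
// concatenates the store queue's events before the load queue's, whereas the
// perfEvents val pairs event names load-queue-first; consumers of perfinfo
// should rely on the storeQueue-then-loadQueue ordering.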