/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants}
import difftest._

class AtomicsUnit(implicit p: Parameters) extends XSModule with MemoryOpConstants {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val storeDataIn = Flipped(Valid(new StoreDataBundle)) // src2 from rs
    val out = Decoupled(new ExuOutput)
    val dcache = new DCacheWordIO
    val dtlb = new TlbRequestIO
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val flush_sbuffer = new SbufferFlushBundle
    val rsFeedback = ValidIO(new RSFeedback)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val exceptionAddr = ValidIO(UInt(VAddrBits.W))
  })

  //-------------------------------------------------------
  // Atomics Memory Access FSM
  //-------------------------------------------------------
  val s_invalid :: s_tlb :: s_flush_sbuffer_req :: s_flush_sbuffer_resp :: s_cache_req :: s_cache_resp :: s_finish :: Nil = Enum(7)
  val state = RegInit(s_invalid)
  val addr_valid = RegInit(false.B)
  val data_valid = RegInit(false.B)
  val in = Reg(new ExuInput())
  val exceptionVec = RegInit(0.U.asTypeOf(ExceptionVec()))
  val atom_override_xtval = RegInit(false.B)
  // paddr after translation
  val paddr = Reg(UInt())
  val is_mmio = Reg(Bool())
  // dcache response data
  val resp_data = Reg(UInt())
  val resp_data_wire = WireInit(0.U)
  val is_lrsc_valid = Reg(Bool())

  // Difftest signals
  val paddr_reg = Reg(UInt(64.W))
  val data_reg = Reg(UInt(64.W))
  val mask_reg = Reg(UInt(8.W))
  val fuop_reg = Reg(UInt(8.W))

  io.exceptionAddr.valid := atom_override_xtval
  io.exceptionAddr.bits := in.src(0)

  // assign default values to output signals
  io.in.ready := false.B
  io.out.valid := false.B
  io.out.bits := DontCare

  io.dcache.req.valid := false.B
  io.dcache.req.bits := DontCare
  io.dcache.resp.ready := false.B

  io.dtlb.req.valid := false.B
  io.dtlb.req.bits := DontCare
  io.dtlb.resp.ready := false.B

  io.flush_sbuffer.valid := false.B

  XSDebug("state: %d\n", state)

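  // In s_invalid the unit waits for its two operands, which may arrive in either
  // order: the address (src(0)) comes with the uop on io.in, while the store data
  // (src(1)) arrives separately on io.storeDataIn. The FSM leaves s_invalid only
  // after both addr_valid and data_valid have been set.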
  when (state === s_invalid) {
    io.in.ready := true.B
    when (io.in.fire()) {
      in := io.in.bits
      in.src(1) := in.src(1) // leave src2 unchanged
      addr_valid := true.B
    }
    when (io.storeDataIn.fire()) {
      in.src(1) := io.storeDataIn.bits.data
      data_valid := true.B
    }
    when (data_valid && addr_valid) {
      state := s_tlb
      addr_valid := false.B
      data_valid := false.B
    }
  }

  // Send TLB feedback to the store issue queue.
  // We send the feedback right after we receive the request.
  // Also, we always treat an amo as a tlb hit,
  // since we will keep polling the tlb by ourselves.
  io.rsFeedback.valid := RegNext(RegNext(io.in.valid))
  io.rsFeedback.bits.hit := true.B
  io.rsFeedback.bits.rsIdx := RegEnable(io.rsIdx, io.in.valid)
  io.rsFeedback.bits.flushState := DontCare
  io.rsFeedback.bits.sourceType := DontCare

  // tlb translation: drive the dtlb handshake and deal with exceptions
  when (state === s_tlb) {
    // send req to dtlb
    // keep firing until tlb hit
    io.dtlb.req.valid := true.B
    io.dtlb.req.bits.vaddr := in.src(0)
    io.dtlb.req.bits.roqIdx := in.uop.roqIdx
    io.dtlb.resp.ready := true.B
    val is_lr = in.uop.ctrl.fuOpType === LSUOpType.lr_w || in.uop.ctrl.fuOpType === LSUOpType.lr_d
    io.dtlb.req.bits.cmd := Mux(is_lr, TlbCmd.atom_read, TlbCmd.atom_write)
    io.dtlb.req.bits.debug.pc := in.uop.cf.pc
    io.dtlb.req.bits.debug.isFirstIssue := false.B

    when (io.dtlb.resp.fire && !io.dtlb.resp.bits.miss) {
      // exception handling
      val addrAligned = LookupTree(in.uop.ctrl.fuOpType(1,0), List(
        "b00".U -> true.B,                   // b
        "b01".U -> (in.src(0)(0) === 0.U),   // h
        "b10".U -> (in.src(0)(1,0) === 0.U), // w
        "b11".U -> (in.src(0)(2,0) === 0.U)  // d
      ))
      exceptionVec(storeAddrMisaligned) := !addrAligned
      exceptionVec(storePageFault) := io.dtlb.resp.bits.excp.pf.st
      exceptionVec(loadPageFault) := io.dtlb.resp.bits.excp.pf.ld
      exceptionVec(storeAccessFault) := io.dtlb.resp.bits.excp.af.st
      exceptionVec(loadAccessFault) := io.dtlb.resp.bits.excp.af.ld
      val exception = !addrAligned ||
        io.dtlb.resp.bits.excp.pf.st ||
        io.dtlb.resp.bits.excp.pf.ld ||
        io.dtlb.resp.bits.excp.af.st ||
        io.dtlb.resp.bits.excp.af.ld
      is_mmio := io.dtlb.resp.bits.mmio
      when (exception) {
        // if there is an exception, there is no need to execute the access
        state := s_finish
        atom_override_xtval := true.B
      } .otherwise {
        paddr := io.dtlb.resp.bits.paddr
        state := s_flush_sbuffer_req
      }
    }
  }

  when (state === s_flush_sbuffer_req) {
    io.flush_sbuffer.valid := true.B
    state := s_flush_sbuffer_resp
  }

  when (state === s_flush_sbuffer_resp) {
    when (io.flush_sbuffer.empty) {
      state := s_cache_req
    }
  }

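  // s_cache_req: the sbuffer has been drained, so issue the atomic access to the
  // dcache at the translated physical address. The fuOpType is mapped to the
  // corresponding LR/SC/AMO memory command below; the word and doubleword variants
  // share the same command, and the access size in fuOpType(1,0) is passed to
  // genWdata/genWmask to form the request data and byte mask.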
  when (state === s_cache_req) {
    io.dcache.req.valid := true.B
    io.dcache.req.bits.cmd := LookupTree(in.uop.ctrl.fuOpType, List(
      LSUOpType.lr_w -> M_XLR,
      LSUOpType.sc_w -> M_XSC,
      LSUOpType.amoswap_w -> M_XA_SWAP,
      LSUOpType.amoadd_w -> M_XA_ADD,
      LSUOpType.amoxor_w -> M_XA_XOR,
      LSUOpType.amoand_w -> M_XA_AND,
      LSUOpType.amoor_w -> M_XA_OR,
      LSUOpType.amomin_w -> M_XA_MIN,
      LSUOpType.amomax_w -> M_XA_MAX,
      LSUOpType.amominu_w -> M_XA_MINU,
      LSUOpType.amomaxu_w -> M_XA_MAXU,

      LSUOpType.lr_d -> M_XLR,
      LSUOpType.sc_d -> M_XSC,
      LSUOpType.amoswap_d -> M_XA_SWAP,
      LSUOpType.amoadd_d -> M_XA_ADD,
      LSUOpType.amoxor_d -> M_XA_XOR,
      LSUOpType.amoand_d -> M_XA_AND,
      LSUOpType.amoor_d -> M_XA_OR,
      LSUOpType.amomin_d -> M_XA_MIN,
      LSUOpType.amomax_d -> M_XA_MAX,
      LSUOpType.amominu_d -> M_XA_MINU,
      LSUOpType.amomaxu_d -> M_XA_MAXU
    ))

    io.dcache.req.bits.addr := paddr
    io.dcache.req.bits.data := genWdata(in.src(1), in.uop.ctrl.fuOpType(1,0))
    // TODO: atomics do need mask: fix mask
    io.dcache.req.bits.mask := genWmask(paddr, in.uop.ctrl.fuOpType(1,0))
    io.dcache.req.bits.id := DontCare

    when (io.dcache.req.fire()) {
      state := s_cache_resp
      paddr_reg := io.dcache.req.bits.addr
      data_reg := io.dcache.req.bits.data
      mask_reg := io.dcache.req.bits.mask
      fuop_reg := in.uop.ctrl.fuOpType
    }
  }

  when (state === s_cache_resp) {
    io.dcache.resp.ready := true.B
    when (io.dcache.resp.fire()) {
      is_lrsc_valid := io.dcache.resp.bits.id
      val rdata = io.dcache.resp.bits.data
      val rdataSel = LookupTree(paddr(2, 0), List(
        "b000".U -> rdata(63, 0),
        "b001".U -> rdata(63, 8),
        "b010".U -> rdata(63, 16),
        "b011".U -> rdata(63, 24),
        "b100".U -> rdata(63, 32),
        "b101".U -> rdata(63, 40),
        "b110".U -> rdata(63, 48),
        "b111".U -> rdata(63, 56)
      ))

      resp_data_wire := LookupTree(in.uop.ctrl.fuOpType, List(
        LSUOpType.lr_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.sc_w -> rdata,
        LSUOpType.amoswap_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoadd_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoxor_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoand_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoor_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomin_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomax_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amominu_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomaxu_w -> SignExt(rdataSel(31, 0), XLEN),

        LSUOpType.lr_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.sc_d -> rdata,
        LSUOpType.amoswap_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoadd_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoxor_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoand_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoor_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomin_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomax_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amominu_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomaxu_d -> SignExt(rdataSel(63, 0), XLEN)
      ))

      resp_data := resp_data_wire
      state := s_finish
    }
  }

  when (state === s_finish) {
    io.out.valid := true.B
    io.out.bits.uop := in.uop
    io.out.bits.uop.cf.exceptionVec := exceptionVec
    io.out.bits.uop.diffTestDebugLrScValid := is_lrsc_valid
    io.out.bits.data := resp_data
    io.out.bits.redirectValid := false.B
    io.out.bits.redirect := DontCare
    io.out.bits.debug.isMMIO := is_mmio
    io.out.bits.debug.paddr := paddr
    when (io.out.fire()) {
      XSDebug("atomics writeback: pc %x data %x\n", io.out.bits.uop.cf.pc, io.dcache.resp.bits.data)
      state := s_invalid
    }
  }

  when (io.redirect.valid || io.flush) {
    atom_override_xtval := false.B
  }

  if (!env.FPGAPlatform) {
    val difftest = Module(new DifftestAtomicEvent)
    difftest.io.clock := clock
    difftest.io.coreid := hardId.U
    difftest.io.atomicResp := io.dcache.resp.fire()
    difftest.io.atomicAddr := paddr_reg
    difftest.io.atomicData := data_reg
    difftest.io.atomicMask := mask_reg
    difftest.io.atomicFuop := fuop_reg
    difftest.io.atomicOut := resp_data_wire
  }
}