/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import chisel3.experimental.chiselName

// One entry of the Return Address Stack.
// Instead of pushing the same return address repeatedly for recursive calls,
// the top entry carries a saturating counter (see alloc_new below), so one
// entry can represent up to 255 nested calls to the same return address.
class RASEntry()(implicit p: Parameters) extends XSBundle {
    val retAddr = UInt(VAddrBits.W)  // predicted return target
    val ctr = UInt(8.W) // layer of nested call functions
}

// Return Address Stack predictor: pushes the fall-through address of each
// predicted call, and predicts the target of each return by popping.
// Speculative state is repaired on redirect via the recover_* interface.
@chiselName
class RAS(implicit p: Parameters) extends BasePredictor
{
    // Prediction output: the target to use when the current packet is a return.
    class RASResp extends Resp
    {
        val target =UInt(VAddrBits.W)
    }

    // Per-prediction metadata carried down the pipeline so a later redirect
    // can restore the stack pointer and top entry to their pre-speculation
    // values (see recover_sp / recover_top below).
    class RASBranchInfo extends Meta
    {
        val rasSp = UInt(log2Up(RasSize).W)
        val rasTop = new RASEntry
    }

    class RASIO extends DefaultBasePredictorIO
    {
        val is_ret = Input(Bool())                                   // current packet contains a predicted return
        val callIdx = Flipped(ValidIO(UInt(log2Ceil(PredictWidth).W))) // position of the call within the packet, valid when a call is predicted
        val isRVC = Input(Bool())                                    // call is a 2-byte compressed instruction
        val isLastHalfRVI = Input(Bool())                            // packet ends with the first half of a 4-byte instruction
        val redirect = Flipped(ValidIO(new Redirect))                // mispredict redirect used to repair speculative state
        val out = Output(new RASResp)
        val meta = Output(new RASBranchInfo)
    }


    def rasEntry() = new RASEntry

    // Convenience constructor for a fully-driven RASEntry wire.
    object RASEntry {
        def apply(retAddr: UInt, ctr: UInt): RASEntry = {
            val e = Wire(rasEntry())
            e.retAddr := retAddr
            e.ctr := ctr
            e
        }
    }

    override val io = IO(new RASIO)
    override val debug = true

    // The actual stack storage plus push/pop/recover datapath.
    // The same update() logic serves both the speculative path and the
    // redirect-recovery path; recover simply substitutes the checkpointed
    // sp/top for the current ones (see the update() call site below).
    @chiselName
    class RASStack(val rasSize: Int) extends XSModule {
        val io = IO(new Bundle {
            // speculative push/pop requests from the predict stage
            val push_valid = Input(Bool())
            val pop_valid = Input(Bool())
            val spec_new_addr = Input(UInt(VAddrBits.W))  // return address to push

            // recovery interface: checkpointed state from the redirect, plus
            // whether the mispredicted instruction itself was a call/return
            // that must be re-executed on the repaired stack
            val recover_sp = Input(UInt(log2Up(rasSize).W))
            val recover_top = Input(rasEntry())
            val recover_valid = Input(Bool())
            val recover_push = Input(Bool())
            val recover_pop = Input(Bool())
            val recover_new_addr = Input(UInt(VAddrBits.W))

            // current speculative state, exported as metadata for later recovery
            val sp = Output(UInt(log2Up(rasSize).W))
            val top = Output(rasEntry())
        })
        val debugIO = IO(new Bundle{
            val push_entry = Output(rasEntry())
            val alloc_new = Output(Bool())
            val sp = Output(UInt(log2Up(rasSize).W))
            val topRegister = Output(rasEntry())
            val out_mem = Output(Vec(RasSize, rasEntry()))
        })

        val stack = Mem(RasSize, new RASEntry)
        // sp points one above the top entry; topPtr (= sp - 1) and the `top`
        // register cache the top-of-stack so reads need not go through the Mem.
        val sp = RegInit(0.U(log2Up(rasSize).W))
        val top = RegInit(0.U.asTypeOf(new RASEntry))
        val topPtr = RegInit(0.U(log2Up(rasSize).W))

        // circular pointer arithmetic over [0, rasSize)
        def ptrInc(ptr: UInt) = Mux(ptr === (rasSize-1).U, 0.U, ptr + 1.U)
        def ptrDec(ptr: UInt) = Mux(ptr === 0.U, (rasSize-1).U, ptr - 1.U)

        // Allocate a fresh entry unless the pushed address equals the current
        // top (recursion) and the counter has room (ctr not saturated at 0xff).
        val alloc_new = io.spec_new_addr =/= top.retAddr || top.ctr.andR
        val recover_alloc_new = io.recover_new_addr =/= io.recover_top.retAddr || io.recover_top.ctr.andR

        // TODO: fix overflow and underflow bugs
        // Shared push/pop/restore datapath. When `recover` is set, the do_*
        // arguments carry checkpointed state instead of the live registers,
        // so sp/topPtr/top must be (re)written even in cases where the
        // speculative path could leave them untouched — hence the extra
        // `when (recover)` writes in the no-alloc and no-op branches.
        def update(recover: Bool)(do_push: Bool, do_pop: Bool, do_alloc_new: Bool,
                                  do_sp: UInt, do_top_ptr: UInt, do_new_addr: UInt,
                                  do_top: RASEntry) = {
            when (do_push) {
                when (do_alloc_new) {
                    // new top entry: advance sp and write (addr, ctr = 1)
                    sp := ptrInc(do_sp)
                    topPtr := do_sp
                    top.retAddr := do_new_addr
                    top.ctr := 1.U
                    stack.write(do_sp, RASEntry(do_new_addr, 1.U))
                }.otherwise {
                    // recursive push onto the same address: bump the counter in place
                    when (recover) {
                        sp := do_sp
                        topPtr := do_top_ptr
                        top.retAddr := do_top.retAddr
                    }
                    top.ctr := do_top.ctr + 1.U
                    stack.write(do_top_ptr, RASEntry(do_new_addr, do_top.ctr + 1.U))
                }
            }.elsewhen (do_pop) {
                when (do_top.ctr === 1.U) {
                    // last nested level: retire the entry and expose the one below
                    sp := ptrDec(do_sp)
                    topPtr := ptrDec(do_top_ptr)
                    top := stack.read(ptrDec(do_top_ptr))
                }.otherwise {
                    // nested return: just decrement the counter
                    when (recover) {
                        sp := do_sp
                        topPtr := do_top_ptr
                        top.retAddr := do_top.retAddr
                    }
                    top.ctr := do_top.ctr - 1.U
                    stack.write(do_top_ptr, RASEntry(do_top.retAddr, do_top.ctr - 1.U))
                }
            }.otherwise {
                // no push/pop; on recovery still restore the checkpointed state
                when (recover) {
                    sp := do_sp
                    topPtr := do_top_ptr
                    top := do_top
                    stack.write(do_top_ptr, do_top)
                }
            }
            // wrap-around on push = overflow; wrap-around on pop = underflow
            // (counted only; not prevented — see TODO above)
            XSPerfAccumulate("ras_overflow", do_push && do_alloc_new && ptrInc(do_sp) === 0.U)
            XSPerfAccumulate("ras_underflow", do_pop && do_top.ctr === 1.U && ptrDec(do_sp) === (rasSize-1).U)
        }

        // Recovery takes priority over the speculative request in the same
        // cycle; note topPtr is reconstructed as recover_sp - 1 since only sp
        // and top are checkpointed.
        update(io.recover_valid)(
            Mux(io.recover_valid, io.recover_push, io.push_valid),
            Mux(io.recover_valid, io.recover_pop, io.pop_valid),
            Mux(io.recover_valid, recover_alloc_new, alloc_new),
            Mux(io.recover_valid, io.recover_sp, sp),
            Mux(io.recover_valid, io.recover_sp - 1.U, topPtr),
            Mux(io.recover_valid, io.recover_new_addr, io.spec_new_addr),
            Mux(io.recover_valid, io.recover_top, top))

        io.sp := sp
        io.top := top

        // debug-only views of internal state
        debugIO.push_entry := RASEntry(io.spec_new_addr, Mux(alloc_new, 1.U, top.ctr + 1.U))
        debugIO.alloc_new := alloc_new
        debugIO.sp := sp
        debugIO.topRegister := top
        for (i <- 0 until RasSize) {
            debugIO.out_mem(i) := stack.read(i.U)
        }

    }

    val spec = Module(new RASStack(RasSize))
    val spec_ras = spec.io


    val spec_push = WireInit(false.B)
    val spec_pop = WireInit(false.B)
    // Return address of the predicted call: packet base + call offset +
    // instruction length (2 if compressed or only the first half of an RVI
    // instruction is in this packet, else 4).
    val jump_is_first = io.callIdx.bits === 0.U
    val call_is_last_half = io.isLastHalfRVI && jump_is_first
    val spec_new_addr = packetAligned(io.pc.bits) + (io.callIdx.bits << instOffsetBits.U) + Mux( (io.isRVC | call_is_last_half) && HasCExtension.B, 2.U, 4.U)
    spec_ras.push_valid := spec_push
    spec_ras.pop_valid := spec_pop
    spec_ras.spec_new_addr := spec_new_addr
    val spec_top_addr = spec_ras.top.retAddr

    spec_push := io.callIdx.valid && io.pc.valid
    spec_pop := io.is_ret && io.pc.valid

    // Redirect is registered, so recovery happens one cycle after the redirect.
    val redirect = RegNext(io.redirect)
    val copy_valid = redirect.valid
    val recover_cfi = redirect.bits.cfiUpdate

    // NOTE(review): level === 0 is assumed to select misprediction-type
    // redirects (as opposed to e.g. exception flushes) — confirm against the
    // Redirect bundle definition.
    val retMissPred = copy_valid && redirect.bits.level === 0.U && recover_cfi.pd.isRet
    val callMissPred = copy_valid && redirect.bits.level === 0.U && recover_cfi.pd.isCall
    // when we mispredict a call, we must redo a push operation
    // similarly, when we mispredict a return, we should redo a pop
    spec_ras.recover_valid := copy_valid
    spec_ras.recover_push := callMissPred
    spec_ras.recover_pop := retMissPred

    spec_ras.recover_sp := recover_cfi.rasSp
    spec_ras.recover_top := recover_cfi.rasEntry
    // return address of the mispredicted call = its pc + its length
    spec_ras.recover_new_addr := recover_cfi.pc + Mux(recover_cfi.pd.isRVC, 2.U, 4.U)

    // export current state as metadata for future recovery
    io.meta.rasSp := spec_ras.sp
    io.meta.rasTop := spec_ras.top

    io.out.target := spec_top_addr
    // TODO: back-up stack for ras
    // use checkpoint to recover RAS

    if (BPUDebug && debug) {
        val spec_debug = spec.debugIO
        XSDebug("----------------RAS----------------\n")
        XSDebug("  TopRegister: 0x%x   %d \n",spec_debug.topRegister.retAddr,spec_debug.topRegister.ctr)
        XSDebug("  index       addr           ctr \n")
        for(i <- 0 until RasSize){
            XSDebug("  (%d)   0x%x      %d",i.U,spec_debug.out_mem(i).retAddr,spec_debug.out_mem(i).ctr)
            when(i.U === spec_debug.sp){XSDebug(false,true.B,"   <----sp")}
            XSDebug(false,true.B,"\n")
        }
        XSDebug(spec_push, "(spec_ras)push  inAddr: 0x%x  inCtr: %d |  allocNewEntry:%d |   sp:%d \n",
            spec_new_addr,spec_debug.push_entry.ctr,spec_debug.alloc_new,spec_debug.sp.asUInt)
        XSDebug(spec_pop, "(spec_ras)pop outAddr: 0x%x \n",io.out.target)
        val redirectUpdate = redirect.bits.cfiUpdate
        XSDebug("copyValid:%d recover(SP:%d retAddr:%x ctr:%d) \n",
            copy_valid,redirectUpdate.rasSp,redirectUpdate.rasEntry.retAddr,redirectUpdate.rasEntry.ctr)
    }

}