package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import utils._
import xiangshan._
import xiangshan.backend.JumpOpType

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L //TODO: set reset vec
  val groupAlign = log2Up(FetchWidth * 4)
  def groupPC(pc: UInt): UInt = Cat(pc(VAddrBits - 1, groupAlign), 0.U(groupAlign.W))
  def snpc(pc: UInt): UInt = pc + (1 << groupAlign).U
  // expand a per-instruction mask by duplicating each bit, matching the FetchWidth*2-bit fetch mask
  def maskExp(mask: UInt): UInt = Cat(mask.asBools.map(Fill(2, _)).reverse)
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirectInfo = Input(new RedirectInfo)
  val icacheReq = DecoupledIO(new FakeIcacheReq)
  val icacheResp = Flipped(DecoupledIO(new FakeIcacheResp))
}

class FakeBPU extends XSModule {
  val io = IO(new Bundle() {
    val redirectInfo = Input(new RedirectInfo)
    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)
    val predecode = Flipped(ValidIO(new Predecode))
  })

  io.btbOut.valid := true.B
  io.btbOut.bits <> DontCare
  io.btbOut.bits.redirect := false.B
  io.tageOut.valid := false.B
  io.tageOut.bits <> DontCare
}


class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = if (EnableBPU) Module(new BPU) else Module(new FakeBPU)

  //-------------------------
  //      IF1  PC update
  //-------------------------
  //local
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if1_valid = !reset.asBool
  val if1_pc = RegInit(resetVector.U(VAddrBits.W))
  //next
  val if2_ready = WireInit(false.B)
  val if2_snpc = snpc(if1_pc)
  val needflush = WireInit(false.B)

  //pipe fire
  val if1_fire = if1_valid && if2_ready || needflush
  val if1_pcUpdate = if1_fire || needflush

  when(RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  } .otherwise {
    if1_npc := if2_snpc
  }

  when(if1_pcUpdate) {
    if1_pc := if1_npc
  }

  bpu.io.in.pc.valid := if1_fire
  bpu.io.in.pc.bits := if1_npc
  bpu.io.redirectInfo := io.redirectInfo

  //-------------------------
  //      IF2  btb response
  //           icache visit
  //-------------------------
  //local
  val if2_valid = RegEnable(next = if1_valid, init = false.B, enable = if1_fire)
  val if2_pc = if1_pc
  val if2_btb_taken = bpu.io.btbOut.valid && bpu.io.btbOut.bits.redirect
  val if2_btb_insMask = bpu.io.btbOut.bits.instrValid
  val if2_btb_target = bpu.io.btbOut.bits.target
  //next
  val if3_ready = WireInit(false.B)

  //pipe fire
  val if2_fire = if2_valid && if3_ready && io.icacheReq.fire()
  if2_ready := if2_fire || !if2_valid

  io.icacheReq.valid := if2_valid
  io.icacheReq.bits.addr := if2_pc

  when(if2_valid && if2_btb_taken) {
    if1_npc := if2_btb_target
  }

  //-------------------------
  //      IF3  icache hit check
  //-------------------------
  //local
  val if3_valid = RegEnable(next = if2_valid, init = false.B, enable = if2_fire)
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_btb_target = RegEnable(if2_btb_target, if2_fire)
  val if3_btb_taken = RegEnable(if2_btb_taken, if2_fire)
  val if3_btb_insMask = RegEnable(if2_btb_insMask, if2_fire)
  //next
  val if4_ready = WireInit(false.B)

  //pipe fire
  val if3_fire = if3_valid && if4_ready
  if3_ready := if3_fire || !if3_valid

  //-------------------------
  //      IF4  icache response
  //           RAS result
  //           target result
  //-------------------------
  val if4_valid = RegEnable(next = if3_valid, init = false.B, enable = if3_fire)
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_btb_target = RegEnable(if3_btb_target, if3_fire)
  val if4_btb_taken = RegEnable(if3_btb_taken, if3_fire)
  val if4_btb_insMask = RegEnable(if3_btb_insMask, if3_fire)
  val if4_tage_target = bpu.io.tageOut.bits.target
  val if4_tage_taken = bpu.io.tageOut.valid && bpu.io.tageOut.bits.redirect
  val if4_tage_insMask = bpu.io.tageOut.bits.instrValid
  val if4_btb_missPre = WireInit(false.B)
  val if4_jal_flush = WireInit(false.B)
  val if4_start_ready = io.fetchPacket.ready && (GTimer() > 500.U)
  val if4_fetch_mask = Fill(FetchWidth * 2, 1.U(1.W)) //TODO: consider cross cacheline fetch

  if4_ready := (io.fetchPacket.fire() || !if4_valid) && if4_start_ready

  //redirect: use the tage prediction when tage predicts taken
  when(io.icacheResp.fire() && if4_tage_taken && if4_valid) {
    if1_npc := if4_tage_target
  }

  //redirect: tage result differs from btb
  if4_btb_missPre := if4_valid && ((if4_tage_taken ^ if4_btb_taken) || (if4_tage_taken && if4_btb_taken && (if4_tage_target =/= if4_btb_target)))

  //flush pipeline
  //-> backend redirect (frontend mispredict) (if BPD is enabled)
  //-> tage differs from btb (btb mispredict)
  //-> jal differs from btb (btb mispredict)
  if (EnableBPD) { needflush := if4_btb_missPre || io.redirectInfo.flush() || if4_jal_flush } //TODO: consider tage; for now only btb is available
  else { needflush := io.redirectInfo.flush() || if4_jal_flush }

  when(needflush) {
    if3_valid := false.B
    if4_valid := false.B
  }

  //flush ICache register
  io.icacheReq.bits.flush := needflush

  //IF4 to BPU
  bpu.io.predecode.valid := io.icacheResp.fire() && if4_valid
  bpu.io.predecode.bits <> io.icacheResp.bits.predecode
  bpu.io.predecode.bits.mask := Fill(FetchWidth, 1.U(1.W)) //TODO: consider RVC && consider cross cacheline fetch
  bpu.io.redirectInfo := io.redirectInfo
  io.icacheResp.ready := if4_start_ready

  //-------------------------
  //      Output fetch packet
  //      -> Ibuffer
  //-------------------------
  io.fetchPacket.valid := if4_valid && io.icacheResp.valid && !io.redirectInfo.flush()
  io.fetchPacket.bits.instrs := io.icacheResp.bits.icacheOut
  io.fetchPacket.bits.pc := if4_pc
  io.fetchPacket.bits.hist := bpu.io.tageOut.bits.hist
  io.fetchPacket.bits.predCtr := bpu.io.tageOut.bits.predCtr
  io.fetchPacket.bits.btbHitWay := bpu.io.tageOut.bits.btbHitWay
  io.fetchPacket.bits.tageMeta := bpu.io.tageOut.bits.tageMeta
  io.fetchPacket.bits.rasSp := bpu.io.tageOut.bits.rasSp
  io.fetchPacket.bits.rasTopCtr := bpu.io.tageOut.bits.rasTopCtr

  //fetch mask Mux
  if (EnableBPU) {
    io.fetchPacket.bits.mask := Mux(if4_tage_taken, if4_fetch_mask.asUInt & maskExp(if4_tage_insMask.asUInt),
                                Mux(if4_btb_taken,  if4_fetch_mask.asUInt & maskExp(if4_btb_insMask.asUInt),
                                                    if4_fetch_mask))
  }
  else {
    io.fetchPacket.bits.mask := Mux(if4_btb_taken, if4_fetch_mask.asUInt & maskExp(if4_btb_insMask.asUInt), if4_fetch_mask)
  }

  //fetch pnpc logic
  for (i <- 0 until FetchWidth) {
    if (EnableBPD) {
      when(if4_tage_taken && i.U === OHToUInt(HighestBit(if4_tage_insMask.asUInt, FetchWidth))) {
        io.fetchPacket.bits.pnpc(i) := if4_tage_target
      } .otherwise {
        io.fetchPacket.bits.pnpc(i) := if4_pc + ((i + 1).U << 2.U) //use fetch PC
      }
    }
    else {
      when(if4_btb_taken && i.U === OHToUInt(HighestBit(if4_btb_insMask.asUInt, FetchWidth))) {
        io.fetchPacket.bits.pnpc(i) := if4_btb_target
      } .otherwise {
        io.fetchPacket.bits.pnpc(i) := if4_pc + ((i + 1).U << 2.U)
      }
    }
    XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instruction %x pc:0x%x pnpc:0x%x\n", io.fetchPacket.bits.instrs(i).asUInt, io.fetchPacket.bits.pc + (4 * i).U, io.fetchPacket.bits.pnpc(i).asUInt)
  }


  //--------------------------------------------------------------
  //--------------------------------
  //      jal judgement logic
  //      fetch index calculation
  //      target calculation
  //--------------------------------
  //a jal needs no prediction: compute its target directly
  val predecMask = bpu.io.predecode.bits.mask
  val jalOH = LowestBit(Reverse(Cat(bpu.io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.jal || t === JumpOpType.call }).asUInt) & predecMask & if4_btb_insMask.asUInt, FetchWidth)
  val jalMask = Wire(Vec(FetchWidth, UInt(1.W)))
  (0 until FetchWidth).map(i => jalMask(i) := LowerMask(jalOH, FetchWidth)(i))
  val jalIns = Mux1H(jalOH.asUInt, io.fetchPacket.bits.instrs)
  val jalIdx = OHToUInt(jalOH)
  val hasJAL = jalOH.orR.asBool
  //reassemble the J-type immediate from the jal instruction, sign-extend it, and add it to the jal's PC
  val jalTarget = if4_pc + (jalIdx << 2.U) + SignExt(Cat(jalIns(31), jalIns(19, 12), jalIns(20), jalIns(30, 21), 0.U(1.W)), XLEN)
  if4_jal_flush := io.fetchPacket.fire() && hasJAL && (!if4_btb_taken || (if4_btb_taken && if4_btb_target =/= jalTarget))

  when(io.fetchPacket.fire() && hasJAL) {
    if1_npc := jalTarget
    io.fetchPacket.bits.pnpc(jalIdx) := jalTarget
    io.fetchPacket.bits.mask := Reverse(Cat(jalMask.map(i => Fill(2, i.asUInt))).asUInt)
  }
  //--------------------------------------------------------------

  //redirect: backend redirect: prediction miss
  when(io.redirectInfo.flush()) {
    if1_npc := io.redirectInfo.redirect.target
  }

  //-------------------------
  //      Debug Information
  //-------------------------
  //IF1
  XSDebug("[IF1] if1_valid:%d || if1_npc:0x%x || if1_pcUpdate:%d if1_pc:0x%x || if2_ready:%d", if1_valid, if1_npc, if1_pcUpdate, if1_pc, if2_ready)
  XSDebug(false, if1_fire, "------IF1->fire!!!")
  XSDebug(false, true.B, "\n")
  //IF2
  XSDebug("[IF2] if2_valid:%d || if2_pc:0x%x || if3_ready:%d ", if2_valid, if2_pc, if3_ready)
  XSDebug(false, if2_fire, "------IF2->fire!!!")
  XSDebug(false, true.B, "\n")
  XSDebug("[IF2-Icache-Req] icache_in_valid:%d icache_in_ready:%d\n", io.icacheReq.valid, io.icacheReq.ready)
  XSDebug("[IF2-BPU-out] if2_btb_taken:%d || if2_btb_insMask:%b || if2_btb_target:0x%x\n", if2_btb_taken, if2_btb_insMask.asUInt, if2_btb_target)
  //IF3
  XSDebug("[IF3] if3_valid:%d || if3_pc:0x%x || if4_ready:%d ", if3_valid, if3_pc, if4_ready)
  XSDebug(false, if3_fire, "------IF3->fire!!!")
  XSDebug(false, true.B, "\n")
  //IF4
  XSDebug("[IF4] if4_valid:%d || if4_pc:0x%x\n", if4_valid, if4_pc)
  XSDebug("[IF4-TAGE-out] if4_tage_taken:%d || if4_tage_insMask:%b || if4_tage_target:0x%x\n", if4_tage_taken, if4_tage_insMask.asUInt, if4_tage_target)
  XSDebug("[IF4-ICACHE-RESP] icacheResp.valid:%d icacheResp.ready:%d\n", io.icacheResp.valid, io.icacheResp.ready)
  //redirect
  XSDebug(io.redirectInfo.flush(), "[IFU-REDIRECT] target:0x%x\n", io.redirectInfo.redirect.target.asUInt)
  //out fetch packet
  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] startPC:0x%x GroupPC:0x%x\n", if4_pc.asUInt, groupPC(if4_pc).asUInt)
  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instrmask %b\n", io.fetchPacket.bits.mask.asUInt)
  //JAL
  XSDebug(if4_valid && hasJAL, "[IFU-JAL-hasJAL]: jalOH:%b jalMask:%b jalIns:%x jalTarget:0x%x\n", jalOH, jalMask.asUInt, jalIns, jalTarget)
  XSDebug(if4_valid && if4_jal_flush, "[IFU-JAL-needflush]: if4_btb_target:0x%x jalTarget:0x%x\n", if4_btb_target, jalTarget)

}