package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import xiangshan.utils._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L //TODO: set reset vec
  val groupAlign = log2Up(FetchWidth * 4)
  def groupPC(pc: UInt): UInt = Cat(pc(VAddrBits-1, groupAlign), 0.U(groupAlign.W))
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirectInfo = Input(new RedirectInfo)
  val icacheReq = DecoupledIO(new FakeIcacheReq)
  val icacheResp = Flipped(DecoupledIO(new FakeIcacheResp))
}

class FakeBPU extends XSModule {
  val io = IO(new Bundle() {
    val redirectInfo = Input(new RedirectInfo)
    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)
    val predecode = Flipped(ValidIO(new Predecode))
  })

  io.btbOut.valid := false.B
  io.btbOut.bits <> DontCare
  io.tageOut.valid := false.B
  io.tageOut.bits <> DontCare
}


class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = if (EnableBPU) Module(new BPU) else Module(new FakeBPU)

  //-------------------------
  //    IF1  PC update
  //-------------------------
  //local
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if1_valid = !reset.asBool
  val if1_pc = RegInit(resetVector.U(VAddrBits.W))
  //next
  val if2_ready = WireInit(false.B)
  val if2_snpc = if1_pc + 32.U //sequential next PC: one fetch group (32 bytes) ahead. TODO: this is ugly
  val if1_ready = if2_ready

  //pipe fire
  val if1_fire = if1_valid && if1_ready
  //the PC register is updated on a redirect flush or when IF1 fires
  val if1_pcUpdate = io.redirectInfo.flush() || if1_fire

  when(RegNext(reset.asBool) && !reset.asBool){
  //when((GTimer() === 501.U)){ //TODO: this is ugly
    XSDebug("RESET....\n")
    if1_npc := resetVector.U(VAddrBits.W)
  } .otherwise{
    if1_npc := if2_snpc
  }

  when(if1_pcUpdate)
  {
    if1_pc := if1_npc
  }

  bpu.io.in.pc.valid := if1_fire
  bpu.io.in.pc.bits := if1_npc
  bpu.io.redirectInfo := io.redirectInfo

  XSDebug("[IF1]if1_valid:%d || if1_npc:0x%x || if1_pcUpdate:%d if1_pc:0x%x || if2_ready:%d", if1_valid, if1_npc, if1_pcUpdate, if1_pc, if2_ready)
  XSDebug(false, if1_fire, "------IF1->fire!!!")
  XSDebug(false, true.B, "\n")

  //-------------------------
  //    IF2  btb response
  //         icache visit
  //-------------------------
  //local
  val if2_valid = RegEnable(next = if1_valid, init = false.B, enable = if1_fire)
  val if2_pc = if1_pc
  val if2_btb_taken = bpu.io.btbOut.valid && bpu.io.btbOut.bits.redirect
  val if2_btb_insMask = bpu.io.btbOut.bits.instrValid
  val if2_btb_target = bpu.io.btbOut.bits.target

  //next
  val if3_ready = WireInit(false.B)

  //pipe fire
  val if2_fire = if2_valid && if3_ready && io.icacheReq.fire()
  if2_ready := if2_fire || !if2_valid //ready when firing or holding no valid entry

  io.icacheReq.valid := if2_valid
  io.icacheReq.bits.addr := if2_pc
  io.icacheReq.bits.flush := io.redirectInfo.flush()

  //BTB predicts taken in IF2: steer IF1 to the predicted target
  when(if2_valid && if2_btb_taken)
  {
    if1_npc := if2_btb_target
  }

  XSDebug("[IF2]if2_valid:%d || if2_pc:0x%x || if3_ready:%d ", if2_valid, if2_pc, if3_ready)
  //XSDebug("[IF2-BPU-out]if2_btbTaken:%d || if2_btb_insMask:%b || if2_btb_target:0x%x \n", if2_btb_taken, if2_btb_insMask.asUInt, if2_btb_target)
  XSDebug(false, if2_fire, "------IF2->fire!!!")
  XSDebug(false, true.B, "\n")
  XSDebug("[IF2-Icache-Req] icache_in_valid:%d icache_in_ready:%d\n", io.icacheReq.valid, io.icacheReq.ready)

  //-------------------------
  //    IF3  icache hit check
  //-------------------------
  //local
  val if3_valid = RegEnable(next = if2_valid, init = false.B, enable = if2_fire)
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_npc = RegEnable(if1_npc, if2_fire)
  val if3_btb_target = RegEnable(if2_btb_target, if2_fire)
  val if3_btb_taken = RegEnable(if2_btb_taken, if2_fire)

  //next
  val if4_ready = WireInit(false.B)

  //pipe fire
  val if3_fire = if3_valid && if4_ready
  if3_ready := if3_fire || !if3_valid

  XSDebug("[IF3]if3_valid:%d || if3_pc:0x%x if3_npc:0x%x || if4_ready:%d ", if3_valid, if3_pc, if3_npc, if4_ready)
  XSDebug(false, if3_fire, "------IF3->fire!!!")
  XSDebug(false, true.B, "\n")

  //-------------------------
  //    IF4  icache response
  //         RAS result
  //         target generate
  //-------------------------
  val if4_valid = RegEnable(next = if3_valid, init = false.B, enable = if3_fire)
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_npc = RegEnable(if3_npc, if3_fire)
  val if4_btb_target = RegEnable(if3_btb_target, if3_fire)
  val if4_btb_taken = RegEnable(if3_btb_taken, if3_fire)
  val if4_tage_target = bpu.io.tageOut.bits.target
  val if4_tage_taken = bpu.io.tageOut.valid && bpu.io.tageOut.bits.redirect
  val if4_tage_insMask = bpu.io.tageOut.bits.instrValid
  XSDebug("[IF4]if4_valid:%d || if4_pc:0x%x if4_npc:0x%x\n", if4_valid, if4_pc, if4_npc)
  //XSDebug("[IF4-TAGE-out]if4_tage_taken:%d || if4_btb_insMask:%b || if4_tage_target:0x%x \n", if4_tage_taken, if4_tage_insMask.asUInt, if4_tage_target)
  XSDebug("[IF4-ICACHE-RESP]icacheResp.valid:%d icacheResp.ready:%d\n", io.icacheResp.valid, io.icacheResp.ready)

  //TAGE predicts taken in IF4: steer IF1 to the predicted target
  when(if4_valid && io.icacheResp.fire() && if4_tage_taken)
  {
    if1_npc := if4_tage_target
  }

  //redirect: mispredict
  when(io.redirectInfo.flush()){
    if1_npc := io.redirectInfo.redirect.target
    if3_valid := false.B
    if4_valid := false.B
  }
  XSDebug(io.redirectInfo.flush(), "[IFU-REDIRECT] target:0x%x \n", io.redirectInfo.redirect.target.asUInt)

  //Output -> iBuffer
  //io.fetchPacket <> DontCare
  if4_ready := io.fetchPacket.ready && (io.icacheResp.valid || !if4_valid) && (GTimer() > 500.U)
  io.fetchPacket.valid := if4_valid && !io.redirectInfo.flush()
  io.fetchPacket.bits.instrs := io.icacheResp.bits.icacheOut
  //The mask has one bit per 2-byte slot in the fetch group. Shifting left by
  //if4_pc(2+log2Up(FetchWidth)-1, 1), the half-word offset of the start PC within
  //the group, clears the slots before the start PC; when TAGE predicts taken, the
  //predictor's instrValid vector (each bit widened to two half-word bits) masks
  //the slots as well.
  if (EnableBPU) {
    io.fetchPacket.bits.mask := Mux(if4_tage_taken,
      (Fill(FetchWidth*2, 1.U(1.W)) & Cat(if4_tage_insMask.map(i => Fill(2, i.asUInt))).asUInt) << if4_pc(2+log2Up(FetchWidth)-1, 1),
      Fill(FetchWidth*2, 1.U(1.W)) << if4_pc(2+log2Up(FetchWidth)-1, 1)
    )
  }
  else {
    //io.fetchPacket.bits.mask := Fill(FetchWidth*2, 1.U(1.W)) << if4_pc(2+log2Up(FetchWidth)-1, 1)
    io.fetchPacket.bits.mask := Fill(FetchWidth*2, 1.U(1.W)) //TODO: consider cross cacheline fetch
  }
  io.fetchPacket.bits.pc := if4_pc

  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] startPC:0x%x GroupPC:0x%x\n", if4_pc.asUInt, groupPC(if4_pc).asUInt)
  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instrmask %b\n", io.fetchPacket.bits.mask.asUInt)
  for (i <- 0 until FetchWidth) {
    //io.fetchPacket.bits.pnpc(i) := if1_npc
    //when TAGE predicts taken, the last predicted-valid instruction records the
    //predicted next PC (if1_npc); all other slots record the sequential next PC
    when (if4_tage_taken && i.U === OHToUInt(HighestBit(if4_tage_insMask.asUInt, FetchWidth))) {
      io.fetchPacket.bits.pnpc(i) := if1_npc
    }.otherwise {
      io.fetchPacket.bits.pnpc(i) := if4_pc + ((i + 1).U << 2.U) //use fetch PC
    }
    XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instruction %x pnpc:0x%x\n", io.fetchPacket.bits.instrs(i).asUInt, io.fetchPacket.bits.pnpc(i).asUInt)
  }
  io.fetchPacket.bits.hist := bpu.io.tageOut.bits.hist
  io.fetchPacket.bits.btbVictimWay := bpu.io.tageOut.bits.btbVictimWay
  io.fetchPacket.bits.predCtr := bpu.io.tageOut.bits.predCtr
  io.fetchPacket.bits.btbHitWay := bpu.io.tageOut.bits.btbHitWay
  io.fetchPacket.bits.tageMeta := bpu.io.tageOut.bits.tageMeta
  io.fetchPacket.bits.rasSp := bpu.io.tageOut.bits.rasSp
  io.fetchPacket.bits.rasTopCtr := bpu.io.tageOut.bits.rasTopCtr

  //to BPU
  bpu.io.predecode.valid := io.icacheResp.fire() && if4_valid
  bpu.io.predecode.bits <> io.icacheResp.bits.predecode
  bpu.io.predecode.bits.mask := Fill(FetchWidth, 1.U(1.W)) //TODO: consider RVC && consider cross cacheline fetch

  bpu.io.redirectInfo := io.redirectInfo

  io.icacheResp.ready := io.fetchPacket.ready && (GTimer() > 500.U)

}
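
// --------------------------------------------------------------------------
// Illustrative sketch, not part of the original design: the valid/ready
// handshake that IF1-IF4 above repeat, reduced to one generic pipeline stage.
// A stage fires when it holds valid data and the next stage is ready; it is
// ready when it is empty or its data leaves this cycle; a flush (e.g. a
// mispredict redirect) drops the in-flight entry. Module and signal names
// here are hypothetical.
// --------------------------------------------------------------------------
class PipeStageSketch(width: Int = 32) extends Module {
  val io = IO(new Bundle {
    val in    = Flipped(DecoupledIO(UInt(width.W)))
    val out   = DecoupledIO(UInt(width.W))
    val flush = Input(Bool())
  })

  val valid = RegInit(false.B)
  val data  = Reg(UInt(width.W))

  val outFire = io.out.valid && io.out.ready
  //accept new data when empty, or when the current data is leaving this cycle
  io.in.ready := !valid || outFire

  when(io.in.fire()) {
    valid := true.B
    data  := io.in.bits
  }.elsewhen(outFire) {
    valid := false.B
  }
  //flush has the last word: drop whatever is in flight
  when(io.flush) {
    valid := false.B
  }

  io.out.valid := valid
  io.out.bits  := data
}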