package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import xiangshan.utils._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L // TODO: set reset vec
  val groupAlign = log2Up(FetchWidth * 4)
  def groupPC(pc: UInt): UInt = Cat(pc(VAddrBits-1, groupAlign), 0.U(groupAlign.W))
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirectInfo = Input(new RedirectInfo)
  val icacheReq = DecoupledIO(new FakeIcacheReq)
  val icacheResp = Flipped(DecoupledIO(new FakeIcacheResp))
}

class FakeBPU extends XSModule {
  val io = IO(new Bundle() {
    val redirectInfo = Input(new RedirectInfo)
    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)
    val predecode = Flipped(ValidIO(new Predecode))
  })

  io.btbOut.valid := false.B
  io.btbOut.bits <> DontCare
  io.tageOut.valid := false.B
  io.tageOut.bits <> DontCare
}


class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = if (EnableBPU) Module(new BPU) else Module(new FakeBPU)

  //-------------------------
  //  IF1  PC update
  //-------------------------
  //local
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if1_valid = !reset.asBool && (GTimer() > 500.U) // TODO: this is ugly
  val if1_pc = RegInit(resetVector.U(VAddrBits.W))
  //next
  val if2_ready = WireInit(false.B)
  val if2_snpc = if1_pc + 32.U // TODO: this is ugly
  val if1_ready = if2_ready

  //pipe fire
  val if1_fire = if1_valid && if1_ready
  val if1_pcUpdate = io.redirectInfo.flush() || if1_fire

  when(RegNext(reset.asBool) && !reset.asBool){
  //when((GTimer() === 501.U)){ //TODO: this is ugly
    XSDebug("RESET....\n")
    if1_npc := resetVector.U(VAddrBits.W)
  } .elsewhen(GTimer() === 501.U){ //TODO: this may cause bug
    if1_npc := resetVector.U(VAddrBits.W)
  } .otherwise{
    if1_npc := if2_snpc
  }

  when(if1_pcUpdate)
  {
    if1_pc := if1_npc
  }

  bpu.io.in.pc.valid := if1_fire
  bpu.io.in.pc.bits := if1_npc
  bpu.io.redirectInfo := io.redirectInfo

  XSDebug("[IF1]if1_valid:%d || if1_npc:0x%x || if1_pcUpdate:%d if1_pc:0x%x || if2_ready:%d", if1_valid, if1_npc, if1_pcUpdate, if1_pc, if2_ready)
  XSDebug(false, if1_fire, "------IF1->fire!!!")
  XSDebug(false, true.B, "\n")

  //-------------------------
  //  IF2  btb response
  //       icache visit
  //-------------------------
  //local
  val if2_valid = RegEnable(next = if1_valid, init = false.B, enable = if1_fire)
  val if2_pc = if1_pc
  val if2_btb_taken = bpu.io.btbOut.valid && bpu.io.btbOut.bits.redirect
  val if2_btb_insMask = bpu.io.btbOut.bits.instrValid
  val if2_btb_target = bpu.io.btbOut.bits.target

  //next
  val if3_ready = WireInit(false.B)

  //pipe fire
  val if2_fire = if2_valid && if3_ready && io.icacheReq.fire()
  if2_ready := (if2_fire) || !if2_valid

  io.icacheReq.valid := if2_valid
  io.icacheReq.bits.addr := if2_pc
  io.icacheReq.bits.flush := io.redirectInfo.flush()

  when(if2_valid && if2_btb_taken)
  {
    if1_npc := if2_btb_target
  }

  XSDebug("[IF2]if2_valid:%d || if2_pc:0x%x || if3_ready:%d ", if2_valid, if2_pc, if3_ready)
  //XSDebug("[IF2-BPU-out]if2_btbTaken:%d || if2_btb_insMask:%b || if2_btb_target:0x%x \n", if2_btb_taken, if2_btb_insMask.asUInt, if2_btb_target)
  XSDebug(false, if2_fire, "------IF2->fire!!!")
  XSDebug(false, true.B, "\n")
  XSDebug("[IF2-Icache-Req] icache_in_valid:%d icache_in_ready:%d\n", io.icacheReq.valid, io.icacheReq.ready)

  //-------------------------
  //  IF3  icache hit check
  //-------------------------
  //local
  val if3_valid = RegEnable(next = if2_valid, init = false.B, enable = if2_fire)
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_npc = RegEnable(if1_npc, if2_fire)
  val if3_btb_target = RegEnable(if2_btb_target, if2_fire)
  val if3_btb_taken = RegEnable(if2_btb_taken, if2_fire)

  //next
  val if4_ready = WireInit(false.B)

  //pipe fire
  val if3_fire = if3_valid && if4_ready
  if3_ready := if3_fire || !if3_valid


  XSDebug("[IF3]if3_valid:%d || if3_pc:0x%x if3_npc:0x%x || if4_ready:%d ", if3_valid, if3_pc, if3_npc, if4_ready)
  XSDebug(false, if3_fire, "------IF3->fire!!!")
  XSDebug(false, true.B, "\n")

  //-------------------------
  //  IF4  icache response
  //       RAS result
  //       target generate
  //-------------------------
  val if4_valid = RegEnable(next = if3_valid, init = false.B, enable = if3_fire)
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_npc = RegEnable(if3_npc, if3_fire)
  val if4_btb_target = RegEnable(if3_btb_target, if3_fire)
  val if4_btb_taken = RegEnable(if3_btb_taken, if3_fire)
  val if4_tage_target = bpu.io.tageOut.bits.target
  val if4_tage_taken = bpu.io.tageOut.valid && bpu.io.tageOut.bits.redirect
  val if4_tage_insMask = bpu.io.tageOut.bits.instrValid
  XSDebug("[IF4]if4_valid:%d || if4_pc:0x%x if4_npc:0x%x\n", if4_valid, if4_pc, if4_npc)
  //XSDebug("[IF4-TAGE-out]if4_tage_taken:%d || if4_tage_insMask:%b || if4_tage_target:0x%x \n", if4_tage_taken, if4_tage_insMask.asUInt, if4_tage_target)
  XSDebug("[IF4-ICACHE-RESP]icacheResp.valid:%d icacheResp.ready:%d\n", io.icacheResp.valid, io.icacheResp.ready)

  when(if4_valid && io.icacheResp.fire() && if4_tage_taken)
  {
    if1_npc := if4_tage_target
  }


  //redirect: mispredict
  when(io.redirectInfo.flush()){
    if1_npc := io.redirectInfo.redirect.target
    if3_valid := false.B
    if4_valid := false.B
  }
  XSDebug(io.redirectInfo.flush(), "[IFU-REDIRECT] target:0x%x \n", io.redirectInfo.redirect.target.asUInt)

  //Output -> iBuffer
  //io.fetchPacket <> DontCare
  if4_ready := io.fetchPacket.ready && (io.icacheResp.valid || !if4_valid)
  io.fetchPacket.valid := if4_valid && !io.redirectInfo.flush()
  io.fetchPacket.bits.instrs := io.icacheResp.bits.icacheOut
  if (EnableBPU) {
    io.fetchPacket.bits.mask := Mux(if4_tage_taken,
      Fill(FetchWidth*2, 1.U(1.W)) & Reverse(Cat(if4_tage_insMask.map(i => Fill(2, i.asUInt)))).asUInt,
      Fill(FetchWidth*2, 1.U(1.W))
    )
  }
  else {
    //io.fetchPacket.bits.mask := Fill(FetchWidth*2, 1.U(1.W)) << if4_pc(2+log2Up(FetchWidth)-1, 1)
    io.fetchPacket.bits.mask := Fill(FetchWidth*2, 1.U(1.W)) //TODO: consider cross-cacheline fetch
  }
  io.fetchPacket.bits.pc := if4_pc

  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] startPC:0x%x GroupPC:0x%x\n", if4_pc.asUInt, groupPC(if4_pc).asUInt)
  XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instrmask %b\n", io.fetchPacket.bits.mask.asUInt)
  for (i <- 0 until FetchWidth) {
    //io.fetchPacket.bits.pnpc(i) := if1_npc
    when (if4_tage_taken && i.U === OHToUInt(HighestBit(if4_tage_insMask.asUInt, FetchWidth))) {
      io.fetchPacket.bits.pnpc(i) := if1_npc
    }.otherwise {
      io.fetchPacket.bits.pnpc(i) := if4_pc + ((i + 1).U << 2.U) //use fetch PC
    }
    XSDebug(io.fetchPacket.fire(), "[IFU-Out-FetchPacket] instruction %x pnpc:0x%x\n", io.fetchPacket.bits.instrs(i).asUInt, io.fetchPacket.bits.pnpc(i).asUInt)
  }
  io.fetchPacket.bits.hist := bpu.io.tageOut.bits.hist
  io.fetchPacket.bits.btbVictimWay := bpu.io.tageOut.bits.btbVictimWay
  io.fetchPacket.bits.predCtr := bpu.io.tageOut.bits.predCtr
  io.fetchPacket.bits.btbHitWay := bpu.io.tageOut.bits.btbHitWay
  io.fetchPacket.bits.tageMeta := bpu.io.tageOut.bits.tageMeta
  io.fetchPacket.bits.rasSp := bpu.io.tageOut.bits.rasSp
  io.fetchPacket.bits.rasTopCtr := bpu.io.tageOut.bits.rasTopCtr

  //to BPU
  bpu.io.predecode.valid := io.icacheResp.fire() && if4_valid
  bpu.io.predecode.bits <> io.icacheResp.bits.predecode
  bpu.io.predecode.bits.mask := Fill(FetchWidth, 1.U(1.W)) //TODO: consider RVC and cross-cacheline fetch

  bpu.io.redirectInfo := io.redirectInfo

  io.icacheResp.ready := io.fetchPacket.ready

}
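
// ---------------------------------------------------------------------------
// Elaboration sketch (an addition, not part of the upstream file): a minimal
// entry point for emitting Verilog of a standalone IFU. It assumes the rest of
// the XiangShan frontend (XSModule, BPU, FetchPacket, the fake icache bundles)
// is available on the classpath and that the build uses a chisel3 release that
// provides chisel3.stage.ChiselStage (3.3+); on older releases
// chisel3.Driver.execute is the rough equivalent. The object name IFUMain is
// hypothetical.
// ---------------------------------------------------------------------------
object IFUMain extends App {
  // Generate Verilog for the IFU; extra command-line arguments (e.g. the
  // target directory) are forwarded to the Chisel/FIRRTL stage.
  (new chisel3.stage.ChiselStage).emitVerilog(new IFU, args)
}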