package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName
import freechips.rocketchip.tile.HasLazyRoCC

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x80000000L // TODO: set reset vector
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val instBytes = if (HasCExtension) 2 else 4
  val instOffsetBits = log2Ceil(instBytes)
  val groupBytes = 64 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val groupWidth = groupBytes / instBytes
  val packetBytes = PredictWidth * instBytes
  val packetOffsetBits = log2Ceil(packetBytes)
  def offsetInPacket(pc: UInt) = pc(packetOffsetBits-1, instOffsetBits)
  def packetIdx(pc: UInt) = pc(VAddrBits-1, log2Ceil(packetBytes))
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  def packetAligned(pc: UInt) = align(pc, packetBytes)
  def mask(pc: UInt): UInt = ((~(0.U(PredictWidth.W))) << offsetInPacket(pc))(PredictWidth-1, 0)
  def snpc(pc: UInt): UInt = packetAligned(pc) + packetBytes.U
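
  // Worked example of the two helpers above -- illustration only, not
  // hardware. Assuming PredictWidth = 8 and the C extension enabled
  // (instBytes = 2, so packetBytes = 16):
  //   pc = 0x80000006  ->  offsetInPacket(pc) = 3
  //   mask(pc) = b11111000   (the three slots before pc are masked off)
  //   snpc(pc) = 0x80000010  (start of the next sequential packet)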

  val enableGhistRepair = true
  val IFUDebug = true
}

class GlobalHistory extends XSBundle {
  val predHist = UInt(HistoryLength.W)
  def update(sawNTBr: Bool, takenOnBr: Bool, hist: UInt = predHist): GlobalHistory = {
    val g = Wire(new GlobalHistory)
    val shifted = takenOnBr || sawNTBr
    g.predHist := Mux(shifted, (hist << 1) | takenOnBr.asUInt, hist)
    g
  }

  final def === (that: GlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: GlobalHistory): Bool = !(this === that)

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GlobalHistory] hist=${Binary(predHist)}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}


class IFUIO extends XSBundle {
  // to ibuffer
  val fetchPacket = DecoupledIO(new FetchPacket)
  // from backend
  val redirect = Flipped(ValidIO(UInt(VAddrBits.W)))
  val cfiUpdateInfo = Flipped(ValidIO(new CfiUpdateInfo))
  // to icache
  val icacheMemGrant = Flipped(DecoupledIO(new L1plusCacheResp))
  val fencei = Input(Bool())
  // from icache
  val icacheMemAcq = DecoupledIO(new L1plusCacheReq)
  val l1plusFlush = Output(Bool())
  val prefetchTrainReq = ValidIO(new IcacheMissReq)
  // to tlb
  val sfence = Input(new SfenceBundle)
  val tlbCsr = Input(new TlbCsrBundle)
  // from tlb
  val ptw = new TlbPtwIO
  // icache uncache
  val mmio_acquire = DecoupledIO(new InsUncacheReq)
  val mmio_grant = Flipped(DecoupledIO(new InsUncacheResp))
  val mmio_flush = Output(Bool())
}

class PrevHalfInstr extends XSBundle {
  val taken = Bool()
  val ghInfo = new GlobalHistory()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val npc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val meta = new BpuMeta
}

@chiselName
class IFU extends XSModule with HasIFUConst {
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val icache = Module(new ICache)

  io.ptw <> TLB(
    in = Seq(icache.io.tlb),
    sfence = io.sfence,
    csr = io.tlbCsr,
    width = 1,
    isDtlb = false,
    shouldBlock = true
  )

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val icacheResp = icache.io.resp.bits

  if4_flush := io.redirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if2_valid = RegInit(init = false.B)
  val if2_allReady = WireInit(if2_ready && icache.io.req.ready)
  val if1_fire = (if1_valid && if2_allReady) && (icache.io.tlb.resp.valid || !if2_valid)
  val if1_can_go = if1_fire || if2_flush

  val if1_gh, if2_gh, if3_gh, if4_gh = Wire(new GlobalHistory)
  val if2_predicted_gh, if3_predicted_gh, if4_predicted_gh = Wire(new GlobalHistory)
  val final_gh = RegInit(0.U.asTypeOf(new GlobalHistory))
  val final_gh_bypass = WireInit(0.U.asTypeOf(new GlobalHistory))
  val flush_final_gh = WireInit(false.B)

  //********************** IF2 ****************************//
  val if2_allValid = if2_valid && icache.io.tlb.resp.valid
  val if3_ready = WireInit(false.B)
  val if2_fire = (if2_valid && if3_ready) && icache.io.tlb.resp.valid
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_can_go)
  val if2_snpc = snpc(if2_pc)
  val if2_predHist = RegEnable(if1_gh.predHist, enable = if1_can_go)
  if2_ready := if3_ready || !if2_valid
  when (if1_can_go) { if2_valid := true.B }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire) { if2_valid := false.B }

  val npcGen = new PriorityMuxGenerator[UInt]
  npcGen.register(true.B, RegNext(if1_npc), Some("stallPC"))
  val if2_bp = bpu.io.out(0)

  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal

  npcGen.register(if2_valid, Mux(if2_bp.taken, if2_bp.target, if2_snpc), Some("if2_target"))

  if2_predicted_gh := if2_gh.update(if2_bp.hasNotTakenBrs, if2_bp.takenOnBr)

  //********************** IF3 ****************************//
  // if3 should wait for the instruction resp to arrive
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_allValid = if3_valid && icache.io.resp.valid
  val if3_fire = if3_allValid && if4_ready
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_snpc = RegEnable(if2_snpc, if2_fire)
  val if3_predHist = RegEnable(if2_predHist, enable = if2_fire)
  if3_ready := if4_ready && icache.io.resp.valid || !if3_valid
  when (if3_flush) {
    if3_valid := false.B
  }.elsewhen (if2_fire && !if2_flush) {
    if3_valid := true.B
  }.elsewhen (if3_fire) {
    if3_valid := false.B
  }

  val if3_bp = bpu.io.out(1)
  if3_predicted_gh := if3_gh.update(if3_bp.hasNotTakenBrs, if3_bp.takenOnBr)


  val prevHalfInstrReq = WireInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid && HasCExtension.B

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))

  // a 32-bit instr crosses 2 pages, and the upper 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid && HasCExtension.B
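
  // A sketch of the cross-packet case handled below (addresses are
  // illustrative only). With the C extension, a 32-bit instruction may
  // start in the last two bytes of one fetch packet:
  //   packet A covers 0x1000-0x100f; a 32-bit instr starts at 0x100e
  //   -> its lower half is saved as prevHalfInstr with npc = 0x1010;
  //   packet B, fetched at 0x1010, supplies the upper half, and the two
  //   halves are recombined via icache.io.prev (wired up further down).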

  // the first half of an RVI instruction waits here until it meets its other half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && if3_prevHalfInstr.bits.npc === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }.elsewhen (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.valid := true.B
  }.elsewhen (if3_prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.bits := prevHalfInstrReq.bits
  }
  // when the BP signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

  class IF3_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val pc = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := !io.if2_valid || io.if2_valid && io.if2_pc =/= io.pc
  }
  def if3_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF3_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.pc := pc
    comp.io.if2_valid := if2_valid
    comp.io.res
  }

  val if3_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => !if3_pendingPrevHalfInstr && if3_bp.realTakens(i) && if3_nextValidPCNotEquals(if3_bp.targets(i))))
  val if3_prevHalfMetRedirect = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.bits.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.npc)
  val if3_predTakenRedirect = ParallelOR(if3_predTakenRedirectVec)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(if3_snpc)
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  // val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_valid && (
    // prevHalf is consumed but the next packet is not where it was meant to be
    // we do not handle this condition because of the burden of building a correct GHInfo
    // prevHalfMetRedirect ||
    // prevHalf does not match if3_pc and the next fetch packet is not snpc
    if3_prevHalfNotMetRedirect && HasCExtension.B ||
    // predicted taken, and the next fetch packet is not the predicted target
    if3_predTakenRedirect ||
    // predicted not taken, and the next fetch packet is not snpc
    if3_predNotTakenRedirect
    // GHInfo from the last pred does not correspond to this packet
    // if3_ghInfoNotIdenticalRedirect
  )

  val if3_target = WireInit(if3_snpc)

  if3_target := Mux1H(Seq((if3_prevHalfNotMetRedirect -> if3_prevHalfInstr.bits.npc),
                          (if3_predTakenRedirect -> if3_bp.target),
                          (if3_predNotTakenRedirect -> if3_snpc)))

  npcGen.register(if3_redirect, if3_target, Some("if3_target"))
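
  // Note on the Mux1H above: Mux1H assumes its selects are one-hot. The
  // three redirect causes should be mutually exclusive by construction --
  // the prevHalf case requires if3_pendingPrevHalfInstr while the other
  // two require its negation, and (assuming if3_bp.taken is the OR of
  // if3_bp.realTakens) taken and not-taken cannot fire together.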


  //********************** IF4 ****************************//
  val if4_pd = RegEnable(icache.io.pd_out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.bits.ipf, if3_fire)
  val if4_acf = RegEnable(icacheResp.acf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_snpc = RegEnable(if3_snpc, if3_fire)
  // this is the real mask given by the icache
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)


  val if4_predHist = RegEnable(if3_predHist, enable = if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (io.fetchPacket.ready && !hasPrevHalfInstrReq || !if4_valid) && GTimer() > 500.U
  when (if4_flush) {
    if4_valid := false.B
  }.elsewhen (if3_fire && !if3_flush) {
    if4_valid := Mux(if3_pendingPrevHalfInstr, if3_prevHalfInstrMet, true.B)
  }.elsewhen (if4_fire) {
    if4_valid := false.B
  }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  if4_predicted_gh := if4_gh.update(if4_bp.hasNotTakenBrs, if4_bp.takenOnBr)

  def jal_offset(inst: UInt, rvc: Bool): SInt = {
    Mux(rvc,
      Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)).asSInt(),
      Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)).asSInt()
    )
  }
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => (if4_pd.pc(i).asSInt + jal_offset(if4_instrs(i), if4_pd.pd(i).isRVC)).asUInt))

  (0 until PredictWidth).foreach { i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell the BPU the prediction of the prev half,
  // because predictions are indexed by the start of each instruction
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid && HasCExtension.B
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.bits.taken)
  when (if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }.elsewhen (if3_prevHalfConsumed) {
    if4_prevHalfInstr.valid := if3_prevHalfInstr.valid
  }.elsewhen (if4_prevHalfConsumed) {
    if4_prevHalfInstr.valid := false.B
  }

  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr.bits := if3_prevHalfInstr.bits
  }

  prevHalfInstrReq.valid := if4_fire && if4_bp.saveHalfRVI && HasCExtension.B
  val idx = if4_bp.lastHalfRVIIdx

  // this is the result of the last half RVI
  prevHalfInstrReq.bits.taken := if4_bp.lastHalfRVITaken
  prevHalfInstrReq.bits.ghInfo := if4_gh
  prevHalfInstrReq.bits.fetchpc := if4_pc
  prevHalfInstrReq.bits.idx := idx
  prevHalfInstrReq.bits.pc := if4_pd.pc(idx)
  prevHalfInstrReq.bits.npc := if4_pd.pc(idx) + 2.U
  prevHalfInstrReq.bits.target := if4_bp.lastHalfRVITarget
  prevHalfInstrReq.bits.instr := if4_pd.instrs(idx)(15, 0)
  prevHalfInstrReq.bits.ipf := if4_ipf
  prevHalfInstrReq.bits.meta := bpu.io.bpuMeta(idx)

  class IF4_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val if3_pc = Input(UInt(VAddrBits.W))
      val pc = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val if3_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := io.if3_valid && io.if3_pc =/= io.pc ||
              !io.if3_valid && (io.if2_valid && io.if2_pc =/= io.pc) ||
              !io.if3_valid && !io.if2_valid
  }
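
  // What IF4_PC_COMP computes, spelled out as an informal truth table
  // (commentary only): the "next valid pc" is if3_pc if IF3 holds a packet,
  // else if2_pc if IF2 holds one, else unknown, which is treated as a
  // mismatch:
  //   if3_valid                -> res = (if3_pc =/= pc)
  //   !if3_valid &&  if2_valid -> res = (if2_pc =/= pc)
  //   !if3_valid && !if2_valid -> res = true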
  def if4_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF4_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.if3_pc := if3_pc
    comp.io.pc := pc
    comp.io.if2_valid := if2_valid
    comp.io.if3_valid := if3_valid
    comp.io.res
  }

  val if4_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => if4_bp.realTakens(i) && if4_nextValidPCNotEquals(if4_bp.targets(i))))

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.bits.pc + 2.U)
  val if4_predTakenRedirect = ParallelORR(if4_predTakenRedirectVec)
  val if4_predNotTakenRedirect = !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  // val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_valid && (
    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
    // if4_prevHalfNextNotMet ||
    // when if4 predicts taken, but the pc of the next fetch packet is not the target
    if4_predTakenRedirect ||
    // when if4 predicts not taken, but the pc of the next fetch packet is not snpc
    if4_predNotTakenRedirect
    // GHInfo from the last pred does not correspond to this packet
    // if4_ghInfoNotIdenticalRedirect
  )

  val if4_target = WireInit(if4_snpc)

  if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)

  npcGen.register(if4_redirect, if4_target, Some("if4_target"))

  when (if4_fire) {
    final_gh := if4_predicted_gh
  }
  if4_gh := Mux(flush_final_gh, final_gh_bypass, final_gh)
  if3_gh := Mux(if4_valid && !if4_flush, if4_predicted_gh, if4_gh)
  if2_gh := Mux(if3_valid && !if3_flush, if3_predicted_gh, if3_gh)
  if1_gh := Mux(if2_valid && !if2_flush, if2_predicted_gh, if2_gh)
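
  // A sketch of the speculative history chain wired above: each stage uses
  // the youngest history available, falling back stage by stage -- if1_gh
  // comes from if2's predicted history when IF2 holds a packet, else from
  // if3's, else from if4's, and ultimately from final_gh, which is only
  // committed when IF4 fires. On a mispredict/replay (handled just below),
  // final_gh is rebuilt from the checkpoint in bpuMeta and bypassed into
  // the same chain through flush_final_gh.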

  val cfiUpdate = io.cfiUpdateInfo
  when (cfiUpdate.valid && (cfiUpdate.bits.isMisPred || cfiUpdate.bits.isReplay)) {
    val b = cfiUpdate.bits
    val oldGh = b.bpuMeta.hist
    val sawNTBr = b.bpuMeta.sawNotTakenBranch
    val isBr = b.pd.isBr
    val taken = Mux(cfiUpdate.bits.isReplay, b.bpuMeta.predTaken, b.taken)
    val updatedGh = oldGh.update(sawNTBr, isBr && taken)
    final_gh := updatedGh
    final_gh_bypass := updatedGh
    flush_final_gh := true.B
  }

  npcGen.register(io.redirect.valid, io.redirect.bits, Some("backend_redirect"))
  npcGen.register(RegNext(reset.asBool) && !reset.asBool, resetVector.U(VAddrBits.W), Some("reset_vector"))

  if1_npc := npcGen()


  icache.io.req.valid := if1_can_go
  icache.io.resp.ready := if4_ready
  icache.io.req.bits.addr := if1_npc
  icache.io.req.bits.mask := mask(if1_npc)
  icache.io.flush := Cat(if3_flush, if2_flush)
  icache.io.mem_grant <> io.icacheMemGrant
  icache.io.fencei := io.fencei
  icache.io.prev.valid := if3_prevHalfInstrMet
  icache.io.prev.bits := if3_prevHalfInstr.bits.instr
  icache.io.prev_ipf := if3_prevHalfInstr.bits.ipf
  icache.io.prev_pc := if3_prevHalfInstr.bits.pc
  icache.io.mmio_acquire <> io.mmio_acquire
  icache.io.mmio_grant <> io.mmio_grant
  icache.io.mmio_flush <> io.mmio_flush
  io.icacheMemAcq <> icache.io.mem_acquire
  io.l1plusFlush := icache.io.l1plusflush
  io.prefetchTrainReq := icache.io.prefetchTrainReq

  bpu.io.cfiUpdateInfo <> io.cfiUpdateInfo

  bpu.io.inFire(0) := if1_can_go
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := if1_gh.asUInt
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_prevHalfInstrMet
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr


  when (if3_prevHalfInstrMet && icacheResp.ipf && !if3_prevHalfInstr.bits.ipf) {
    crossPageIPF := true.B // the upper 16 bits trigger a page fault
  }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  fetchPacketWire.instrs := if4_pd.instrs
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
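  // Worked example of the mask computation above -- illustration only,
  // assuming PredictWidth = 8 and a predecode mask of b11111111. If IF4
  // predicts taken at jmpIdx = 2:
  //   Fill(8, !taken)           = b00000000
  //   Fill(8, 1.U) >> (~jmpIdx) = b00000111   (keeps slots 0 .. jmpIdx)
  //   final mask = b11111111 & (b00000000 | b00000111) = b00000111
  // i.e. instruction slots after the taken branch are squashed.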
  fetchPacketWire.pdmask := if4_pd.mask

  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.bpuMeta := bpu.io.bpuMeta
  // save it for update
  when (if4_pendingPrevHalfInstr) {
    fetchPacketWire.bpuMeta(0) := if4_prevHalfInstr.bits.meta
  }
  (0 until PredictWidth).foreach(i => {
    val meta = fetchPacketWire.bpuMeta(i)
    meta.hist := final_gh
    meta.predHist := if4_predHist.asTypeOf(new GlobalHistory)
    meta.predTaken := if4_bp.takens(i)
  })
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.acf := if4_acf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(icache.io.flush(0).asBool, "Flush icache stage2...\n")
    XSDebug(icache.io.flush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits)}\n")

    XSDebug("[IF1] v=%d fire=%d cango=%d flush=%d pc=%x mask=%b\n", if1_valid, if1_fire, if1_can_go, if1_flush, if1_npc, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, crossPageIPF, if3_bp.hasNotTakenBrs)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_crossPageIPF, if4_bp.hasNotTakenBrs)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", icache.io.req.valid, icache.io.req.ready, icache.io.req.bits.addr)
    XSDebug("[IF1][ghr] hist=%b\n", if1_gh.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", if1_gh.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_gh.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", icache.io.resp.valid, icache.io.resp.ready, icache.io.resp.bits.pc, icache.io.resp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.bits.taken, if3_prevHalfInstr.bits.fetchpc, if3_prevHalfInstr.bits.idx, if3_prevHalfInstr.bits.pc, if3_prevHalfInstr.bits.npc, if3_prevHalfInstr.bits.target, if3_prevHalfInstr.bits.instr, if3_prevHalfInstr.bits.ipf)
    if3_gh.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal! instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][prevHalfInstrReq] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstrReq.valid, prevHalfInstrReq.bits.taken, prevHalfInstrReq.bits.fetchpc, prevHalfInstrReq.bits.idx, prevHalfInstrReq.bits.pc, prevHalfInstrReq.bits.npc, prevHalfInstrReq.bits.target, prevHalfInstrReq.bits.instr, prevHalfInstrReq.bits.ipf)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.bits.taken, if4_prevHalfInstr.bits.fetchpc, if4_prevHalfInstr.bits.idx, if4_prevHalfInstr.bits.pc, if4_prevHalfInstr.bits.npc, if4_prevHalfInstr.bits.target, if4_prevHalfInstr.bits.instr, if4_prevHalfInstr.bits.ipf)
    if4_gh.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d acf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.acf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}