package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val nBanks = 4
  val bankBytes = PredictWidth
  val bankWidth = bankBytes / 2
  val bankOffsetBits = log2Ceil(bankBytes)
  // (0, nBanks-1)
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1, bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanks-1).U
  // (0, bankBytes/2-1)
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1, 1)
  def bankAligned(pc: UInt)  = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each 1 bit in mask stands for 2 Bytes
  // 8 bits, of which only the first 7 bits could be 0
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1, 0)
  def maskLastHalf(pc: UInt): UInt = Mux(isInLastBank(pc), 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt): UInt = Cat(maskFirstHalf(pc), maskLastHalf(pc))
  def snpc(pc: UInt): UInt = pc + (PopCount(mask(pc)) << 1)

  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt) = Mux(shifted, ptr - 1.U, ptr)
  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d\n", sawNTBr, takenOnBr, saveHalfRVI)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if (EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)

  val if1_histPtr, if2_histPtr, if3_histPtr, if4_histPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
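  // Global history bookkeeping (apparent scheme, inferred from the connections below):
  // extHist is the backing store of single history bits, if1_histPtr marks the current
  // head, and `hist` is a HistoryLength-wide window read starting at `ptr`. A later
  // pipeline stage that needs the history shifted this cycle drives shiftPtr/newPtr,
  // so IF1 reads its window from the updated position in the same cycle.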
  val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val shiftPtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val ptr = Mux(shiftPtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }

  shiftPtr := false.B
  newPtr := if1_histPtr

  def wrapGHInfo(bp: BranchPrediction) = {
    val ghi = Wire(new GlobalHistoryInfo())
    ghi.sawNTBr := bp.hasNotTakenBrs
    ghi.takenOnBr := bp.takenOnBr
    ghi.saveHalfRVI := bp.saveHalfRVI
    ghi
  }

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc)
  val if2_predHistPtr = RegEnable(ptr, enable = if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }

  val if2_bp = bpu.io.out(0)
  // if taken, bp_redirect should be true
  // when taken on half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && if2_bp.redirect && !if2_bp.saveHalfRVI
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }

  val if2_GHInfo = wrapGHInfo(if2_bp)

  when (if2_fire && if2_GHInfo.shifted) {
    shiftPtr := true.B
    newPtr := if2_newPtr
  }
  when (if2_GHInfo.shifted && if2_newPtr >= ptr) {
    hist(if2_newPtr - ptr) := if2_GHInfo.takenOnBr.asUInt
  }

  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable = if2_fire)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)

  val if3_GHInfo = wrapGHInfo(if3_bp)
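  // Cross-packet RVI handling (apparent intent): when only the first 16 bits of a
  // 32-bit instruction fit at the end of a fetch packet (saveHalfRVI), that half is
  // latched in a PrevHalfInstr and matched against the next packet
  // (prevHalfInstr.pc + 2 === if3_pc below) so predecode can stitch the two halves
  // back together.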
  class PrevHalfInstr extends Bundle {
    val valid = Bool()
    val taken = Bool()
    val ghInfo = new GlobalHistoryInfo()
    val fetchpc = UInt(VAddrBits.W) // only for debug
    val idx = UInt(VAddrBits.W) // only for debug
    val pc = UInt(VAddrBits.W)
    val target = UInt(VAddrBits.W)
    val instr = UInt(16.W)
    val ipf = Bool()
    val newPtr = UInt(log2Up(ExtHistoryLength).W)
  }

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)
  when (if4_prevHalfInstr.valid) {
    if3_prevHalfInstr := if4_prevHalfInstr
  }
  val prevHalfInstr = Mux(if4_prevHalfInstr.valid, if4_prevHalfInstr, if3_prevHalfInstr)

  // the previous half of an RVI instruction waits until it meets its last half
  val if3_hasPrevHalfInstr = prevHalfInstr.valid && (prevHalfInstr.pc + 2.U) === if3_pc
  // set to invalid once consumed or on a redirect from the backend
  val prevHalfConsumed = if3_hasPrevHalfInstr && if3_fire || if4_flush
  when (prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }

  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target
  if3_redirect := if3_fire && (if3_hasPrevHalfInstr && prevHalfInstr.taken || if3_bp.redirect && (if3_bp.taken && !if3_bp.saveHalfRVI || !if3_bp.taken))

  when (if3_redirect) {
    when (!(if3_hasPrevHalfInstr && prevHalfInstr.taken)) {
      if1_npc := if3_bp.target
      when (if3_GHInfo.shifted) {
        shiftPtr := true.B
        newPtr := if3_newPtr
      }
    }
  }

  // when it does not redirect, we still need to modify hist (the wire)
  when (if3_GHInfo.shifted && if3_newPtr >= ptr) {
    hist(if3_newPtr - ptr) := if3_GHInfo.takenOnBr
  }
  when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted && prevHalfInstr.newPtr >= ptr) {
    hist(prevHalfInstr.newPtr - ptr) := prevHalfInstr.ghInfo.takenOnBr
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_hasPrevHalfInstr && prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)

  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable = if3_fire)
  if4_ready := (if4_fire || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  val if4_GHInfo = wrapGHInfo(if4_bp)

  val if4_cfi_jal = if4_pd.instrs(if4_bp.jmpIdx)
  val if4_cfi_jal_tgt = if4_pd.pc(if4_bp.jmpIdx) + Mux(if4_pd.pd(if4_bp.jmpIdx).isRVC,
    SignExt(Cat(if4_cfi_jal(12), if4_cfi_jal(8), if4_cfi_jal(10, 9), if4_cfi_jal(6), if4_cfi_jal(7), if4_cfi_jal(2), if4_cfi_jal(11), if4_cfi_jal(5, 3), 0.U(1.W)), XLEN),
    SignExt(Cat(if4_cfi_jal(31), if4_cfi_jal(19, 12), if4_cfi_jal(20), if4_cfi_jal(30, 21), 0.U(1.W)), XLEN))
  if4_bp.target := Mux(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, if4_cfi_jal_tgt, bpu.io.out(2).target)
  if4_bp.redirect := bpu.io.out(2).redirect || if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken && if4_cfi_jal_tgt =/= bpu.io.out(2).target

  if4_prevHalfInstr := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    if4_prevHalfInstr.valid := true.B
    if4_prevHalfInstr.taken := if4_bp.taken
    if4_prevHalfInstr.ghInfo := if4_GHInfo
    // Make sure shifted can work
    if4_prevHalfInstr.ghInfo.saveHalfRVI := false.B
    if4_prevHalfInstr.newPtr := if4_newPtr
    if4_prevHalfInstr.fetchpc := if4_pc
    if4_prevHalfInstr.idx := PopCount(mask(if4_pc)) - 1.U
    if4_prevHalfInstr.pc := if4_pd.pc(if4_prevHalfInstr.idx)
    if4_prevHalfInstr.target := if4_bp.target
    if4_prevHalfInstr.instr := if4_pd.instrs(if4_prevHalfInstr.idx)(15, 0)
    if4_prevHalfInstr.ipf := if4_ipf
  }
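  // Apparent intent of the redirect below: if IF4 predicts taken but the branch's
  // last half still has to be fetched (saveHalfRVI), fetch is first steered to
  // snpc(if4_pc) to pick up that half; the redirect to the real target is then
  // issued from IF3 via prevHalfInstr.taken once the two halves meet.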
  // Redirect and npc logic for if4
  when (if4_fire && if4_bp.redirect) {
    if4_redirect := true.B
    when (if4_bp.saveHalfRVI) {
      if1_npc := snpc(if4_pc)
    }.otherwise {
      if1_npc := if4_bp.target
    }
  }

  // This should cover the if4 redirect to snpc when saveHalfRVI
  when (if3_redirect) {
    when (if3_hasPrevHalfInstr && prevHalfInstr.taken) {
      if1_npc := prevHalfInstr.target
    }
  }

  // history logic for if4
  when (if4_fire && if4_bp.redirect) {
    shiftPtr := true.B
    newPtr := if4_newPtr
  }

  when (if4_GHInfo.shifted && if4_newPtr >= ptr) {
    hist(if4_newPtr - ptr) := if4_GHInfo.takenOnBr
  }

  when (if3_redirect) {
    // when redirecting with if3_hasPrevHalfInstr, this prevHalfInstr should only be taken
    when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted) {
      shiftPtr := true.B
      newPtr := prevHalfInstr.newPtr
      extHist(prevHalfInstr.newPtr) := prevHalfInstr.ghInfo.takenOnBr
    }
  }

  // modify GHR at the end of a prediction lifetime
  when (if4_fire && if4_GHInfo.shifted) {
    extHist(if4_newPtr) := if4_GHInfo.takenOnBr
  }

  // This is a histPtr that is only modified when a prediction
  // is sent, so that it can get the final prediction info
  val finalPredHistPtr = RegInit(0.U(log2Up(ExtHistoryLength).W))
  if4_histPtr := finalPredHistPtr
  if4_newPtr := if3_histPtr
  when (if4_fire && if4_GHInfo.shifted) {
    finalPredHistPtr := if4_newPtr
  }

  if3_histPtr := Mux(if4_GHInfo.shifted && if4_valid && !if4_flush, if4_histPtr - 1.U, if4_histPtr)
  if3_newPtr := if2_histPtr

  if2_histPtr := Mux(if3_GHInfo.shifted && if3_valid && !if3_flush, if3_histPtr - 1.U, if3_histPtr)
  if2_newPtr := if1_histPtr

  if1_histPtr := Mux(if2_GHInfo.shifted && if2_valid && !if2_flush, if2_histPtr - 1.U, if2_histPtr)

  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    shiftPtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we should only recover the pointer to an unshifted state
      newPtr := oldPtr
      finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      finalPredHistPtr := oldPtr - 1.U
      hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }

  when (inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)
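  // BPU update path: the global history a committed branch was predicted with is
  // reconstructed by reading HistoryLength bits of extHist starting at the
  // predHistPtr carried in its BranchUpdateInfo, and handed to the BPU together
  // with the update.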
  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // Don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.stageValid(0) := if2_valid
  bpu.io.stageValid(1) := if3_valid
  bpu.io.stageValid(2) := if4_valid
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := hist.asUInt
  bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.isFetchpcEqualFirstpc := if4_pc === if4_pd.pc(0)

  pd.io.in := icacheResp
  when (inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask & mask(loopBuffer.io.out.bits.pc) // TODO: Maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_hasPrevHalfInstr
  pd.io.prev.bits := prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the pf instruction to nop
  when (!if3_hasPrevHalfInstr && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_hasPrevHalfInstr && (prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !prevHalfInstr.ipf) { crossPageIPF := true.B } // higher 16 bits page fault
  }

  // Performance Counter
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
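  // Reading of the mask expression below: when the IF4 prediction is taken, slots
  // after the predicted jump (index > jmpIdx) are cleared; otherwise the predecode
  // mask is passed through unchanged.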
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := finalPredHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp:%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d fire=%d flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_histPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_histPtr, crossPageIPF, if3_GHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_histPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d shiftPtr=%d newPtr=%d ptr=%d\n", if1_histPtr, shiftPtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.redirect, if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_GHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.redirect, if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][ prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstr.valid, prevHalfInstr.taken, prevHalfInstr.fetchpc, prevHalfInstr.idx, prevHalfInstr.pc, prevHalfInstr.target, prevHalfInstr.instr, prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    if3_GHInfo.debug

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.redirect, if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal! instr=%x target=%x\n", if4_cfi_jal, if4_cfi_jal_tgt)
instr=%x target=%x\n", if4_cfi_jal, if4_cfi_jal_tgt) 486 XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n", 487 if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf) 488 if4_GHInfo.debug 489 XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n", 490 io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix) 491 for (i <- 0 until PredictWidth) { 492 XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n", 493 io.fetchPacket.bits.mask(i), 494 io.fetchPacket.bits.instrs(i), 495 io.fetchPacket.bits.pc(i), 496 io.fetchPacket.bits.pnpc(i), 497 io.fetchPacket.bits.pd(i).isRVC, 498 io.fetchPacket.bits.pd(i).brType, 499 io.fetchPacket.bits.pd(i).isCall, 500 io.fetchPacket.bits.pd(i).isRet 501 ) 502 } 503 } 504}