1package xiangshan.frontend 2 3import chisel3._ 4import chisel3.util._ 5import device.RAMHelper 6import xiangshan._ 7import utils._ 8import xiangshan.cache._ 9import chisel3.experimental.chiselName 10 11trait HasIFUConst extends HasXSParameter { 12 val resetVector = 0x80000000L//TODO: set reset vec 13 def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W)) 14 val groupBytes = FetchWidth * 4 * 2 // correspond to cache line size 15 val groupOffsetBits = log2Ceil(groupBytes) 16 val nBanksInPacket = 2 17 val bankBytes = PredictWidth * 2 / nBanksInPacket 18 val nBanksInGroup = groupBytes / bankBytes 19 val bankWidth = PredictWidth / nBanksInPacket 20 val bankOffsetBits = log2Ceil(bankBytes) 21 // (0, nBanksInGroup-1) 22 def bankInGroup(pc: UInt) = pc(groupOffsetBits-1,bankOffsetBits) 23 def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanksInGroup-1).U 24 // (0, bankBytes/2-1) 25 def offsetInBank(pc: UInt) = pc(bankOffsetBits-1,1) 26 def bankAligned(pc: UInt) = align(pc, bankBytes) 27 def groupAligned(pc: UInt) = align(pc, groupBytes) 28 // each 1 bit in mask stands for 2 Bytes 29 // 8 bits, in which only the first 7 bits could be 0 30 def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1,0) 31 // when in loop(buffer), we need to make use of the full packet 32 // and get the real mask in iCacheResp from loop buffer 33 // we may make predictions on more instructions than we could get from loop buffer 34 // and this will be handled in if4 35 def maskLastHalf(pc: UInt, inLoop: Bool = false.B): UInt = Mux(isInLastBank(pc) && !inLoop, 0.U(bankWidth.W), ~0.U(bankWidth.W)) 36 def mask(pc: UInt, inLoop: Bool = false.B): UInt = Reverse(Cat(maskFirstHalf(pc), maskLastHalf(pc, inLoop))) 37 def snpc(pc: UInt, inLoop: Bool = false.B): UInt = pc + (PopCount(mask(pc, inLoop)) << 1) 38 39 val IFUDebug = true 40} 41 42class GlobalHistoryInfo() extends XSBundle { 43 val nowPtr = UInt(log2Ceil(ExtHistoryLength).W) 44 val sawNTBr = Bool() 45 val takenOnBr = Bool() 46 // val saveHalfRVI = Bool() 47 def shifted = takenOnBr || sawNTBr 48 def newPtr(ptr: UInt = nowPtr): UInt = Mux(shifted, ptr - 1.U, ptr) 49 50 final def === (that: GlobalHistoryInfo): Bool = { 51 shifted === that.shifted && 52 takenOnBr === that.takenOnBr && 53 nowPtr === that.nowPtr 54 } 55 56 final def =/= (that: GlobalHistoryInfo): Bool = !(this === that) 57 58 // def update(): GlobalHistoryInfo = { 59 // val g = WireInit(this) 60 // g.nowPtr := nowPtr - Mux(shifted, 1.U, 0.U) 61 // g.sawNTBr := Mux(saveHalfRVI, sawNTBr, false.B) 62 // g.takenOnBr := Mux(saveHalfRVI, takenOnBr, false.B) 63 // // g.saveHalfRVI := false.B 64 // g 65 // } 66 67 implicit val name = "IFU" 68 def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d\n", sawNTBr, takenOnBr) 69 // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI) 70} 71 72class IFUIO extends XSBundle 73{ 74 val fetchPacket = DecoupledIO(new FetchPacket) 75 val redirect = Flipped(ValidIO(new Redirect)) 76 val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo)) 77 val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo)) 78 val icacheReq = DecoupledIO(new ICacheReq) 79 val icacheResp = Flipped(DecoupledIO(new ICacheResp)) 80 val icacheFlush = Output(UInt(2.W)) 81 // val loopBufPar = Flipped(new LoopBufferParameters) 82} 83 84class PrevHalfInstr extends XSBundle { 85 val valid = Bool() 86 val taken = Bool() 87 val ghInfo = new GlobalHistoryInfo() 88 val fetchpc = UInt(VAddrBits.W) // only for debug 89 val idx = UInt(VAddrBits.W) // only for debug 90 val pc = UInt(VAddrBits.W) 91 val target = UInt(VAddrBits.W) 92 val instr = UInt(16.W) 93 val ipf = Bool() 94 val newPtr = UInt(log2Up(ExtHistoryLength).W) 95} 96 97@chiselName 98class IFU extends XSModule with HasIFUConst 99{ 100 val io = IO(new IFUIO) 101 val bpu = BPU(EnableBPU) 102 val pd = Module(new PreDecode) 103 val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) } 104 105 val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B) 106 val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B) 107 108 val loopBufPar = loopBuffer.io.loopBufPar 109 val inLoop = WireInit(loopBuffer.io.out.valid) 110 val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits)) 111 112 if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid 113 if3_flush := if4_flush || if4_redirect 114 if2_flush := if3_flush || if3_redirect 115 if1_flush := if2_flush || if2_redirect 116 117 loopBuffer.io.flush := io.redirect.valid 118 119 //********************** IF1 ****************************// 120 val if1_valid = !reset.asBool && GTimer() > 500.U 121 val if1_npc = WireInit(0.U(VAddrBits.W)) 122 val if2_ready = WireInit(false.B) 123 val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready) 124 125 126 // val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W)) 127 128 val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W)))) 129 val updatePtr = WireInit(false.B) 130 val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W)) 131 val if1_histPtr = RegEnable(next=newPtr, init=0.U(log2Up(ExtHistoryLength).W), enable=updatePtr) 132 val ptr = Mux(updatePtr, newPtr, if1_histPtr) 133 val hist = Wire(Vec(HistoryLength, UInt(1.W))) 134 for (i <- 0 until HistoryLength) { 135 hist(i) := extHist(ptr + i.U) 136 } 137 138 updatePtr := false.B 139 newPtr := if1_histPtr 140 141 142 143 def wrapGHInfo(bp: BranchPrediction, ptr: UInt) = { 144 val ghi = Wire(new GlobalHistoryInfo()) 145 ghi.sawNTBr := bp.hasNotTakenBrs 146 ghi.takenOnBr := bp.takenOnBr 147 // ghi.saveHalfRVI := bp.saveHalfRVI 148 ghi.nowPtr := ptr 149 ghi 150 } 151 152 //********************** IF2 ****************************// 153 val if2_valid = RegInit(init = false.B) 154 val if3_ready = WireInit(false.B) 155 val if2_fire = if2_valid && if3_ready && !if2_flush 156 val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire) 157 val if2_snpc = snpc(if2_pc, inLoop) 158 val if2_predHistPtr = RegEnable(ptr, enable=if1_fire) 159 if2_ready := if2_fire || !if2_valid || if2_flush 160 when (if1_fire) { if2_valid := if1_valid } 161 .elsewhen (if2_flush) { if2_valid := false.B } 162 .elsewhen (if2_fire) { if2_valid := false.B } 163 164 when (RegNext(reset.asBool) && !reset.asBool) { 165 if1_npc := resetVector.U(VAddrBits.W) 166 }.elsewhen (if2_fire) { 167 if1_npc := if2_snpc 168 }.otherwise { 169 if1_npc := RegNext(if1_npc) 170 } 171 172 val if2_bp = bpu.io.out(0) 173 174 val if2_GHInfo = wrapGHInfo(if2_bp, if2_predHistPtr) 175 // if taken, bp_redirect should be true 176 // when taken on half RVI, we suppress this redirect signal 177 if2_redirect := if2_fire && if2_bp.taken 178 when (if2_redirect) { 179 if1_npc := if2_bp.target 180 } 181 when (if2_fire && if2_GHInfo.shifted) { 182 val if2_newPtr = if2_GHInfo.newPtr() 183 updatePtr := true.B 184 newPtr := if2_newPtr 185 extHist(if2_newPtr) := if2_GHInfo.takenOnBr.asUInt 186 } 187 188 189 190 //********************** IF3 ****************************// 191 val if3_valid = RegInit(init = false.B) 192 val if4_ready = WireInit(false.B) 193 val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush 194 val if3_pc = RegEnable(if2_pc, if2_fire) 195 val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire) 196 val if3_lastGHInfo = RegEnable(if2_GHInfo, enable=if2_fire) 197 // val if3_nextValidPC = Mux(if2_valid) 198 if3_ready := if3_fire || !if3_valid || if3_flush 199 when (if3_flush) { if3_valid := false.B } 200 .elsewhen (if2_fire) { if3_valid := true.B } 201 .elsewhen (if3_fire) { if3_valid := false.B } 202 203 val if3_bp = bpu.io.out(1) 204 205 val if3_GHInfo = wrapGHInfo(if3_bp, if3_predHistPtr) 206 207 val prevHalfInstrReq = Wire(new PrevHalfInstr) 208 // only valid when if4_fire 209 val hasPrevHalfInstrReq = prevHalfInstrReq.valid 210 211 val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr)) 212 // val if4_prevHalfInstr = Wire(new PrevHalfInstr) 213 // 32-bit instr crosses 2 pages, and the higher 16-bit triggers page fault 214 val crossPageIPF = WireInit(false.B) 215 216 val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid 217 218 // the previous half of RVI instruction waits until it meets its last half 219 val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && (if3_prevHalfInstr.pc + 2.U) === if3_pc && if3_valid 220 // set to invalid once consumed or redirect from backend 221 val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire 222 val if3_prevHalfFlush = if4_flush 223 when (hasPrevHalfInstrReq) { 224 if3_prevHalfInstr := prevHalfInstrReq 225 }.elsewhen (if3_prevHalfConsumed || if3_prevHalfFlush) { 226 if3_prevHalfInstr.valid := false.B 227 } 228 229 // when bp signal a redirect, we distinguish between taken and not taken 230 // if taken and saveHalfRVI is true, we do not redirect to the target 231 232 def if3_nextValidPCNotEquals(pc: UInt) = !if2_valid || if2_valid && if2_pc =/= pc 233 val if3_prevHalfMetRedirect = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.target) 234 val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.pc + 2.U) 235 val if3_predTakenRedirect = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target) 236 val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(snpc(if3_pc, inLoop)) 237 // when pendingPrevHalfInstr, if3_GHInfo is set to the info of last prev half instr 238 val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo 239 240 if3_redirect := if3_fire && ( 241 // prevHalf is consumed but the next packet is not where it meant to be 242 // we do not handle this condition because of the burden of building a correct GHInfo 243 // prevHalfMetRedirect || 244 // prevHalf does not match if3_pc and the next fetch packet is not snpc 245 if3_prevHalfNotMetRedirect || 246 // pred taken and next fetch packet is not the predicted target 247 if3_predTakenRedirect || 248 // pred not taken and next fetch packet is not snpc 249 if3_predNotTakenRedirect || 250 // GHInfo from last pred does not corresponds with this packet 251 if3_ghInfoNotIdenticalRedirect 252 ) 253 254 when (if3_redirect) { 255 /* when (prevHalfMetRedirect) { 256 if1_npc := if3_prevHalfInstr.target 257 }.else */ 258 when (if3_prevHalfNotMetRedirect) { 259 if1_npc := if3_prevHalfInstr.pc + 2.U 260 }.elsewhen (if3_predTakenRedirect) { 261 if1_npc := if3_bp.target 262 }.elsewhen (if3_predNotTakenRedirect) { 263 if1_npc := snpc(if3_pc) 264 }.elsewhen (if3_ghInfoNotIdenticalRedirect) { 265 if1_npc := Mux(if3_bp.taken, if3_bp.target, snpc(if3_pc)) 266 } 267 val if3_newPtr = if3_GHInfo.newPtr() 268 updatePtr := true.B 269 newPtr := if3_newPtr 270 extHist(if3_newPtr) := if3_GHInfo.takenOnBr.asUInt 271 } 272 273 //********************** IF4 ****************************// 274 val if4_pd = RegEnable(pd.io.out, if3_fire) 275 val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.ipf, if3_fire) 276 val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire) 277 val if4_valid = RegInit(false.B) 278 val if4_fire = if4_valid && io.fetchPacket.ready 279 val if4_pc = RegEnable(if3_pc, if3_fire) 280 val if4_lastGHInfo = RegEnable(if3_GHInfo, if3_fire) 281 // This is the real mask given from icache or loop buffer 282 val if4_mask = RegEnable(icacheResp.mask, if3_fire) 283 val if4_snpc = Mux(inLoop, if4_pc + (PopCount(if4_mask) << 1), snpc(if4_pc)) 284 285 286 val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire) 287 // wait until prevHalfInstr written into reg 288 if4_ready := (if4_fire && !hasPrevHalfInstrReq || !if4_valid || if4_flush) && GTimer() > 500.U 289 when (if4_flush) { if4_valid := false.B } 290 .elsewhen (if3_fire) { if4_valid := true.B } 291 .elsewhen (if4_fire) { if4_valid := false.B } 292 293 val if4_bp = Wire(new BranchPrediction) 294 if4_bp := bpu.io.out(2) 295 if4_bp.takens := bpu.io.out(2).takens & if4_mask 296 if4_bp.brMask := bpu.io.out(2).brMask & if4_mask 297 if4_bp.jalMask := bpu.io.out(2).jalMask & if4_mask 298 299 val if4_GHInfo = wrapGHInfo(if4_bp, if4_predHistPtr) 300 301 def cal_jal_tgt(inst: UInt, rvc: Bool): UInt = { 302 Mux(rvc, 303 SignExt(Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN), 304 SignExt(Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)), XLEN) 305 ) 306 } 307 val if4_instrs = if4_pd.instrs 308 val if4_jals = if4_bp.jalMask 309 val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => if4_pd.pc(i) + cal_jal_tgt(if4_instrs(i), if4_pd.pd(i).isRVC))) 310 311 (0 until PredictWidth).foreach {i => 312 when (if4_jals(i)) { 313 if4_bp.targets(i) := if4_jal_tgts(i) 314 } 315 } 316 317 // we need this to tell BPU the prediction of prev half 318 // because the prediction is with the start of each inst 319 val if4_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr)) 320 val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid 321 val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && (if4_prevHalfInstr.pc + 2.U) === if4_pc && if4_valid 322 val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire 323 val if4_prevHalfFlush = if4_flush 324 325 val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.taken) 326 when (if3_prevHalfConsumed) { 327 if4_prevHalfInstr := if3_prevHalfInstr 328 }.elsewhen (if4_prevHalfConsumed || if4_prevHalfFlush) { 329 if4_prevHalfInstr.valid := false.B 330 } 331 332 prevHalfInstrReq := 0.U.asTypeOf(new PrevHalfInstr) 333 when (if4_fire && if4_bp.saveHalfRVI) { 334 val idx = if4_bp.lastHalfRVIIdx 335 prevHalfInstrReq.valid := true.B 336 // this is result of the last half RVI 337 prevHalfInstrReq.taken := if4_bp.lastHalfRVITaken 338 prevHalfInstrReq.ghInfo := if4_GHInfo 339 prevHalfInstrReq.newPtr := if4_GHInfo.newPtr() 340 prevHalfInstrReq.fetchpc := if4_pc 341 prevHalfInstrReq.idx := idx 342 prevHalfInstrReq.pc := if4_pd.pc(idx) 343 prevHalfInstrReq.target := if4_bp.lastHalfRVITarget 344 prevHalfInstrReq.instr := if4_pd.instrs(idx)(15, 0) 345 prevHalfInstrReq.ipf := if4_ipf 346 } 347 348 def if4_nextValidPCNotEquals(pc: UInt) = if3_valid && if3_pc =/= pc || 349 !if3_valid && (if2_valid && if2_pc =/= pc) || 350 !if3_valid && !if2_valid 351 352 val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.pc+2.U) 353 val if4_predTakenRedirect = !hasPrevHalfInstrReq && if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target) 354 val if4_predNotTakenRedirect = !hasPrevHalfInstrReq && !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc) 355 val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo 356 357 if4_redirect := if4_fire && ( 358 // when if4 has a lastHalfRVI, but the next fetch packet is not snpc 359 if4_prevHalfNextNotMet || 360 // when if4 preds taken, but the pc of next fetch packet is not the target 361 if4_predTakenRedirect || 362 // when if4 preds not taken, but the pc of next fetch packet is not snpc 363 if4_predNotTakenRedirect || 364 // GHInfo from last pred does not corresponds with this packet 365 if4_ghInfoNotIdenticalRedirect 366 ) 367 368 when (if4_redirect) { 369 when (if4_prevHalfNextNotMet) { 370 if1_npc := prevHalfInstrReq.pc+2.U 371 }.elsewhen (if4_predTakenRedirect) { 372 if1_npc := if4_bp.target 373 }.elsewhen (if4_predNotTakenRedirect) { 374 if1_npc := if4_snpc 375 }.elsewhen (if4_ghInfoNotIdenticalRedirect) { 376 if1_npc := Mux(if4_bp.taken, if4_bp.target, if4_snpc) 377 } 378 val if4_newPtr = if4_GHInfo.newPtr() 379 updatePtr := true.B 380 newPtr := if4_newPtr 381 extHist(if4_newPtr) := if4_GHInfo.takenOnBr.asUInt 382 } 383 384 385 when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) { 386 val b = io.outOfOrderBrInfo.bits 387 val oldPtr = b.brInfo.histPtr 388 updatePtr := true.B 389 when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) { 390 // If mispredicted cfi is not a branch, 391 // and there wasn't any not taken branch before it, 392 // we should only recover the pointer to an unshifted state 393 newPtr := oldPtr 394 // finalPredHistPtr := oldPtr 395 }.otherwise { 396 newPtr := oldPtr - 1.U 397 // finalPredHistPtr := oldPtr - 1.U 398 // hist(0) := Mux(b.pd.isBr, b.taken, 0.U) 399 extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U) 400 } 401 } 402 403 when (loopBufPar.LBredirect.valid) { 404 if1_npc := loopBufPar.LBredirect.bits 405 } 406 407 when (io.redirect.valid) { 408 if1_npc := io.redirect.bits.target 409 } 410 411 when(inLoop) { 412 io.icacheReq.valid := if4_flush 413 }.otherwise { 414 io.icacheReq.valid := if1_valid && if2_ready 415 } 416 io.icacheResp.ready := if4_ready 417 io.icacheReq.bits.addr := if1_npc 418 419 // when(if4_bp.taken) { 420 // when(if4_bp.saveHalfRVI) { 421 // io.loopBufPar.LBReq := snpc(if4_pc) 422 // }.otherwise { 423 // io.loopBufPar.LBReq := if4_bp.target 424 // } 425 // }.otherwise { 426 // io.loopBufPar.LBReq := snpc(if4_pc) 427 // XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n") 428 // } 429 loopBufPar.fetchReq := if3_pc 430 431 io.icacheReq.bits.mask := mask(if1_npc) 432 433 io.icacheFlush := Cat(if3_flush, if2_flush) 434 435 val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W))) 436 (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr)) 437 bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid 438 bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt) 439 bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid 440 bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // Dont care about hist 441 442 // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush) 443 bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush) 444 bpu.io.inFire(0) := if1_fire 445 bpu.io.inFire(1) := if2_fire 446 bpu.io.inFire(2) := if3_fire 447 bpu.io.inFire(3) := if4_fire 448 bpu.io.in.pc := if1_npc 449 bpu.io.in.hist := hist.asUInt 450 bpu.io.in.histPtr := ptr 451 bpu.io.in.inMask := mask(if1_npc) 452 bpu.io.predecode.mask := if4_pd.mask 453 bpu.io.predecode.endMask := if4_pd.endMask 454 bpu.io.predecode.pd := if4_pd.pd 455 bpu.io.predecode.hasLastHalfRVI := if4_pc =/= if4_pd.pc(0) 456 bpu.io.realMask := if4_mask 457 bpu.io.prevHalf := if4_prevHalfInstr 458 459 pd.io.in := icacheResp 460 when(inLoop) { 461 pd.io.in.mask := loopBuffer.io.out.bits.mask // TODO: Maybe this is unnecessary 462 // XSDebug("Fetch from LB\n") 463 // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n") 464 // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n") 465 // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n") 466 } 467 468 pd.io.prev.valid := if3_prevHalfInstrMet 469 pd.io.prev.bits := if3_prevHalfInstr.instr 470 // if a fetch packet triggers page fault, set the pf instruction to nop 471 when (!if3_prevHalfInstrMet && icacheResp.ipf) { 472 val instrs = Wire(Vec(FetchWidth, UInt(32.W))) 473 (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop 474 pd.io.in.data := instrs.asUInt 475 }.elsewhen (if3_prevHalfInstrMet && (if3_prevHalfInstr.ipf || icacheResp.ipf)) { 476 pd.io.prev.bits := ZeroExt("b0010011".U, 16) 477 val instrs = Wire(Vec(FetchWidth, UInt(32.W))) 478 (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W)))) 479 pd.io.in.data := instrs.asUInt 480 481 when (icacheResp.ipf && !if3_prevHalfInstr.ipf) { crossPageIPF := true.B } // higher 16 bits page fault 482 } 483 484 //Performance Counter 485 // if (!env.FPGAPlatform ) { 486 // ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf) 487 // ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf) 488 // } 489 490 val fetchPacketValid = if4_valid && !io.redirect.valid 491 val fetchPacketWire = Wire(new FetchPacket) 492 493 // io.fetchPacket.valid := if4_valid && !io.redirect.valid 494 fetchPacketWire.instrs := if4_pd.instrs 495 fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx))) 496 497 loopBufPar.noTakenMask := if4_pd.mask 498 fetchPacketWire.pc := if4_pd.pc 499 (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U)) 500 when (if4_bp.taken) { 501 fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target 502 } 503 fetchPacketWire.brInfo := bpu.io.branchInfo 504 (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := if4_predHistPtr) 505 (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr) 506 fetchPacketWire.pd := if4_pd.pd 507 fetchPacketWire.ipf := if4_ipf 508 fetchPacketWire.crossPageIPFFix := if4_crossPageIPF 509 510 // predTaken Vec 511 fetchPacketWire.predTaken := if4_bp.taken 512 513 loopBuffer.io.in.bits := fetchPacketWire 514 io.fetchPacket.bits := fetchPacketWire 515 io.fetchPacket.valid := fetchPacketValid 516 loopBuffer.io.in.valid := io.fetchPacket.fire 517 518 // debug info 519 if (IFUDebug) { 520 XSDebug(RegNext(reset.asBool) && !reset.asBool, "Reseting...\n") 521 XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n") 522 XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n") 523 XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp:%d isMisPred=%d isReplay=%d pc=%x\n", 524 io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc) 525 XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n") 526 527 XSDebug("[IF1] v=%d fire=%d flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc)) 528 XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_predHistPtr, if2_snpc) 529 XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_predHistPtr, crossPageIPF, if3_GHInfo.sawNTBr) 530 XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_predHistPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr) 531 XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr) 532 XSDebug("[IF1][ghr] headPtr=%d updatePtr=%d newPtr=%d ptr=%d\n", if1_histPtr, updatePtr, newPtr, ptr) 533 XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt) 534 XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt) 535 536 XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI) 537 if2_GHInfo.debug 538 539 XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask) 540 XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI) 541 XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect, if3_ghInfoNotIdenticalRedirect) 542 // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n", 543 // prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr) 544 XSDebug("[IF3][ prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n", 545 if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf) 546 XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n", 547 if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf) 548 if3_GHInfo.debug 549 550 XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask) 551 XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask) 552 XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI) 553 XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d, ghInfo=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect, if4_ghInfoNotIdenticalRedirect) 554 XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal! instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx)) 555 XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n", 556 if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf) 557 if4_GHInfo.debug 558 XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n", 559 io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix) 560 for (i <- 0 until PredictWidth) { 561 XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n", 562 io.fetchPacket.bits.mask(i), 563 io.fetchPacket.bits.instrs(i), 564 io.fetchPacket.bits.pc(i), 565 io.fetchPacket.bits.pnpc(i), 566 io.fetchPacket.bits.pd(i).isRVC, 567 io.fetchPacket.bits.pd(i).brType, 568 io.fetchPacket.bits.pd(i).isCall, 569 io.fetchPacket.bits.pd(i).isRet 570 ) 571 } 572 } 573}