package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x80000000L // TODO: set reset vec
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val groupBytes = FetchWidth * 4 * 2 // corresponds to cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val bankBytes = PredictWidth
  val nBanks = groupBytes / bankBytes
  val bankWidth = bankBytes / 2
  val bankOffsetBits = log2Ceil(bankBytes)
  // (0, nBanks-1)
  def bankInGroup(pc: UInt) = pc(groupOffsetBits-1, bankOffsetBits)
  def isInLastBank(pc: UInt) = bankInGroup(pc) === (nBanks-1).U
  // (0, bankBytes/2-1)
  def offsetInBank(pc: UInt) = pc(bankOffsetBits-1, 1)
  def bankAligned(pc: UInt) = align(pc, bankBytes)
  def groupAligned(pc: UInt) = align(pc, groupBytes)
  // each 1 bit in mask stands for 2 bytes
  // bankWidth bits, of which at most the upper (bankWidth-1) bits can be 0
  def maskFirstHalf(pc: UInt): UInt = ((~(0.U(bankWidth.W))) >> offsetInBank(pc))(bankWidth-1, 0)
  // when fetching from the loop buffer, we need to make use of the full packet
  // and get the real mask in icacheResp from the loop buffer;
  // we may make predictions on more instructions than we could get from the loop buffer,
  // and this will be handled in if4
  def maskLastHalf(pc: UInt, inLoop: Bool = false.B): UInt = Mux(isInLastBank(pc) && !inLoop, 0.U(bankWidth.W), ~0.U(bankWidth.W))
  def mask(pc: UInt, inLoop: Bool = false.B): UInt = Cat(maskFirstHalf(pc), maskLastHalf(pc, inLoop))
  def snpc(pc: UInt, inLoop: Bool = false.B): UInt = pc + (PopCount(mask(pc, inLoop)) << 1)

  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val nowPtr = UInt(log2Ceil(ExtHistoryLength).W)
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  // val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt = nowPtr): UInt = Mux(shifted, ptr - 1.U, ptr)

  final def === (that: GlobalHistoryInfo): Bool = {
    shifted === that.shifted &&
    takenOnBr === that.takenOnBr &&
    nowPtr === that.nowPtr
  }

  final def =/= (that: GlobalHistoryInfo): Bool = !(this === that)

  // def update(): GlobalHistoryInfo = {
  //   val g = WireInit(this)
  //   g.nowPtr := nowPtr - Mux(shifted, 1.U, 0.U)
  //   g.sawNTBr := Mux(saveHalfRVI, sawNTBr, false.B)
  //   g.takenOnBr := Mux(saveHalfRVI, takenOnBr, false.B)
  //   // g.saveHalfRVI := false.B
  //   g
  // }

  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d\n", sawNTBr, takenOnBr)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}
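
// Records the first half of a 32-bit RVI instruction that straddles two fetch
// packets: the lower 16 bits arrive in one packet and the upper 16 bits in the
// next. IF3/IF4 keep it around to stitch the instruction back together and to
// carry the prediction (taken/target/ghInfo) made for it.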
class PrevHalfInstr extends XSBundle {
  val valid = Bool()
  val taken = Bool()
  val ghInfo = new GlobalHistoryInfo()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val newPtr = UInt(log2Up(ExtHistoryLength).W)
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)

  // val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val updatePtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if1_histPtr = RegEnable(next=newPtr, init=0.U(log2Up(ExtHistoryLength).W), enable=updatePtr)
  val ptr = Mux(updatePtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }

  updatePtr := false.B
  newPtr := if1_histPtr

  def wrapGHInfo(bp: BranchPrediction, ptr: UInt) = {
    val ghi = Wire(new GlobalHistoryInfo())
    ghi.sawNTBr := bp.hasNotTakenBrs
    ghi.takenOnBr := bp.takenOnBr
    // ghi.saveHalfRVI := bp.saveHalfRVI
    ghi.nowPtr := ptr
    ghi
  }

  //********************** IF2 ****************************//
  val if2_valid = RegInit(init = false.B)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc, inLoop)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if1_fire)       { if2_valid := if1_valid }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }

  val if2_bp = bpu.io.out(0)

  val if2_GHInfo = wrapGHInfo(if2_bp, if2_predHistPtr)
  // if taken, bp_redirect should be true
  // when taken on half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && if2_bp.taken
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }
  when (if2_fire && if2_GHInfo.shifted) {
    val if2_newPtr = if2_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if2_newPtr
    extHist(if2_newPtr) := if2_GHInfo.takenOnBr.asUInt
  }
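
  // The global history lives in extHist, a circular buffer read through a
  // decrementing pointer: hist(i) = extHist(ptr + i), so writing the newest
  // branch outcome at ptr - 1 and publishing the decremented pointer prepends
  // one bit to the history the BPU sees. Each stage that observes a shift
  // updates speculatively this way; recovery just restores an older pointer.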

  //********************** IF3 ****************************//
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  val if3_lastGHInfo = RegEnable(if2_GHInfo, enable=if2_fire)
  // val if3_nextValidPC = Mux(if2_valid)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush)     { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := true.B }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1)

  val if3_GHInfo = wrapGHInfo(if3_bp, if3_predHistPtr)

  val prevHalfInstrReq = Wire(new PrevHalfInstr)
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  // val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and the higher 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid

  // the previous half of an RVI instruction waits until it meets its last half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && (if3_prevHalfInstr.pc + 2.U) === if3_pc && if3_valid
  // set to invalid once consumed or redirected by the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr := prevHalfInstrReq
  }.elsewhen (if3_prevHalfConsumed || if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }

  // when bp signals a redirect, we distinguish between taken and not taken:
  // if taken and saveHalfRVI is true, we do not redirect to the target

  def if3_nextValidPCNotEquals(pc: UInt) = !if2_valid || if2_valid && if2_pc =/= pc
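  // IF3 can only check the fetch that IF1/IF2 have already started: the helper
  // above compares the PC this stage wants next with the PC actually in flight
  // in IF2 (if any), so a redirect is raised only when the in-flight packet is
  // not the one IF3 wants.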
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.pc + 2.U)
  val if3_predTakenRedirect    = !if3_pendingPrevHalfInstr && if3_bp.taken && if3_nextValidPCNotEquals(if3_bp.target)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(snpc(if3_pc, inLoop))
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo

  if3_redirect := if3_fire && (
    // prevHalf is consumed but the next packet is not where it was meant to be;
    // we do not handle this condition because of the burden of building a correct GHInfo
    // prevHalfMetRedirect ||
    // prevHalf does not match if3_pc and the next fetch packet is not snpc
    if3_prevHalfNotMetRedirect ||
    // pred taken and next fetch packet is not the predicted target
    if3_predTakenRedirect ||
    // pred not taken and next fetch packet is not snpc
    if3_predNotTakenRedirect ||
    // GHInfo from the last pred does not correspond to this packet
    if3_ghInfoNotIdenticalRedirect
  )

  when (if3_redirect) {
    /* when (prevHalfMetRedirect) {
      if1_npc := if3_prevHalfInstr.target
    }.else */
    when (if3_prevHalfNotMetRedirect) {
      if1_npc := if3_prevHalfInstr.pc + 2.U
    }.elsewhen (if3_predTakenRedirect) {
      if1_npc := if3_bp.target
    }.elsewhen (if3_predNotTakenRedirect) {
      if1_npc := snpc(if3_pc)
    }.elsewhen (if3_ghInfoNotIdenticalRedirect) {
      if1_npc := Mux(if3_bp.taken, if3_bp.target, snpc(if3_pc))
    }
    val if3_newPtr = if3_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if3_newPtr
    extHist(if3_newPtr) := if3_GHInfo.takenOnBr.asUInt
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_lastGHInfo = RegEnable(if3_GHInfo, if3_fire)
  // this is the real mask given from icache or loop buffer
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)
  val if4_snpc = Mux(inLoop, if4_pc + (PopCount(if4_mask) << 1), snpc(if4_pc))

  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (if4_fire && !hasPrevHalfInstrReq || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush)     { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := true.B }
  .elsewhen (if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)
  if4_bp.takens  := bpu.io.out(2).takens & if4_mask
  if4_bp.brMask  := bpu.io.out(2).brMask & if4_mask
  if4_bp.jalMask := bpu.io.out(2).jalMask & if4_mask

  val if4_GHInfo = wrapGHInfo(if4_bp, if4_predHistPtr)

  def cal_jal_tgt(inst: UInt, rvc: Bool): UInt = {
    Mux(rvc,
      SignExt(Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN),
      SignExt(Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)), XLEN)
    )
  }
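  // The Cats above reassemble the scrambled jump immediates defined by the
  // RISC-V spec: for C.J/C.JAL the offset imm[11|10|9:8|7|6|5|4|3:1|0] is
  // rebuilt from inst[12|8|10:9|6|7|2|11|5:3], and for JAL the offset
  // imm[20|19:12|11|10:1|0] is rebuilt from inst[31|19:12|20|30:21], with the
  // result sign-extended to XLEN.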
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => if4_pd.pc(i) + cal_jal_tgt(if4_instrs(i), if4_pd.pd(i).isRVC)))

  (0 until PredictWidth).foreach { i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }

  // we need this to tell BPU the prediction of the prev half,
  // because the prediction goes with the start of each inst
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && (if4_prevHalfInstr.pc + 2.U) === if4_pc && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.taken)
  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr := if3_prevHalfInstr
  }.elsewhen (if4_prevHalfConsumed || if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }

  prevHalfInstrReq := 0.U.asTypeOf(new PrevHalfInstr)
  when (if4_fire && if4_bp.saveHalfRVI) {
    val idx = if4_bp.lastHalfRVIIdx
    prevHalfInstrReq.valid := true.B
    // this is the result of the last half RVI
    prevHalfInstrReq.taken := if4_bp.lastHalfRVITaken
    prevHalfInstrReq.ghInfo := if4_GHInfo
    prevHalfInstrReq.newPtr := if4_GHInfo.newPtr()
    prevHalfInstrReq.fetchpc := if4_pc
    prevHalfInstrReq.idx := idx
    prevHalfInstrReq.pc := if4_pd.pc(idx)
    prevHalfInstrReq.target := if4_bp.lastHalfRVITarget
    prevHalfInstrReq.instr := if4_pd.instrs(idx)(15, 0)
    prevHalfInstrReq.ipf := if4_ipf
  }

  def if4_nextValidPCNotEquals(pc: UInt) = if3_valid && if3_pc =/= pc ||
                                           !if3_valid && (if2_valid && if2_pc =/= pc) ||
                                           !if3_valid && !if2_valid

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.pc+2.U)
  val if4_predTakenRedirect    = !hasPrevHalfInstrReq && if4_bp.taken && if4_nextValidPCNotEquals(if4_bp.target)
  val if4_predNotTakenRedirect = !hasPrevHalfInstrReq && !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo

  if4_redirect := if4_fire && (
    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
    if4_prevHalfNextNotMet ||
    // when if4 preds taken, but the pc of the next fetch packet is not the target
    if4_predTakenRedirect ||
    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
    if4_predNotTakenRedirect ||
    // GHInfo from the last pred does not correspond to this packet
    if4_ghInfoNotIdenticalRedirect
  )

  when (if4_redirect) {
    when (if4_prevHalfNextNotMet) {
      if1_npc := prevHalfInstrReq.pc+2.U
    }.elsewhen (if4_predTakenRedirect) {
      if1_npc := if4_bp.target
    }.elsewhen (if4_predNotTakenRedirect) {
      if1_npc := if4_snpc
    }.elsewhen (if4_ghInfoNotIdenticalRedirect) {
      if1_npc := Mux(if4_bp.taken, if4_bp.target, if4_snpc)
    }
    val if4_newPtr = if4_GHInfo.newPtr()
    updatePtr := true.B
    newPtr := if4_newPtr
    extHist(if4_newPtr) := if4_GHInfo.takenOnBr.asUInt
  }

  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    updatePtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // if the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we should only recover the pointer to an unshifted state
      newPtr := oldPtr
      // finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      // finalPredHistPtr := oldPtr - 1.U
      // hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }

  when (inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist
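  // For an in-order update, the BPU needs the history bits the prediction was
  // originally made with, so the full window is re-read from extHist at the
  // checkpointed predHistPtr. The out-of-order update reuses the same vector
  // only because its history field is unused (hence the "don't care" above).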

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.inFire(0) := if1_fire
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := hist.asUInt
  bpu.io.in.histPtr := ptr
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_pc =/= if4_pd.pc(0)
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr

  pd.io.in := icacheResp
  when (inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask // TODO: maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_prevHalfInstrMet
  pd.io.prev.bits := if3_prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the faulting instructions to nop
  when (!if3_prevHalfInstrMet && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_prevHalfInstrMet && (if3_prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !if3_prevHalfInstr.ipf) { crossPageIPF := true.B } // higher 16 bits trigger a page fault
  }

  // performance counters
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
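  // When the packet is predicted taken, only instructions up to and including
  // the taken jump may be delivered. Since jmpIdx is log2(PredictWidth) bits
  // wide, ~jmpIdx equals PredictWidth-1-jmpIdx, so shifting the all-ones
  // vector right by ~jmpIdx keeps exactly the low jmpIdx+1 bits: e.g. with
  // PredictWidth = 16 and jmpIdx = 5, the ones are shifted right by 10,
  // leaving mask bits 5..0 set.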

  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := if4_predHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire
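  // Every delivered fetch packet is also offered to the loop buffer
  // (its in.valid follows io.fetchPacket.fire); once the buffer raises
  // out.valid, inLoop is asserted and instructions are served from the
  // buffer instead of the icache.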

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp=%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d fire=%d flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_predHistPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_predHistPtr, crossPageIPF, if3_GHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_predHistPtr, if4_crossPageIPF, if4_GHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d updatePtr=%d newPtr=%d ptr=%d\n", if1_histPtr, updatePtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_GHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    if3_GHInfo.debug

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal! instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    if4_GHInfo.debug
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}