/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.frontend.icache._
import xiangshan.backend.CtrlToFtqIO
import xiangshan.backend.decode.ImmUnion

class FtqPtr(implicit p: Parameters) extends CircularQueuePtr[FtqPtr](
  p => p(XSCoreParamsKey).FtqSize
){
}

object FtqPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): FtqPtr = {
    val ptr = Wire(new FtqPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: FtqPtr)(implicit p: Parameters): FtqPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class FtqNRSRAM[T <: Data](gen: T, numRead: Int)(implicit p: Parameters) extends XSModule {

  val io = IO(new Bundle() {
    val raddr = Input(Vec(numRead, UInt(log2Up(FtqSize).W)))
    val ren = Input(Vec(numRead, Bool()))
    val rdata = Output(Vec(numRead, gen))
    val waddr = Input(UInt(log2Up(FtqSize).W))
    val wen = Input(Bool())
    val wdata = Input(gen)
  })

  for(i <- 0 until numRead){
    val sram = Module(new SRAMTemplate(gen, FtqSize))
    sram.io.r.req.valid := io.ren(i)
    sram.io.r.req.bits.setIdx := io.raddr(i)
    io.rdata(i) := sram.io.r.resp.data(0)
    sram.io.w.req.valid := io.wen
    sram.io.w.req.bits.setIdx := io.waddr
    sram.io.w.req.bits.data := VecInit(io.wdata)
  }

}

class Ftq_RF_Components(implicit p: Parameters) extends XSBundle with BPUUtils {
  val startAddr = UInt(VAddrBits.W)
  val nextLineAddr = UInt(VAddrBits.W)
  val isNextMask = Vec(PredictWidth, Bool())
  val fallThruError = Bool()
  // val carry = Bool()
  def getPc(offset: UInt) = {
    def getHigher(pc: UInt) = pc(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
    def getOffset(pc: UInt) = pc(log2Ceil(PredictWidth)+instOffsetBits, instOffsetBits)
    Cat(getHigher(Mux(isNextMask(offset) && startAddr(log2Ceil(PredictWidth)+instOffsetBits), nextLineAddr, startAddr)),
        getOffset(startAddr)+offset, 0.U(instOffsetBits.W))
  }
  def fromBranchPrediction(resp: BranchPredictionBundle) = {
    def carryPos(addr: UInt) = addr(instOffsetBits+log2Ceil(PredictWidth)+1)
    this.startAddr := resp.pc
    this.nextLineAddr := resp.pc + (FetchWidth * 4 * 2).U // may be broken on other configs
    this.isNextMask := VecInit((0 until PredictWidth).map(i =>
      (resp.pc(log2Ceil(PredictWidth), 1) +& i.U)(log2Ceil(PredictWidth)).asBool()
    ))
    this.fallThruError := resp.fallThruError
    this
  }
  override def toPrintable: Printable = {
    p"startAddr:${Hexadecimal(startAddr)}"
  }
}
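
// Rough illustration of how getPc above reconstructs an instruction PC, assuming a
// default-like config with PredictWidth = 16 and instOffsetBits = 1 (other configs
// shift the bit positions accordingly):
//   - the low bits are startAddr's in-block offset plus the requested slot offset,
//     re-aligned with instOffsetBits zero bits;
//   - the high bits normally come from startAddr, but when isNextMask(offset) shows
//     that adding the offset crosses into the next block and startAddr already sits
//     in the upper half of the line, they are taken from nextLineAddr instead.
// e.g. (hypothetical values) startAddr = 0x1000 and offset = 3 give getPc = 0x1006,
// while an overflowing offset would borrow the higher bits of nextLineAddr.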

class Ftq_pd_Entry(implicit p: Parameters) extends XSBundle {
  val brMask = Vec(PredictWidth, Bool())
  val jmpInfo = ValidUndirectioned(Vec(3, Bool()))
  val jmpOffset = UInt(log2Ceil(PredictWidth).W)
  val jalTarget = UInt(VAddrBits.W)
  val rvcMask = Vec(PredictWidth, Bool())
  def hasJal  = jmpInfo.valid && !jmpInfo.bits(0)
  def hasJalr = jmpInfo.valid && jmpInfo.bits(0)
  def hasCall = jmpInfo.valid && jmpInfo.bits(1)
  def hasRet  = jmpInfo.valid && jmpInfo.bits(2)

  def fromPdWb(pdWb: PredecodeWritebackBundle) = {
    val pds = pdWb.pd
    this.brMask := VecInit(pds.map(pd => pd.isBr && pd.valid))
    this.jmpInfo.valid := VecInit(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid)).asUInt.orR
    this.jmpInfo.bits := ParallelPriorityMux(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid),
                                             pds.map(pd => VecInit(pd.isJalr, pd.isCall, pd.isRet)))
    this.jmpOffset := ParallelPriorityEncoder(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid))
    this.rvcMask := VecInit(pds.map(pd => pd.isRVC))
    this.jalTarget := pdWb.jalTarget
  }

  def toPd(offset: UInt) = {
    require(offset.getWidth == log2Ceil(PredictWidth))
    val pd = Wire(new PreDecodeInfo)
    pd.valid := true.B
    pd.isRVC := rvcMask(offset)
    val isBr = brMask(offset)
    val isJalr = offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(0)
    pd.brType := Cat(offset === jmpOffset && jmpInfo.valid, isJalr || isBr)
    pd.isCall := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(1)
    pd.isRet  := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(2)
    pd
  }
}



class Ftq_Redirect_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasEntry = new RASEntry
  // val specCnt = Vec(numBr, UInt(10.W))
  // val ghist = new ShiftingGlobalHistory
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)

  val histPtr = new CGHPtr

  def fromBranchPrediction(resp: BranchPredictionBundle) = {
    assert(!resp.is_minimal)
    this.rasSp := resp.rasSp
    this.rasEntry := resp.rasTop
    this.folded_hist := resp.folded_hist
    this.afhob := resp.afhob
    this.lastBrNumOH := resp.lastBrNumOH
    this.histPtr := resp.histPtr
    this
  }
}

class Ftq_1R_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

class Ftq_Pred_Info(implicit p: Parameters) extends XSBundle {
  val target = UInt(VAddrBits.W)
  val cfiIndex = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
}

// class FtqEntry(implicit p: Parameters) extends XSBundle with HasBPUConst {
//   val startAddr = UInt(VAddrBits.W)
//   val fallThruAddr = UInt(VAddrBits.W)
//   val isNextMask = Vec(PredictWidth, Bool())

//   val meta = UInt(MaxMetaLength.W)

//   val rasSp = UInt(log2Ceil(RasSize).W)
//   val rasEntry = new RASEntry
//   val hist = new ShiftingGlobalHistory
//   val specCnt = Vec(numBr, UInt(10.W))

//   val valids = Vec(PredictWidth, Bool())
//   val brMask = Vec(PredictWidth, Bool())
//   // isJalr, isCall, isRet
//   val jmpInfo = ValidUndirectioned(Vec(3, Bool()))
//   val jmpOffset = UInt(log2Ceil(PredictWidth).W)

//   val mispredVec = Vec(PredictWidth, Bool())
//   val cfiIndex = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
//   val target = UInt(VAddrBits.W)
// }

class FtqRead[T <: Data](private val gen: T)(implicit p: Parameters) extends XSBundle {
  val ptr = Output(new FtqPtr)
  val offset = Output(UInt(log2Ceil(PredictWidth).W))
  val data = Input(gen)
  def apply(ptr: FtqPtr, offset: UInt) = {
    this.ptr := ptr
    this.offset := offset
    this.data
  }
}
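
// FtqRead is a request/response read-port bundle, flipped on the FTQ side: the consumer
// drives ptr/offset and samples data. A minimal (hypothetical) usage sketch on the
// consumer side, assuming `ftqRead` is one of these ports:
//   val pc = ftqRead(someFtqIdx, someFtqOffset)  // sets ptr/offset, returns .data
// Since the FTQ backs these ports with synchronous-read memories, the data generally
// arrives one cycle after the address (the pc reads in Ftq below pair the returned
// bundle with RegNext(req.offset) for exactly this reason).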

class FtqToBpuIO(implicit p: Parameters) extends XSBundle {
  val redirect = Valid(new BranchPredictionRedirect)
  val update = Valid(new BranchPredictionUpdate)
  val enq_ptr = Output(new FtqPtr)
}

class FtqToIfuIO(implicit p: Parameters) extends XSBundle with HasCircularQueuePtrHelper {
  val req = Decoupled(new FetchRequestBundle)
  val redirect = Valid(new Redirect)
  val flushFromBpu = new Bundle {
    // when ifu pipeline is not stalled,
    // a packet from bpu s3 can reach f1 at most
    val s2 = Valid(new FtqPtr)
    val s3 = Valid(new FtqPtr)
    def shouldFlushBy(src: Valid[FtqPtr], idx_to_flush: FtqPtr) = {
      src.valid && !isAfter(src.bits, idx_to_flush)
    }
    def shouldFlushByStage2(idx: FtqPtr) = shouldFlushBy(s2, idx)
    def shouldFlushByStage3(idx: FtqPtr) = shouldFlushBy(s3, idx)
  }
}

trait HasBackendRedirectInfo extends HasXSParameter {
  def numRedirectPcRead = exuParameters.JmpCnt + exuParameters.AluCnt + 1
  def isLoadReplay(r: Valid[Redirect]) = r.bits.flushItself()
}

class FtqToCtrlIO(implicit p: Parameters) extends XSBundle with HasBackendRedirectInfo {
  val pc_reads = Vec(1 + numRedirectPcRead + 1 + 1, Flipped(new FtqRead(UInt(VAddrBits.W))))
  val target_read = Flipped(new FtqRead(UInt(VAddrBits.W)))
  val redirect_s1_real_pc = Output(UInt(VAddrBits.W))
  def getJumpPcRead = pc_reads.head
  def getRedirectPcRead = VecInit(pc_reads.tail.dropRight(2))
  def getRedirectPcReadData = pc_reads.tail.dropRight(2).map(_.data)
  def getMemPredPcRead = pc_reads.init.last
  def getRobFlushPcRead = pc_reads.last
}


class FTBEntryGen(implicit p: Parameters) extends XSModule with HasBackendRedirectInfo with HasBPUParameter {
  val io = IO(new Bundle {
    val start_addr = Input(UInt(VAddrBits.W))
    val old_entry = Input(new FTBEntry)
    val pd = Input(new Ftq_pd_Entry)
    val cfiIndex = Flipped(Valid(UInt(log2Ceil(PredictWidth).W)))
    val target = Input(UInt(VAddrBits.W))
    val hit = Input(Bool())
    val mispredict_vec = Input(Vec(PredictWidth, Bool()))

    val new_entry = Output(new FTBEntry)
    val new_br_insert_pos = Output(Vec(numBr, Bool()))
    val taken_mask = Output(Vec(numBr, Bool()))
    val mispred_mask = Output(Vec(numBr+1, Bool()))

    // for perf counters
    val is_init_entry = Output(Bool())
    val is_old_entry = Output(Bool())
    val is_new_br = Output(Bool())
    val is_jalr_target_modified = Output(Bool())
    val is_always_taken_modified = Output(Bool())
    val is_br_full = Output(Bool())
  })

  // no mispredictions detected at predecode
  val hit = io.hit
  val pd = io.pd

  val init_entry = WireInit(0.U.asTypeOf(new FTBEntry))


  val cfi_is_br = pd.brMask(io.cfiIndex.bits) && io.cfiIndex.valid
  val entry_has_jmp = pd.jmpInfo.valid
  val new_jmp_is_jal  = entry_has_jmp && !pd.jmpInfo.bits(0) && io.cfiIndex.valid
  val new_jmp_is_jalr = entry_has_jmp &&  pd.jmpInfo.bits(0) && io.cfiIndex.valid
  val new_jmp_is_call = entry_has_jmp &&  pd.jmpInfo.bits(1) && io.cfiIndex.valid
  val new_jmp_is_ret  = entry_has_jmp &&  pd.jmpInfo.bits(2) && io.cfiIndex.valid
  val last_jmp_rvi = entry_has_jmp && pd.jmpOffset === (PredictWidth-1).U && !pd.rvcMask.last
  // val last_br_rvi = cfi_is_br && io.cfiIndex.bits === (PredictWidth-1).U && !pd.rvcMask.last

  val cfi_is_jal = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jal
  val cfi_is_jalr = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jalr

  def carryPos = log2Ceil(PredictWidth)+instOffsetBits
  def getLower(pc: UInt) = pc(carryPos-1, instOffsetBits)
  // if not hit, establish a new entry
  init_entry.valid := true.B
  // tag is left for ftb to assign

  // case br
  val init_br_slot = init_entry.getSlotForBr(0)
  when (cfi_is_br) {
    init_br_slot.valid := true.B
    init_br_slot.offset := io.cfiIndex.bits
    init_br_slot.setLowerStatByTarget(io.start_addr, io.target, numBr == 1)
    init_entry.always_taken(0) := true.B // set to always taken on init
  }

  // case jmp
  when (entry_has_jmp) {
    init_entry.tailSlot.offset := pd.jmpOffset
    init_entry.tailSlot.valid := new_jmp_is_jal || new_jmp_is_jalr
    init_entry.tailSlot.setLowerStatByTarget(io.start_addr, Mux(cfi_is_jalr, io.target, pd.jalTarget), isShare=false)
  }

  val jmpPft = getLower(io.start_addr) +& pd.jmpOffset +& Mux(pd.rvcMask(pd.jmpOffset), 1.U, 2.U)
  init_entry.pftAddr := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft, getLower(io.start_addr))
  init_entry.carry   := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft(carryPos-instOffsetBits), true.B)
  init_entry.isJalr := new_jmp_is_jalr
  init_entry.isCall := new_jmp_is_call
  init_entry.isRet  := new_jmp_is_ret
  // that means fall thru points to the middle of an inst
  init_entry.last_may_be_rvi_call := io.cfiIndex.bits === (PredictWidth-1).U && !pd.rvcMask(pd.jmpOffset)

  // if hit, check whether a new cfi(only br is possible) is detected
  val oe = io.old_entry
  val br_recorded_vec = oe.getBrRecordedVec(io.cfiIndex.bits)
  val br_recorded = br_recorded_vec.asUInt.orR
  val is_new_br = cfi_is_br && !br_recorded
  val new_br_offset = io.cfiIndex.bits
  // vec(i) means new br will be inserted BEFORE old br(i)
  val allBrSlotsVec = oe.allSlotsForBr
  val new_br_insert_onehot = VecInit((0 until numBr).map{
    i => i match {
      case 0 =>
        !allBrSlotsVec(0).valid || new_br_offset < allBrSlotsVec(0).offset
      case idx =>
        allBrSlotsVec(idx-1).valid && new_br_offset > allBrSlotsVec(idx-1).offset &&
        (!allBrSlotsVec(idx).valid || new_br_offset < allBrSlotsVec(idx).offset)
    }
  })

  val old_entry_modified = WireInit(io.old_entry)
  for (i <- 0 until numBr) {
    val slot = old_entry_modified.allSlotsForBr(i)
    when (new_br_insert_onehot(i)) {
      slot.valid := true.B
      slot.offset := new_br_offset
      slot.setLowerStatByTarget(io.start_addr, io.target, i == numBr-1)
      old_entry_modified.always_taken(i) := true.B
    }.elsewhen (new_br_offset > oe.allSlotsForBr(i).offset) {
      old_entry_modified.always_taken(i) := false.B
      // all other fields remain unchanged
    }.otherwise {
      // case i == 0, remain unchanged
      if (i != 0) {
        val noNeedToMoveFromFormerSlot = (i == numBr-1).B && !oe.brSlots.last.valid
        when (!noNeedToMoveFromFormerSlot) {
          slot.fromAnotherSlot(oe.allSlotsForBr(i-1))
          old_entry_modified.always_taken(i) := oe.always_taken(i)
        }
      }
    }
  }
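
  // Rough picture of the insertion above, assuming numBr = 2 and branch slots kept
  // sorted by offset: a newly detected branch at new_br_offset is inserted before the
  // first recorded branch with a larger offset, the displaced branch shifts one slot
  // towards the tail, and whatever falls off the last slot is dropped (the pftAddr
  // shrink below handles that case). For example, with hypothetical old offsets {4, 12}
  // and a new branch at offset 7, new_br_insert_onehot = (false, true): the slots become
  // {4, 7} and the branch at offset 12 is squeezed out.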

  // two circumstances:
  // 1. oe: | br | j  |, new br should be in front of j, thus addr of j should be new pft
  // 2. oe: | br | br |, new br could be anywhere between, thus new pft is the addr of either
  //        the previous last br or the new br
  val may_have_to_replace = oe.noEmptySlotForNewBr
  val pft_need_to_change = is_new_br && may_have_to_replace
  // it should either be the given last br or the new br
  when (pft_need_to_change) {
    val new_pft_offset =
      Mux(!new_br_insert_onehot.asUInt.orR,
        new_br_offset, oe.allSlotsForBr.last.offset)

    // set jmp to invalid
    old_entry_modified.pftAddr := getLower(io.start_addr) + new_pft_offset
    old_entry_modified.carry := (getLower(io.start_addr) +& new_pft_offset).head(1).asBool
    old_entry_modified.last_may_be_rvi_call := false.B
    old_entry_modified.isCall := false.B
    old_entry_modified.isRet := false.B
    old_entry_modified.isJalr := false.B
  }

  val old_entry_jmp_target_modified = WireInit(oe)
  val old_target = oe.tailSlot.getTarget(io.start_addr) // may be wrong because we store only 20 lowest bits
  val old_tail_is_jmp = !oe.tailSlot.sharing
  val jalr_target_modified = cfi_is_jalr && (old_target =/= io.target) && old_tail_is_jmp // TODO: pass full jalr target
  when (jalr_target_modified) {
    old_entry_jmp_target_modified.setByJmpTarget(io.start_addr, io.target)
    old_entry_jmp_target_modified.always_taken := 0.U.asTypeOf(Vec(numBr, Bool()))
  }

  val old_entry_always_taken = WireInit(oe)
  val always_taken_modified_vec = Wire(Vec(numBr, Bool())) // whether modified or not
  for (i <- 0 until numBr) {
    old_entry_always_taken.always_taken(i) :=
      oe.always_taken(i) && io.cfiIndex.valid && oe.brValids(i) && io.cfiIndex.bits === oe.brOffset(i)
    always_taken_modified_vec(i) := oe.always_taken(i) && !old_entry_always_taken.always_taken(i)
  }
  val always_taken_modified = always_taken_modified_vec.reduce(_||_)



  val derived_from_old_entry =
    Mux(is_new_br, old_entry_modified,
      Mux(jalr_target_modified, old_entry_jmp_target_modified, old_entry_always_taken))


  io.new_entry := Mux(!hit, init_entry, derived_from_old_entry)

  io.new_br_insert_pos := new_br_insert_onehot
  io.taken_mask := VecInit((io.new_entry.brOffset zip io.new_entry.brValids).map{
    case (off, v) => io.cfiIndex.bits === off && io.cfiIndex.valid && v
  })
  for (i <- 0 until numBr) {
    io.mispred_mask(i) := io.new_entry.brValids(i) && io.mispredict_vec(io.new_entry.brOffset(i))
  }
  io.mispred_mask.last := io.new_entry.jmpValid && io.mispredict_vec(pd.jmpOffset)

  // for perf counters
  io.is_init_entry := !hit
  io.is_old_entry := hit && !is_new_br && !jalr_target_modified && !always_taken_modified
  io.is_new_br := hit && is_new_br
  io.is_jalr_target_modified := hit && jalr_target_modified
  io.is_always_taken_modified := hit && always_taken_modified
  io.is_br_full := hit && is_new_br && may_have_to_replace
}

class Ftq(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper
  with HasBackendRedirectInfo with BPUUtils with HasBPUConst with HasPerfEvents
  with HasICacheParameters{
  val io = IO(new Bundle {
    val fromBpu = Flipped(new BpuToFtqIO)
    val fromIfu = Flipped(new IfuToFtqIO)
    val fromBackend = Flipped(new CtrlToFtqIO)

    val toBpu = new FtqToBpuIO
    val toIfu = new FtqToIfuIO
    val toBackend = new FtqToCtrlIO

    val toPrefetch = new FtqPrefechBundle

    val bpuInfo = new Bundle {
      val bpRight = Output(UInt(XLEN.W))
      val bpWrong = Output(UInt(XLEN.W))
    }
  })
  io.bpuInfo := DontCare

  val backendRedirect = Wire(Valid(new Redirect))
  val backendRedirectReg = RegNext(backendRedirect)

  val stage2Flush = backendRedirect.valid
  val backendFlush = stage2Flush || RegNext(stage2Flush)
  val ifuFlush = Wire(Bool())

  val flush = stage2Flush || RegNext(stage2Flush)

  val allowBpuIn, allowToIfu = WireInit(false.B)
  val flushToIfu = !allowToIfu
  allowBpuIn := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid
  allowToIfu := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid

  val bpuPtr, ifuPtr, ifuWbPtr, commPtr = RegInit(FtqPtr(false.B, 0.U))
  val validEntries = distanceBetween(bpuPtr, commPtr)

  // **********************************************************************
  // **************************** enq from bpu ****************************
  // **********************************************************************
  val new_entry_ready = validEntries < FtqSize.U
  io.fromBpu.resp.ready := new_entry_ready

  val bpu_s2_resp = io.fromBpu.resp.bits.s2
  val bpu_s3_resp = io.fromBpu.resp.bits.s3
  val bpu_s2_redirect = bpu_s2_resp.valid && bpu_s2_resp.hasRedirect
  val bpu_s3_redirect = bpu_s3_resp.valid && bpu_s3_resp.hasRedirect

  io.toBpu.enq_ptr := bpuPtr
  val enq_fire = io.fromBpu.resp.fire() && allowBpuIn // from bpu s1
  val bpu_in_fire = (io.fromBpu.resp.fire() || bpu_s2_redirect || bpu_s3_redirect) && allowBpuIn

  val bpu_in_resp = io.fromBpu.resp.bits.selectedResp
  val bpu_in_stage = io.fromBpu.resp.bits.selectedRespIdx
  val bpu_in_resp_ptr = Mux(bpu_in_stage === BP_S1, bpuPtr, bpu_in_resp.ftq_idx)
  val bpu_in_resp_idx = bpu_in_resp_ptr.value

  // read ports: jumpPc + redirects + loadPred + robFlush + ifuReq1 + ifuReq2 + commitUpdate
  val ftq_pc_mem = Module(new SyncDataModuleTemplate(new Ftq_RF_Components, FtqSize, 1+numRedirectPcRead+2+1+1+1, 1))
  // resp from uBTB
  ftq_pc_mem.io.wen(0) := bpu_in_fire
  ftq_pc_mem.io.waddr(0) := bpu_in_resp_idx
  ftq_pc_mem.io.wdata(0).fromBranchPrediction(bpu_in_resp)

  // ifuRedirect + backendRedirect + commit
  val ftq_redirect_sram = Module(new FtqNRSRAM(new Ftq_Redirect_SRAMEntry, 1+1+1))
  // this info is intended to be enqueued at the last stage of bpu
  ftq_redirect_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid
  ftq_redirect_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftq_redirect_sram.io.wdata.fromBranchPrediction(io.fromBpu.resp.bits.lastStage)
  println(f"ftq redirect SRAM: entry ${ftq_redirect_sram.io.wdata.getWidth} * ${FtqSize} * 3")
  println(f"ftq redirect SRAM: ahead fh ${ftq_redirect_sram.io.wdata.afhob.getWidth} * ${FtqSize} * 3")

  val ftq_meta_1r_sram = Module(new FtqNRSRAM(new Ftq_1R_SRAMEntry, 1))
  // this info is intended to be enqueued at the last stage of bpu
  ftq_meta_1r_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid
  ftq_meta_1r_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftq_meta_1r_sram.io.wdata.meta := io.fromBpu.resp.bits.meta
  // ifuRedirect + backendRedirect + commit
  val ftb_entry_mem = Module(new SyncDataModuleTemplate(new FTBEntry, FtqSize, 1+1+1, 1))
  ftb_entry_mem.io.wen(0) := io.fromBpu.resp.bits.lastStage.valid
  ftb_entry_mem.io.waddr(0) := io.fromBpu.resp.bits.lastStage.ftq_idx.value
  ftb_entry_mem.io.wdata(0) := io.fromBpu.resp.bits.lastStage.ftb_entry
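
  // Storage layout sketch (per FTQ entry), as built up above and below:
  //   - ftq_pc_mem       : Ftq_RF_Components (start / next-line addr) written at enqueue,
  //                        read by the backend pc reads, the two ifu request ports and commit
  //   - ftq_redirect_sram: speculative history / RAS snapshot written at bpu's last stage,
  //                        read on ifu redirect, backend redirect and commit
  //   - ftq_meta_1r_sram : opaque predictor meta, read only at commit for the bpu update
  //   - ftb_entry_mem    : the FTB entry used for this prediction, read for false-hit
  //                        checks and for regenerating the entry at commit
  //   - the registers below (update_target, cfiIndex_vec, mispredict_vec, pred_stage,
  //     commitStateQueue, ...) hold per-entry state that needs multiple writers.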

  // multi-write
  val update_target = Reg(Vec(FtqSize, UInt(VAddrBits.W))) // could be taken target or fallThrough
  val cfiIndex_vec = Reg(Vec(FtqSize, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))))
  val mispredict_vec = Reg(Vec(FtqSize, Vec(PredictWidth, Bool())))
  val pred_stage = Reg(Vec(FtqSize, UInt(2.W)))

  val c_invalid :: c_valid :: c_commited :: Nil = Enum(3)
  val commitStateQueue = RegInit(VecInit(Seq.fill(FtqSize) {
    VecInit(Seq.fill(PredictWidth)(c_invalid))
  }))

  val f_to_send :: f_sent :: Nil = Enum(2)
  val entry_fetch_status = RegInit(VecInit(Seq.fill(FtqSize)(f_sent)))

  val h_not_hit :: h_false_hit :: h_hit :: Nil = Enum(3)
  val entry_hit_status = RegInit(VecInit(Seq.fill(FtqSize)(h_not_hit)))


  when (bpu_in_fire) {
    entry_fetch_status(bpu_in_resp_idx) := f_to_send
    commitStateQueue(bpu_in_resp_idx) := VecInit(Seq.fill(PredictWidth)(c_invalid))
    cfiIndex_vec(bpu_in_resp_idx) := bpu_in_resp.cfiIndex
    mispredict_vec(bpu_in_resp_idx) := WireInit(VecInit(Seq.fill(PredictWidth)(false.B)))
    update_target(bpu_in_resp_idx) := bpu_in_resp.getTarget
    pred_stage(bpu_in_resp_idx) := bpu_in_stage
  }

  bpuPtr := bpuPtr + enq_fire
  ifuPtr := ifuPtr + (io.toIfu.req.fire && allowToIfu)

  // only use ftb result to assign hit status
  when (bpu_s2_resp.valid) {
    entry_hit_status(bpu_s2_resp.ftq_idx.value) := Mux(bpu_s2_resp.full_pred.hit, h_hit, h_not_hit)
  }


  io.toIfu.flushFromBpu.s2.valid := bpu_s2_redirect
  io.toIfu.flushFromBpu.s2.bits := bpu_s2_resp.ftq_idx
  when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect) {
    bpuPtr := bpu_s2_resp.ftq_idx + 1.U
    // only when ifuPtr runs ahead of bpu s2 resp should we recover it
    when (!isBefore(ifuPtr, bpu_s2_resp.ftq_idx)) {
      ifuPtr := bpu_s2_resp.ftq_idx
    }
  }

  io.toIfu.flushFromBpu.s3.valid := bpu_s3_redirect
  io.toIfu.flushFromBpu.s3.bits := bpu_s3_resp.ftq_idx
  when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect) {
    bpuPtr := bpu_s3_resp.ftq_idx + 1.U
    // only when ifuPtr runs ahead of bpu s3 resp should we recover it
    when (!isBefore(ifuPtr, bpu_s3_resp.ftq_idx)) {
      ifuPtr := bpu_s3_resp.ftq_idx
    }
  }

  XSError(isBefore(bpuPtr, ifuPtr) && !isFull(bpuPtr, ifuPtr), "\nifuPtr is before bpuPtr!\n")

  // ****************************************************************
  // **************************** to ifu ****************************
  // ****************************************************************
  val bpu_in_bypass_buf = RegEnable(ftq_pc_mem.io.wdata(0), enable=bpu_in_fire)
  val bpu_in_bypass_ptr = RegNext(bpu_in_resp_ptr)
  val last_cycle_bpu_in = RegNext(bpu_in_fire)
  val last_cycle_to_ifu_fire = RegNext(io.toIfu.req.fire)

  // read pc and target
  ftq_pc_mem.io.raddr.init.init.last := ifuPtr.value
  ftq_pc_mem.io.raddr.init.last := (ifuPtr+1.U).value

  io.toIfu.req.bits.ftqIdx := ifuPtr
  io.toIfu.req.bits.nextStartAddr := update_target(ifuPtr.value)
  io.toIfu.req.bits.ftqOffset := cfiIndex_vec(ifuPtr.value)

  val toIfuPcBundle = Wire(new Ftq_RF_Components)
  val entry_is_to_send = WireInit(false.B)
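
  // Selection of the PC bundle sent to ifu, as implemented by the when-chain below
  // (ftq_pc_mem has a one-cycle read latency, hence the three cases):
  //   1. the entry at ifuPtr was written by bpu in the previous cycle -> use the bypass
  //      buffer, since the pc mem read for it has not completed yet;
  //   2. a request fired to ifu last cycle (ifuPtr advanced)          -> use the data
  //      pre-read at (previous ifuPtr)+1, i.e. the current ifuPtr;
  //   3. otherwise                                                    -> use the data
  //      read for ifuPtr itself last cycle.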
  when (last_cycle_bpu_in && bpu_in_bypass_ptr === ifuPtr) {
    toIfuPcBundle := bpu_in_bypass_buf
    entry_is_to_send := true.B
  }.elsewhen (last_cycle_to_ifu_fire) {
    toIfuPcBundle := ftq_pc_mem.io.rdata.init.last
    entry_is_to_send := RegNext(entry_fetch_status((ifuPtr+1.U).value) === f_to_send)
  }.otherwise {
    toIfuPcBundle := ftq_pc_mem.io.rdata.init.init.last
    entry_is_to_send := RegNext(entry_fetch_status(ifuPtr.value) === f_to_send)
  }

  io.toIfu.req.valid := entry_is_to_send && ifuPtr =/= bpuPtr
  io.toIfu.req.bits.fromFtqPcBundle(toIfuPcBundle)

  // when fall through is smaller in value than start address, there must be a false hit
  when (toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit) {
    when (io.toIfu.req.fire &&
      !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) &&
      !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr)
    ) {
      entry_hit_status(ifuPtr.value) := h_false_hit
      // XSError(true.B, "FTB false hit by fallThroughError, startAddr: %x, fallTHru: %x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr)
    }
    XSDebug(true.B, "fallThruError! start:%x, fallThru:%x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr)
  }

  XSPerfAccumulate(f"fall_through_error_to_ifu", toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit &&
    io.toIfu.req.fire && !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) && !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr))

  val ifu_req_should_be_flushed =
    io.toIfu.flushFromBpu.shouldFlushByStage2(io.toIfu.req.bits.ftqIdx) ||
    io.toIfu.flushFromBpu.shouldFlushByStage3(io.toIfu.req.bits.ftqIdx)

  when (io.toIfu.req.fire && !ifu_req_should_be_flushed) {
    entry_fetch_status(ifuPtr.value) := f_sent
  }

  // *********************************************************************
  // **************************** wb from ifu ****************************
  // *********************************************************************
  val pdWb = io.fromIfu.pdWb
  val pds = pdWb.bits.pd
  val ifu_wb_valid = pdWb.valid
  val ifu_wb_idx = pdWb.bits.ftqIdx.value
  // read ports: commit update
  val ftq_pd_mem = Module(new SyncDataModuleTemplate(new Ftq_pd_Entry, FtqSize, 1, 1))
  ftq_pd_mem.io.wen(0) := ifu_wb_valid
  ftq_pd_mem.io.waddr(0) := pdWb.bits.ftqIdx.value
  ftq_pd_mem.io.wdata(0).fromPdWb(pdWb.bits)

  val hit_pd_valid = entry_hit_status(ifu_wb_idx) === h_hit && ifu_wb_valid
  val hit_pd_mispred = hit_pd_valid && pdWb.bits.misOffset.valid
  val hit_pd_mispred_reg = RegNext(hit_pd_mispred, init=false.B)
  val pd_reg       = RegEnable(pds,             enable = pdWb.valid)
  val start_pc_reg = RegEnable(pdWb.bits.pc(0), enable = pdWb.valid)
  val wb_idx_reg   = RegEnable(ifu_wb_idx,      enable = pdWb.valid)

  when (ifu_wb_valid) {
    val comm_stq_wen = VecInit(pds.map(_.valid).zip(pdWb.bits.instrRange).map{
      case (v, inRange) => v && inRange
    })
    (commitStateQueue(ifu_wb_idx) zip comm_stq_wen).map{
      case (qe, v) => when (v) { qe := c_valid }
    }
  }

  ifuWbPtr := ifuWbPtr + ifu_wb_valid

  ftb_entry_mem.io.raddr.head := ifu_wb_idx
  val has_false_hit = WireInit(false.B)
  when (RegNext(hit_pd_valid)) {
    // check for false hit
    val pred_ftb_entry = ftb_entry_mem.io.rdata.head
    val brSlots = pred_ftb_entry.brSlots
    val tailSlot = pred_ftb_entry.tailSlot
    // we check cfis that bpu predicted

    // bpu predicted branches but denied by predecode
    val br_false_hit =
      brSlots.map{
        s => s.valid && !(pd_reg(s.offset).valid && pd_reg(s.offset).isBr)
      }.reduce(_||_) ||
      (tailSlot.valid && pred_ftb_entry.tailSlot.sharing &&
       !(pd_reg(tailSlot.offset).valid && pd_reg(tailSlot.offset).isBr))

    val jmpOffset = tailSlot.offset
    val jmp_pd = pd_reg(jmpOffset)
    val jal_false_hit = pred_ftb_entry.jmpValid &&
      ((pred_ftb_entry.isJal  && !(jmp_pd.valid && jmp_pd.isJal))  ||
       (pred_ftb_entry.isJalr && !(jmp_pd.valid && jmp_pd.isJalr)) ||
       (pred_ftb_entry.isCall && !(jmp_pd.valid && jmp_pd.isCall)) ||
       (pred_ftb_entry.isRet  && !(jmp_pd.valid && jmp_pd.isRet))
      )

    has_false_hit := br_false_hit || jal_false_hit || hit_pd_mispred_reg
    XSDebug(has_false_hit, "FTB false hit by br or jal or hit_pd, startAddr: %x\n", pdWb.bits.pc(0))

    // assert(!has_false_hit)
  }

  when (has_false_hit) {
    entry_hit_status(wb_idx_reg) := h_false_hit
  }


  // **********************************************************************
  // **************************** backend read ****************************
  // **********************************************************************

  // pc reads
  for ((req, i) <- io.toBackend.pc_reads.zipWithIndex) {
    ftq_pc_mem.io.raddr(i) := req.ptr.value
    req.data := ftq_pc_mem.io.rdata(i).getPc(RegNext(req.offset))
  }
  // target read
  io.toBackend.target_read.data := RegNext(update_target(io.toBackend.target_read.ptr.value))

  // *******************************************************************************
  // **************************** redirect from backend ****************************
  // *******************************************************************************

  // redirect read cfiInfo, couples to redirectGen s2
  ftq_redirect_sram.io.ren.init.last := backendRedirect.valid
  ftq_redirect_sram.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  ftb_entry_mem.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  val stage3CfiInfo = ftq_redirect_sram.io.rdata.init.last
  val fromBackendRedirect = WireInit(backendRedirectReg)
  val backendRedirectCfi = fromBackendRedirect.bits.cfiUpdate
  backendRedirectCfi.fromFtqRedirectSram(stage3CfiInfo)

  val r_ftb_entry = ftb_entry_mem.io.rdata.init.last
  val r_ftqOffset = fromBackendRedirect.bits.ftqOffset

  when (entry_hit_status(fromBackendRedirect.bits.ftqIdx.value) === h_hit) {
    backendRedirectCfi.shift := PopCount(r_ftb_entry.getBrMaskByOffset(r_ftqOffset)) +&
      (backendRedirectCfi.pd.isBr && !r_ftb_entry.brIsSaved(r_ftqOffset) &&
       !r_ftb_entry.newBrCanNotInsert(r_ftqOffset))

    backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr && (r_ftb_entry.brIsSaved(r_ftqOffset) ||
      !r_ftb_entry.newBrCanNotInsert(r_ftqOffset))
  }.otherwise {
    backendRedirectCfi.shift := (backendRedirectCfi.pd.isBr && backendRedirectCfi.taken).asUInt
    backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr.asUInt
  }

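  // Rough intent of the shift/addIntoHist computation above, as far as can be read from
  // this module: on a backend redirect into an entry that hit in the FTB, `shift` counts
  // the branch slots of that entry covered up to the redirected offset (plus one when the
  // redirected instruction is a branch the entry would still have room to record), so the
  // speculative global history can be rewound by the number of bits the prediction had
  // inserted; `addIntoHist` marks whether the redirected branch itself contributes a
  // history bit. For a non-hit entry only the redirected branch, if it is a taken branch,
  // counts.
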

  // ***************************************************************************
  // **************************** redirect from ifu ****************************
  // ***************************************************************************
  val fromIfuRedirect = WireInit(0.U.asTypeOf(Valid(new Redirect)))
  fromIfuRedirect.valid := pdWb.valid && pdWb.bits.misOffset.valid && !backendFlush
  fromIfuRedirect.bits.ftqIdx := pdWb.bits.ftqIdx
  fromIfuRedirect.bits.ftqOffset := pdWb.bits.misOffset.bits
  fromIfuRedirect.bits.level := RedirectLevel.flushAfter

  val ifuRedirectCfiUpdate = fromIfuRedirect.bits.cfiUpdate
  ifuRedirectCfiUpdate.pc := pdWb.bits.pc(pdWb.bits.misOffset.bits)
  ifuRedirectCfiUpdate.pd := pdWb.bits.pd(pdWb.bits.misOffset.bits)
  ifuRedirectCfiUpdate.predTaken := cfiIndex_vec(pdWb.bits.ftqIdx.value).valid
  ifuRedirectCfiUpdate.target := pdWb.bits.target
  ifuRedirectCfiUpdate.taken := pdWb.bits.cfiOffset.valid
  ifuRedirectCfiUpdate.isMisPred := pdWb.bits.misOffset.valid

  val ifuRedirectReg = RegNext(fromIfuRedirect, init=0.U.asTypeOf(Valid(new Redirect)))
  val ifuRedirectToBpu = WireInit(ifuRedirectReg)
  ifuFlush := fromIfuRedirect.valid || ifuRedirectToBpu.valid

  ftq_redirect_sram.io.ren.head := fromIfuRedirect.valid
  ftq_redirect_sram.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value

  ftb_entry_mem.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value

  val toBpuCfi = ifuRedirectToBpu.bits.cfiUpdate
  toBpuCfi.fromFtqRedirectSram(ftq_redirect_sram.io.rdata.head)
  when (ifuRedirectReg.bits.cfiUpdate.pd.isRet) {
    toBpuCfi.target := toBpuCfi.rasEntry.retAddr
  }

  // *********************************************************************
  // **************************** wb from exu ****************************
  // *********************************************************************

  class RedirectGen(implicit p: Parameters) extends XSModule
    with HasCircularQueuePtrHelper {
    val io = IO(new Bundle {
      val in = Flipped((new CtrlToFtqIO).for_redirect_gen)
      val stage1Pc = Input(Vec(numRedirectPcRead, UInt(VAddrBits.W)))
      val out = Valid(new Redirect)
      val s1_real_pc = Output(UInt(VAddrBits.W))
      val debug_diff = Flipped(Valid(new Redirect))
    })
    val s1_jumpTarget = io.in.s1_jumpTarget
    val s1_uop = io.in.s1_oldest_exu_output.bits.uop
    val s1_imm12_reg = s1_uop.ctrl.imm(11,0)
    val s1_pd = s1_uop.cf.pd
    val s1_isReplay = io.in.s1_redirect_onehot.last
    val s1_isJump = io.in.s1_redirect_onehot.head
    val real_pc = Mux1H(io.in.s1_redirect_onehot, io.stage1Pc)
    val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
    val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
    val target = Mux(s1_isReplay,
      real_pc,
      Mux(io.in.s1_oldest_redirect.bits.cfiUpdate.taken,
        Mux(s1_isJump, io.in.s1_jumpTarget, brTarget),
        snpc
      )
    )

    val redirectGenRes = WireInit(io.in.rawRedirect)
    redirectGenRes.bits.cfiUpdate.pc := real_pc
    redirectGenRes.bits.cfiUpdate.pd := s1_pd
    redirectGenRes.bits.cfiUpdate.target := target

    val realRedirect = Wire(Valid(new Redirect))
    realRedirect.valid := redirectGenRes.valid || io.in.flushRedirect.valid
    realRedirect.bits := Mux(io.in.flushRedirect.valid, io.in.flushRedirect.bits, redirectGenRes.bits)

    when (io.in.flushRedirect.valid) {
      realRedirect.bits.level := RedirectLevel.flush
      realRedirect.bits.cfiUpdate.target := io.in.frontendFlushTarget
    }

    io.out := realRedirect
    io.s1_real_pc := real_pc
    XSError((io.debug_diff.valid || realRedirect.valid) && io.debug_diff.asUInt =/= io.out.asUInt, "redirect wrong")

  }

  val redirectGen = Module(new RedirectGen)
  redirectGen.io.in <> io.fromBackend.for_redirect_gen
  redirectGen.io.stage1Pc := io.toBackend.getRedirectPcReadData
  redirectGen.io.debug_diff := io.fromBackend.redirect
  backendRedirect := redirectGen.io.out

  io.toBackend.redirect_s1_real_pc := redirectGen.io.s1_real_pc

  def extractRedirectInfo(wb: Valid[Redirect]) = {
    val ftqIdx = wb.bits.ftqIdx.value
    val ftqOffset = wb.bits.ftqOffset
    val taken = wb.bits.cfiUpdate.taken
    val mispred = wb.bits.cfiUpdate.isMisPred
    (wb.valid, ftqIdx, ftqOffset, taken, mispred)
  }
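
  // The cfi updates below keep, per FTQ entry, the oldest taken control-flow instruction:
  // a redirect reporting a taken cfi at a smaller offset than the recorded one overwrites
  // cfiIndex (bits and valid), while a redirect at exactly the recorded offset only
  // refreshes the valid bit with the new taken flag. Only backend redirects are allowed
  // to write mispredict_vec (isBackend = false for ifu-originated redirects).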

  // fix mispredict entry
  val lastIsMispredict = RegNext(
    backendRedirect.valid && backendRedirect.bits.level === RedirectLevel.flushAfter, init = false.B
  )

  def updateCfiInfo(redirect: Valid[Redirect], isBackend: Boolean = true) = {
    val (r_valid, r_idx, r_offset, r_taken, r_mispred) = extractRedirectInfo(redirect)
    val cfiIndex_bits_wen = r_valid && r_taken && r_offset < cfiIndex_vec(r_idx).bits
    val cfiIndex_valid_wen = r_valid && r_offset === cfiIndex_vec(r_idx).bits
    when (cfiIndex_bits_wen || cfiIndex_valid_wen) {
      cfiIndex_vec(r_idx).valid := cfiIndex_bits_wen || cfiIndex_valid_wen && r_taken
    }
    when (cfiIndex_bits_wen) {
      cfiIndex_vec(r_idx).bits := r_offset
    }
    update_target(r_idx) := redirect.bits.cfiUpdate.target
    if (isBackend) {
      mispredict_vec(r_idx)(r_offset) := r_mispred
    }
  }

  when(backendRedirectReg.valid && lastIsMispredict) {
    updateCfiInfo(backendRedirectReg)
  }.elsewhen (ifuRedirectToBpu.valid) {
    updateCfiInfo(ifuRedirectToBpu, isBackend=false)
  }

  // ***********************************************************************************
  // **************************** flush ptr and state queue ****************************
  // ***********************************************************************************

  val redirectVec = VecInit(backendRedirect, fromIfuRedirect)

  // when redirect, we should reset ptrs and status queues
  when(redirectVec.map(r => r.valid).reduce(_||_)){
    val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits)))
    val notIfu = redirectVec.dropRight(1).map(r => r.valid).reduce(_||_)
    val (idx, offset, flushItSelf) = (r.ftqIdx, r.ftqOffset, RedirectLevel.flushItself(r.level))
    val next = idx + 1.U
    bpuPtr := next
    ifuPtr := next
    ifuWbPtr := next
    when (notIfu) {
      commitStateQueue(idx.value).zipWithIndex.foreach({ case (s, i) =>
        when(i.U > offset || i.U === offset && flushItSelf){
          s := c_invalid
        }
      })
    }
  }

  // only the valid bit is actually needed
  io.toIfu.redirect.bits := backendRedirect.bits
  io.toIfu.redirect.valid := stage2Flush

  // commit
  for (c <- io.fromBackend.rob_commits) {
    when(c.valid) {
      commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset) := c_commited
      // TODO: remove this
      // For instruction fusions, we also update the next instruction
      when (c.bits.commitType === 4.U) {
        commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 1.U) := c_commited
      }.elsewhen(c.bits.commitType === 5.U) {
        commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 2.U) := c_commited
      }.elsewhen(c.bits.commitType === 6.U) {
        val index = (c.bits.ftqIdx + 1.U).value
        commitStateQueue(index)(0) := c_commited
      }.elsewhen(c.bits.commitType === 7.U) {
        val index = (c.bits.ftqIdx + 1.U).value
        commitStateQueue(index)(1) := c_commited
      }
    }
  }

  // ****************************************************************
  // **************************** to bpu ****************************
  // ****************************************************************

  io.toBpu.redirect <> Mux(fromBackendRedirect.valid, fromBackendRedirect, ifuRedirectToBpu)

  val may_have_stall_from_bpu = RegInit(false.B)
  val canCommit = commPtr =/= ifuWbPtr && !may_have_stall_from_bpu &&
    Cat(commitStateQueue(commPtr.value).map(s => {
      s === c_invalid || s === c_commited
    })).andR()

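  // Commit flow sketch: an entry at commPtr can commit once ifu has written it back
  // (commPtr =/= ifuWbPtr), bpu is not known to be stalled on it, and every slot in its
  // commitStateQueue is either committed or was never occupied. The reads below all take
  // one cycle, so the actual update towards bpu is driven by do_commit / do_commit_ptr,
  // which are RegNext'd copies of canCommit / commPtr.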
  // commit reads
  ftq_pc_mem.io.raddr.last := commPtr.value
  val commit_pc_bundle = ftq_pc_mem.io.rdata.last
  ftq_pd_mem.io.raddr.last := commPtr.value
  val commit_pd = ftq_pd_mem.io.rdata.last
  ftq_redirect_sram.io.ren.last := canCommit
  ftq_redirect_sram.io.raddr.last := commPtr.value
  val commit_spec_meta = ftq_redirect_sram.io.rdata.last
  ftq_meta_1r_sram.io.ren(0) := canCommit
  ftq_meta_1r_sram.io.raddr(0) := commPtr.value
  val commit_meta = ftq_meta_1r_sram.io.rdata(0)
  ftb_entry_mem.io.raddr.last := commPtr.value
  val commit_ftb_entry = ftb_entry_mem.io.rdata.last

  // need one cycle to read mem and srams
  val do_commit_ptr = RegNext(commPtr)
  val do_commit = RegNext(canCommit, init=false.B)
  when (canCommit) { commPtr := commPtr + 1.U }
  val commit_state = RegNext(commitStateQueue(commPtr.value))
  val can_commit_cfi = WireInit(cfiIndex_vec(commPtr.value))
  when (commitStateQueue(commPtr.value)(can_commit_cfi.bits) =/= c_commited) {
    can_commit_cfi.valid := false.B
  }
  val commit_cfi = RegNext(can_commit_cfi)

  val commit_mispredict = VecInit((RegNext(mispredict_vec(commPtr.value)) zip commit_state).map {
    case (mis, state) => mis && state === c_commited
  })
  val can_commit_hit = entry_hit_status(commPtr.value)
  val commit_hit = RegNext(can_commit_hit)
  val commit_target = RegNext(update_target(commPtr.value))
  val commit_stage = RegNext(pred_stage(commPtr.value))
  val commit_valid = commit_hit === h_hit || commit_cfi.valid // hit or taken

  val to_bpu_hit = can_commit_hit === h_hit || can_commit_hit === h_false_hit
  may_have_stall_from_bpu := can_commit_cfi.valid && !to_bpu_hit && !may_have_stall_from_bpu

  io.toBpu.update := DontCare
  io.toBpu.update.valid := commit_valid && do_commit
  val update = io.toBpu.update.bits
  update.false_hit := commit_hit === h_false_hit
  update.pc := commit_pc_bundle.startAddr
  update.meta := commit_meta.meta
  update.full_target := commit_target
  update.from_stage := commit_stage
  update.fromFtqRedirectSram(commit_spec_meta)

  val commit_real_hit = commit_hit === h_hit
  val update_ftb_entry = update.ftb_entry

  val ftbEntryGen = Module(new FTBEntryGen).io
  ftbEntryGen.start_addr     := commit_pc_bundle.startAddr
  ftbEntryGen.old_entry      := commit_ftb_entry
  ftbEntryGen.pd             := commit_pd
  ftbEntryGen.cfiIndex       := commit_cfi
  ftbEntryGen.target         := commit_target
  ftbEntryGen.hit            := commit_real_hit
  ftbEntryGen.mispredict_vec := commit_mispredict

  update_ftb_entry         := ftbEntryGen.new_entry
  update.new_br_insert_pos := ftbEntryGen.new_br_insert_pos
  update.mispred_mask      := ftbEntryGen.mispred_mask
  update.old_entry         := ftbEntryGen.is_old_entry
  update.pred_hit          := commit_hit === h_hit || commit_hit === h_false_hit

  update.is_minimal := false.B
  update.full_pred.fromFtbEntry(ftbEntryGen.new_entry, update.pc)
  update.full_pred.br_taken_mask := ftbEntryGen.taken_mask
  update.full_pred.jalr_target := commit_target
  update.full_pred.hit := true.B
  when (update.full_pred.is_jalr) {
    update.full_pred.targets.last := commit_target
  }

  // ****************************************************************
  // *********************** to prefetch ****************************
  // ****************************************************************

  if(cacheParams.hasPrefetch){
    val prefetchPtr = RegInit(FtqPtr(false.B, 0.U))
    prefetchPtr := prefetchPtr + io.toPrefetch.req.fire()

    when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s2_resp.ftq_idx)) {
      prefetchPtr := bpu_s2_resp.ftq_idx
    }

    when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s3_resp.ftq_idx)) {
      prefetchPtr := bpu_s3_resp.ftq_idx
      // XSError(true.B, "\ns3_redirect mechanism not implemented!\n")
    }

    io.toPrefetch.req.valid := prefetchPtr =/= bpuPtr && entry_fetch_status(prefetchPtr.value) === f_to_send
    io.toPrefetch.req.bits.target := update_target(prefetchPtr.value)

    when(redirectVec.map(r => r.valid).reduce(_||_)){
      val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits)))
      val next = r.ftqIdx + 1.U
      prefetchPtr := next
    }

    XSError(isBefore(bpuPtr, prefetchPtr) && !isFull(bpuPtr, prefetchPtr), "\nprefetchPtr is before bpuPtr!\n")
  }
  else {
    io.toPrefetch.req <> DontCare
  }

  // ******************************************************************************
  // **************************** commit perf counters ****************************
  // ******************************************************************************

  val commit_inst_mask        = VecInit(commit_state.map(c => c === c_commited && do_commit)).asUInt
  val commit_mispred_mask     = commit_mispredict.asUInt
  val commit_not_mispred_mask = ~commit_mispred_mask

  val commit_br_mask  = commit_pd.brMask.asUInt
  val commit_jmp_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.jmpInfo.valid.asTypeOf(UInt(1.W)))
  val commit_cfi_mask = (commit_br_mask | commit_jmp_mask)

  val mbpInstrs = commit_inst_mask & commit_cfi_mask

  val mbpRights = mbpInstrs & commit_not_mispred_mask
  val mbpWrongs = mbpInstrs & commit_mispred_mask

  io.bpuInfo.bpRight := PopCount(mbpRights)
  io.bpuInfo.bpWrong := PopCount(mbpWrongs)

  // Cfi Info
  for (i <- 0 until PredictWidth) {
    val pc = commit_pc_bundle.startAddr + (i * instBytes).U
    val v = commit_state(i) === c_commited
    val isBr = commit_pd.brMask(i)
    val isJmp = commit_pd.jmpInfo.valid && commit_pd.jmpOffset === i.U
    val isCfi = isBr || isJmp
    val isTaken = commit_cfi.valid && commit_cfi.bits === i.U
    val misPred = commit_mispredict(i)
    // val ghist = commit_spec_meta.ghist.predHist
    val histPtr = commit_spec_meta.histPtr
    val predCycle = commit_meta.meta(63, 0)
    val target = commit_target

    val brIdx = OHToUInt(Reverse(Cat(update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U})))
    val inFtbEntry = update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U}.reduce(_||_)
    val addIntoHist = ((commit_hit === h_hit) && inFtbEntry) || ((!(commit_hit === h_hit) && i.U === commit_cfi.bits && isBr && commit_cfi.valid))
    XSDebug(v && do_commit && isCfi, p"cfi_update: isBr(${isBr}) pc(${Hexadecimal(pc)}) " +
      p"taken(${isTaken}) mispred(${misPred}) cycle($predCycle) hist(${histPtr.value}) " +
      p"startAddr(${Hexadecimal(commit_pc_bundle.startAddr)}) AddIntoHist(${addIntoHist}) " +
      p"brInEntry(${inFtbEntry}) brIdx(${brIdx}) target(${Hexadecimal(target)})\n")
  }

  val enq = io.fromBpu.resp
  val perf_redirect = backendRedirect

  XSPerfAccumulate("entry", validEntries)
  XSPerfAccumulate("bpu_to_ftq_stall", enq.valid && !enq.ready)
XSPerfAccumulate("mispredictRedirect", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level) 1071 XSPerfAccumulate("replayRedirect", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level)) 1072 XSPerfAccumulate("predecodeRedirect", fromIfuRedirect.valid) 1073 1074 XSPerfAccumulate("to_ifu_bubble", io.toIfu.req.ready && !io.toIfu.req.valid) 1075 1076 XSPerfAccumulate("to_ifu_stall", io.toIfu.req.valid && !io.toIfu.req.ready) 1077 XSPerfAccumulate("from_bpu_real_bubble", !enq.valid && enq.ready && allowBpuIn) 1078 XSPerfAccumulate("bpu_to_ifu_bubble", bpuPtr === ifuPtr) 1079 1080 val from_bpu = io.fromBpu.resp.bits 1081 def in_entry_len_map_gen(resp: BranchPredictionBundle)(stage: String) = { 1082 assert(!resp.is_minimal) 1083 val entry_len = (resp.ftb_entry.getFallThrough(resp.pc) - resp.pc) >> instOffsetBits 1084 val entry_len_recording_vec = (1 to PredictWidth+1).map(i => entry_len === i.U) 1085 val entry_len_map = (1 to PredictWidth+1).map(i => 1086 f"${stage}_ftb_entry_len_$i" -> (entry_len_recording_vec(i-1) && resp.valid) 1087 ).foldLeft(Map[String, UInt]())(_+_) 1088 entry_len_map 1089 } 1090 val s2_entry_len_map = in_entry_len_map_gen(from_bpu.s2)("s2") 1091 val s3_entry_len_map = in_entry_len_map_gen(from_bpu.s3)("s3") 1092 1093 val to_ifu = io.toIfu.req.bits 1094 1095 1096 1097 val commit_num_inst_recording_vec = (1 to PredictWidth).map(i => PopCount(commit_inst_mask) === i.U) 1098 val commit_num_inst_map = (1 to PredictWidth).map(i => 1099 f"commit_num_inst_$i" -> (commit_num_inst_recording_vec(i-1) && do_commit) 1100 ).foldLeft(Map[String, UInt]())(_+_) 1101 1102 1103 1104 val commit_jal_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJal.asTypeOf(UInt(1.W))) 1105 val commit_jalr_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJalr.asTypeOf(UInt(1.W))) 1106 val commit_call_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasCall.asTypeOf(UInt(1.W))) 1107 val commit_ret_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasRet.asTypeOf(UInt(1.W))) 1108 1109 1110 val mbpBRights = mbpRights & commit_br_mask 1111 val mbpJRights = mbpRights & commit_jal_mask 1112 val mbpIRights = mbpRights & commit_jalr_mask 1113 val mbpCRights = mbpRights & commit_call_mask 1114 val mbpRRights = mbpRights & commit_ret_mask 1115 1116 val mbpBWrongs = mbpWrongs & commit_br_mask 1117 val mbpJWrongs = mbpWrongs & commit_jal_mask 1118 val mbpIWrongs = mbpWrongs & commit_jalr_mask 1119 val mbpCWrongs = mbpWrongs & commit_call_mask 1120 val mbpRWrongs = mbpWrongs & commit_ret_mask 1121 1122 val commit_pred_stage = RegNext(pred_stage(commPtr.value)) 1123 1124 def pred_stage_map(src: UInt, name: String) = { 1125 (0 until numBpStages).map(i => 1126 f"${name}_stage_${i+1}" -> PopCount(src.asBools.map(_ && commit_pred_stage === BP_STAGES(i))) 1127 ).foldLeft(Map[String, UInt]())(_+_) 1128 } 1129 1130 val mispred_stage_map = pred_stage_map(mbpWrongs, "mispredict") 1131 val br_mispred_stage_map = pred_stage_map(mbpBWrongs, "br_mispredict") 1132 val jalr_mispred_stage_map = pred_stage_map(mbpIWrongs, "jalr_mispredict") 1133 val correct_stage_map = pred_stage_map(mbpRights, "correct") 1134 val br_correct_stage_map = pred_stage_map(mbpBRights, "br_correct") 1135 val jalr_correct_stage_map = pred_stage_map(mbpIRights, "jalr_correct") 1136 1137 val update_valid = io.toBpu.update.valid 1138 def u(cond: Bool) = update_valid && cond 1139 val ftb_false_hit = u(update.false_hit) 1140 // 
  // assert(!ftb_false_hit)
  val ftb_hit = u(commit_hit === h_hit)

  val ftb_new_entry = u(ftbEntryGen.is_init_entry)
  val ftb_new_entry_only_br = ftb_new_entry && !update_ftb_entry.jmpValid
  val ftb_new_entry_only_jmp = ftb_new_entry && !update_ftb_entry.brValids(0)
  val ftb_new_entry_has_br_and_jmp = ftb_new_entry && update_ftb_entry.brValids(0) && update_ftb_entry.jmpValid

  val ftb_old_entry = u(ftbEntryGen.is_old_entry)

  val ftb_modified_entry = u(ftbEntryGen.is_new_br || ftbEntryGen.is_jalr_target_modified || ftbEntryGen.is_always_taken_modified)
  val ftb_modified_entry_new_br = u(ftbEntryGen.is_new_br)
  val ftb_modified_entry_jalr_target_modified = u(ftbEntryGen.is_jalr_target_modified)
  val ftb_modified_entry_br_full = ftb_modified_entry && ftbEntryGen.is_br_full
  val ftb_modified_entry_always_taken = ftb_modified_entry && ftbEntryGen.is_always_taken_modified

  val ftb_entry_len = (ftbEntryGen.new_entry.getFallThrough(update.pc) - update.pc) >> instOffsetBits
  val ftb_entry_len_recording_vec = (1 to PredictWidth+1).map(i => ftb_entry_len === i.U)
  val ftb_init_entry_len_map = (1 to PredictWidth+1).map(i =>
    f"ftb_init_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_new_entry)
  ).foldLeft(Map[String, UInt]())(_+_)
  val ftb_modified_entry_len_map = (1 to PredictWidth+1).map(i =>
    f"ftb_modified_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_modified_entry)
  ).foldLeft(Map[String, UInt]())(_+_)

  val ftq_occupancy_map = (0 to FtqSize).map(i =>
    f"ftq_has_entry_$i" -> (validEntries === i.U)
  ).foldLeft(Map[String, UInt]())(_+_)

  val perfCountsMap = Map(
    "BpInstr"  -> PopCount(mbpInstrs),
    "BpBInstr" -> PopCount(mbpBRights | mbpBWrongs),
    "BpRight"  -> PopCount(mbpRights),
    "BpWrong"  -> PopCount(mbpWrongs),
    "BpBRight" -> PopCount(mbpBRights),
    "BpBWrong" -> PopCount(mbpBWrongs),
    "BpJRight" -> PopCount(mbpJRights),
    "BpJWrong" -> PopCount(mbpJWrongs),
    "BpIRight" -> PopCount(mbpIRights),
    "BpIWrong" -> PopCount(mbpIWrongs),
    "BpCRight" -> PopCount(mbpCRights),
    "BpCWrong" -> PopCount(mbpCWrongs),
    "BpRRight" -> PopCount(mbpRRights),
    "BpRWrong" -> PopCount(mbpRWrongs),

    "ftb_false_hit"                   -> PopCount(ftb_false_hit),
    "ftb_hit"                         -> PopCount(ftb_hit),
    "ftb_new_entry"                   -> PopCount(ftb_new_entry),
    "ftb_new_entry_only_br"           -> PopCount(ftb_new_entry_only_br),
    "ftb_new_entry_only_jmp"          -> PopCount(ftb_new_entry_only_jmp),
    "ftb_new_entry_has_br_and_jmp"    -> PopCount(ftb_new_entry_has_br_and_jmp),
    "ftb_old_entry"                   -> PopCount(ftb_old_entry),
    "ftb_modified_entry"              -> PopCount(ftb_modified_entry),
    "ftb_modified_entry_new_br"       -> PopCount(ftb_modified_entry_new_br),
    "ftb_jalr_target_modified"        -> PopCount(ftb_modified_entry_jalr_target_modified),
    "ftb_modified_entry_br_full"      -> PopCount(ftb_modified_entry_br_full),
    "ftb_modified_entry_always_taken" -> PopCount(ftb_modified_entry_always_taken)
  ) ++ ftb_init_entry_len_map ++ ftb_modified_entry_len_map ++ s2_entry_len_map ++
  s3_entry_len_map ++ commit_num_inst_map ++ ftq_occupancy_map ++
  mispred_stage_map ++ br_mispred_stage_map ++ jalr_mispred_stage_map ++
  correct_stage_map ++ br_correct_stage_map ++ jalr_correct_stage_map

  for((key, value) <- perfCountsMap) {
    XSPerfAccumulate(key, value)
  }

  // --------------------------- Debug --------------------------------
  // XSDebug(enq_fire, p"enq! " + io.fromBpu.resp.bits.toPrintable)
  XSDebug(io.toIfu.req.fire, p"fire to ifu " + io.toIfu.req.bits.toPrintable)
  XSDebug(do_commit, p"deq! [ptr] $do_commit_ptr\n")
  XSDebug(true.B, p"[bpuPtr] $bpuPtr, [ifuPtr] $ifuPtr, [ifuWbPtr] $ifuWbPtr [commPtr] $commPtr\n")
  XSDebug(true.B, p"[in] v:${io.fromBpu.resp.valid} r:${io.fromBpu.resp.ready} " +
    p"[out] v:${io.toIfu.req.valid} r:${io.toIfu.req.ready}\n")
  XSDebug(do_commit, p"[deq info] cfiIndex: $commit_cfi, $commit_pc_bundle, target: ${Hexadecimal(commit_target)}\n")

  // def ubtbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //     case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ Mux(ans.hit.asBool,
  //           Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
  //             !taken),
  //           !taken),
  //         false.B)
  //   }
  // }

  // def btbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //     case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ Mux(ans.hit.asBool,
  //           Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
  //             !taken),
  //           !taken),
  //         false.B)
  //   }
  // }

  // def tageCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //     case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isBr,
  //         isWrong ^ (ans.taken.asBool === taken),
  //         false.B)
  //   }
  // }

  // def loopCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //     case (((valid, pd), ans), taken) =>
  //       Mux(valid && (pd.isBr) && ans.hit.asBool,
  //         isWrong ^ (!taken),
  //         false.B)
  //   }
  // }

  // def rasCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
  //     case (((valid, pd), ans), taken) =>
  //       Mux(valid && pd.isRet.asBool /*&& taken*/ && ans.hit.asBool,
  //         isWrong ^ (ans.target === commitEntry.target),
  //         false.B)
  //   }
  // }

  // val ubtbRights = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), false.B)
  // val ubtbWrongs = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), true.B)
  // // btb and ubtb pred jal and jalr as well
  // val btbRights = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), false.B)
  // val btbWrongs = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), true.B)
  // val tageRights = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), false.B)
  // val tageWrongs = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), true.B)

  // val loopRights = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), false.B)
  // val loopWrongs = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), true.B)

  // val rasRights = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), false.B)
  // val rasWrongs = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), true.B)

  val perfEvents = Seq(
    ("bpu_s2_redirect        ", bpu_s2_redirect                                                             ),
    ("bpu_s3_redirect        ", bpu_s3_redirect                                                             ),
("bpu_to_ftq_stall ", enq.valid && ~enq.ready ), 1284 ("mispredictRedirect ", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level), 1285 ("replayRedirect ", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level) ), 1286 ("predecodeRedirect ", fromIfuRedirect.valid ), 1287 ("to_ifu_bubble ", io.toIfu.req.ready && !io.toIfu.req.valid ), 1288 ("from_bpu_real_bubble ", !enq.valid && enq.ready && allowBpuIn ), 1289 ("BpInstr ", PopCount(mbpInstrs) ), 1290 ("BpBInstr ", PopCount(mbpBRights | mbpBWrongs) ), 1291 ("BpRight ", PopCount(mbpRights) ), 1292 ("BpWrong ", PopCount(mbpWrongs) ), 1293 ("BpBRight ", PopCount(mbpBRights) ), 1294 ("BpBWrong ", PopCount(mbpBWrongs) ), 1295 ("BpJRight ", PopCount(mbpJRights) ), 1296 ("BpJWrong ", PopCount(mbpJWrongs) ), 1297 ("BpIRight ", PopCount(mbpIRights) ), 1298 ("BpIWrong ", PopCount(mbpIWrongs) ), 1299 ("BpCRight ", PopCount(mbpCRights) ), 1300 ("BpCWrong ", PopCount(mbpCWrongs) ), 1301 ("BpRRight ", PopCount(mbpRRights) ), 1302 ("BpRWrong ", PopCount(mbpRWrongs) ), 1303 ("ftb_false_hit ", PopCount(ftb_false_hit) ), 1304 ("ftb_hit ", PopCount(ftb_hit) ), 1305 ) 1306 generatePerfEvent() 1307} 1308