109c6f1ddSLingrui98/*************************************************************************************** 209c6f1ddSLingrui98* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 309c6f1ddSLingrui98* Copyright (c) 2020-2021 Peng Cheng Laboratory 409c6f1ddSLingrui98* 509c6f1ddSLingrui98* XiangShan is licensed under Mulan PSL v2. 609c6f1ddSLingrui98* You can use this software according to the terms and conditions of the Mulan PSL v2. 709c6f1ddSLingrui98* You may obtain a copy of Mulan PSL v2 at: 809c6f1ddSLingrui98* http://license.coscl.org.cn/MulanPSL2 909c6f1ddSLingrui98* 1009c6f1ddSLingrui98* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 1109c6f1ddSLingrui98* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 1209c6f1ddSLingrui98* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 1309c6f1ddSLingrui98* 1409c6f1ddSLingrui98* See the Mulan PSL v2 for more details. 1509c6f1ddSLingrui98***************************************************************************************/ 1609c6f1ddSLingrui98 1709c6f1ddSLingrui98package xiangshan.frontend 1809c6f1ddSLingrui98 1909c6f1ddSLingrui98import chipsalliance.rocketchip.config.Parameters 2009c6f1ddSLingrui98import chisel3._ 2109c6f1ddSLingrui98import chisel3.util._ 221ca0e4f3SYinan Xuimport utils._ 2309c6f1ddSLingrui98import xiangshan._ 24e30430c2SJayimport xiangshan.frontend.icache._ 251ca0e4f3SYinan Xuimport xiangshan.backend.CtrlToFtqIO 262e1be6e1SSteve Gouimport xiangshan.backend.decode.ImmUnion 2709c6f1ddSLingrui98 2809c6f1ddSLingrui98class FtqPtr(implicit p: Parameters) extends CircularQueuePtr[FtqPtr]( 2909c6f1ddSLingrui98 p => p(XSCoreParamsKey).FtqSize 3009c6f1ddSLingrui98){ 3109c6f1ddSLingrui98} 3209c6f1ddSLingrui98 3309c6f1ddSLingrui98object FtqPtr { 3409c6f1ddSLingrui98 def apply(f: Bool, v: UInt)(implicit p: Parameters): FtqPtr = { 3509c6f1ddSLingrui98 val ptr = Wire(new FtqPtr) 3609c6f1ddSLingrui98 ptr.flag := f 3709c6f1ddSLingrui98 ptr.value := v 3809c6f1ddSLingrui98 ptr 3909c6f1ddSLingrui98 } 4009c6f1ddSLingrui98 def inverse(ptr: FtqPtr)(implicit p: Parameters): FtqPtr = { 4109c6f1ddSLingrui98 apply(!ptr.flag, ptr.value) 4209c6f1ddSLingrui98 } 4309c6f1ddSLingrui98} 4409c6f1ddSLingrui98 4509c6f1ddSLingrui98class FtqNRSRAM[T <: Data](gen: T, numRead: Int)(implicit p: Parameters) extends XSModule { 4609c6f1ddSLingrui98 4709c6f1ddSLingrui98 val io = IO(new Bundle() { 4809c6f1ddSLingrui98 val raddr = Input(Vec(numRead, UInt(log2Up(FtqSize).W))) 4909c6f1ddSLingrui98 val ren = Input(Vec(numRead, Bool())) 5009c6f1ddSLingrui98 val rdata = Output(Vec(numRead, gen)) 5109c6f1ddSLingrui98 val waddr = Input(UInt(log2Up(FtqSize).W)) 5209c6f1ddSLingrui98 val wen = Input(Bool()) 5309c6f1ddSLingrui98 val wdata = Input(gen) 5409c6f1ddSLingrui98 }) 5509c6f1ddSLingrui98 5609c6f1ddSLingrui98 for(i <- 0 until numRead){ 5709c6f1ddSLingrui98 val sram = Module(new SRAMTemplate(gen, FtqSize)) 5809c6f1ddSLingrui98 sram.io.r.req.valid := io.ren(i) 5909c6f1ddSLingrui98 sram.io.r.req.bits.setIdx := io.raddr(i) 6009c6f1ddSLingrui98 io.rdata(i) := sram.io.r.resp.data(0) 6109c6f1ddSLingrui98 sram.io.w.req.valid := io.wen 6209c6f1ddSLingrui98 sram.io.w.req.bits.setIdx := io.waddr 6309c6f1ddSLingrui98 sram.io.w.req.bits.data := VecInit(io.wdata) 6409c6f1ddSLingrui98 } 6509c6f1ddSLingrui98 6609c6f1ddSLingrui98} 6709c6f1ddSLingrui98 6809c6f1ddSLingrui98class Ftq_RF_Components(implicit p: Parameters) extends XSBundle with BPUUtils { 6909c6f1ddSLingrui98 val startAddr = 
UInt(VAddrBits.W) 70b37e4b45SLingrui98 val nextLineAddr = UInt(VAddrBits.W) 7109c6f1ddSLingrui98 val isNextMask = Vec(PredictWidth, Bool()) 72b37e4b45SLingrui98 val fallThruError = Bool() 73b37e4b45SLingrui98 // val carry = Bool() 7409c6f1ddSLingrui98 def getPc(offset: UInt) = { 7585215037SLingrui98 def getHigher(pc: UInt) = pc(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1) 7685215037SLingrui98 def getOffset(pc: UInt) = pc(log2Ceil(PredictWidth)+instOffsetBits, instOffsetBits) 77b37e4b45SLingrui98 Cat(getHigher(Mux(isNextMask(offset) && startAddr(log2Ceil(PredictWidth)+instOffsetBits), nextLineAddr, startAddr)), 7809c6f1ddSLingrui98 getOffset(startAddr)+offset, 0.U(instOffsetBits.W)) 7909c6f1ddSLingrui98 } 8009c6f1ddSLingrui98 def fromBranchPrediction(resp: BranchPredictionBundle) = { 81a229ab6cSLingrui98 def carryPos(addr: UInt) = addr(instOffsetBits+log2Ceil(PredictWidth)+1) 8209c6f1ddSLingrui98 this.startAddr := resp.pc 83a60a2901SLingrui98 this.nextLineAddr := resp.pc + (FetchWidth * 4 * 2).U // may be broken on other configs 8409c6f1ddSLingrui98 this.isNextMask := VecInit((0 until PredictWidth).map(i => 8509c6f1ddSLingrui98 (resp.pc(log2Ceil(PredictWidth), 1) +& i.U)(log2Ceil(PredictWidth)).asBool() 8609c6f1ddSLingrui98 )) 87b37e4b45SLingrui98 this.fallThruError := resp.fallThruError 8809c6f1ddSLingrui98 this 8909c6f1ddSLingrui98 } 9009c6f1ddSLingrui98 override def toPrintable: Printable = { 91b37e4b45SLingrui98 p"startAddr:${Hexadecimal(startAddr)}" 9209c6f1ddSLingrui98 } 9309c6f1ddSLingrui98} 9409c6f1ddSLingrui98 9509c6f1ddSLingrui98class Ftq_pd_Entry(implicit p: Parameters) extends XSBundle { 9609c6f1ddSLingrui98 val brMask = Vec(PredictWidth, Bool()) 9709c6f1ddSLingrui98 val jmpInfo = ValidUndirectioned(Vec(3, Bool())) 9809c6f1ddSLingrui98 val jmpOffset = UInt(log2Ceil(PredictWidth).W) 9909c6f1ddSLingrui98 val jalTarget = UInt(VAddrBits.W) 10009c6f1ddSLingrui98 val rvcMask = Vec(PredictWidth, Bool()) 10109c6f1ddSLingrui98 def hasJal = jmpInfo.valid && !jmpInfo.bits(0) 10209c6f1ddSLingrui98 def hasJalr = jmpInfo.valid && jmpInfo.bits(0) 10309c6f1ddSLingrui98 def hasCall = jmpInfo.valid && jmpInfo.bits(1) 10409c6f1ddSLingrui98 def hasRet = jmpInfo.valid && jmpInfo.bits(2) 10509c6f1ddSLingrui98 10609c6f1ddSLingrui98 def fromPdWb(pdWb: PredecodeWritebackBundle) = { 10709c6f1ddSLingrui98 val pds = pdWb.pd 10809c6f1ddSLingrui98 this.brMask := VecInit(pds.map(pd => pd.isBr && pd.valid)) 10909c6f1ddSLingrui98 this.jmpInfo.valid := VecInit(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid)).asUInt.orR 11009c6f1ddSLingrui98 this.jmpInfo.bits := ParallelPriorityMux(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid), 11109c6f1ddSLingrui98 pds.map(pd => VecInit(pd.isJalr, pd.isCall, pd.isRet))) 11209c6f1ddSLingrui98 this.jmpOffset := ParallelPriorityEncoder(pds.map(pd => (pd.isJal || pd.isJalr) && pd.valid)) 11309c6f1ddSLingrui98 this.rvcMask := VecInit(pds.map(pd => pd.isRVC)) 11409c6f1ddSLingrui98 this.jalTarget := pdWb.jalTarget 11509c6f1ddSLingrui98 } 11609c6f1ddSLingrui98 11709c6f1ddSLingrui98 def toPd(offset: UInt) = { 11809c6f1ddSLingrui98 require(offset.getWidth == log2Ceil(PredictWidth)) 11909c6f1ddSLingrui98 val pd = Wire(new PreDecodeInfo) 12009c6f1ddSLingrui98 pd.valid := true.B 12109c6f1ddSLingrui98 pd.isRVC := rvcMask(offset) 12209c6f1ddSLingrui98 val isBr = brMask(offset) 12309c6f1ddSLingrui98 val isJalr = offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(0) 12409c6f1ddSLingrui98 pd.brType := Cat(offset === jmpOffset && jmpInfo.valid, isJalr || isBr) 
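    // the Cat above reconstructs the 2-bit brType from the packed jump info:
    // bit 1 = this offset holds the block's jal/jalr, bit 0 = jalr or br,
    // assumed to line up with the BrType encoding (00 notCFI, 01 branch, 10 jal, 11 jalr)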
12509c6f1ddSLingrui98 pd.isCall := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(1) 12609c6f1ddSLingrui98 pd.isRet := offset === jmpOffset && jmpInfo.valid && jmpInfo.bits(2) 12709c6f1ddSLingrui98 pd 12809c6f1ddSLingrui98 } 12909c6f1ddSLingrui98} 13009c6f1ddSLingrui98 13109c6f1ddSLingrui98 13209c6f1ddSLingrui98 13309c6f1ddSLingrui98class Ftq_Redirect_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst { 13409c6f1ddSLingrui98 val rasSp = UInt(log2Ceil(RasSize).W) 13509c6f1ddSLingrui98 val rasEntry = new RASEntry 136b37e4b45SLingrui98 // val specCnt = Vec(numBr, UInt(10.W)) 137c2ad24ebSLingrui98 // val ghist = new ShiftingGlobalHistory 138dd6c0695SLingrui98 val folded_hist = new AllFoldedHistories(foldedGHistInfos) 13967402d75SLingrui98 val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos) 14067402d75SLingrui98 val lastBrNumOH = UInt((numBr+1).W) 14167402d75SLingrui98 142c2ad24ebSLingrui98 val histPtr = new CGHPtr 14309c6f1ddSLingrui98 14409c6f1ddSLingrui98 def fromBranchPrediction(resp: BranchPredictionBundle) = { 145b37e4b45SLingrui98 assert(!resp.is_minimal) 14609c6f1ddSLingrui98 this.rasSp := resp.rasSp 14709c6f1ddSLingrui98 this.rasEntry := resp.rasTop 148dd6c0695SLingrui98 this.folded_hist := resp.folded_hist 14967402d75SLingrui98 this.afhob := resp.afhob 15067402d75SLingrui98 this.lastBrNumOH := resp.lastBrNumOH 151c2ad24ebSLingrui98 this.histPtr := resp.histPtr 15209c6f1ddSLingrui98 this 15309c6f1ddSLingrui98 } 15409c6f1ddSLingrui98} 15509c6f1ddSLingrui98 15609c6f1ddSLingrui98class Ftq_1R_SRAMEntry(implicit p: Parameters) extends XSBundle with HasBPUConst { 15709c6f1ddSLingrui98 val meta = UInt(MaxMetaLength.W) 15809c6f1ddSLingrui98} 15909c6f1ddSLingrui98 16009c6f1ddSLingrui98class Ftq_Pred_Info(implicit p: Parameters) extends XSBundle { 16109c6f1ddSLingrui98 val target = UInt(VAddrBits.W) 16209c6f1ddSLingrui98 val cfiIndex = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) 16309c6f1ddSLingrui98} 16409c6f1ddSLingrui98 16509c6f1ddSLingrui98 16609c6f1ddSLingrui98class FtqRead[T <: Data](private val gen: T)(implicit p: Parameters) extends XSBundle { 16709c6f1ddSLingrui98 val ptr = Output(new FtqPtr) 16809c6f1ddSLingrui98 val offset = Output(UInt(log2Ceil(PredictWidth).W)) 16909c6f1ddSLingrui98 val data = Input(gen) 17009c6f1ddSLingrui98 def apply(ptr: FtqPtr, offset: UInt) = { 17109c6f1ddSLingrui98 this.ptr := ptr 17209c6f1ddSLingrui98 this.offset := offset 17309c6f1ddSLingrui98 this.data 17409c6f1ddSLingrui98 } 17509c6f1ddSLingrui98} 17609c6f1ddSLingrui98 17709c6f1ddSLingrui98 17809c6f1ddSLingrui98class FtqToBpuIO(implicit p: Parameters) extends XSBundle { 17909c6f1ddSLingrui98 val redirect = Valid(new BranchPredictionRedirect) 18009c6f1ddSLingrui98 val update = Valid(new BranchPredictionUpdate) 18109c6f1ddSLingrui98 val enq_ptr = Output(new FtqPtr) 18209c6f1ddSLingrui98} 18309c6f1ddSLingrui98 18409c6f1ddSLingrui98class FtqToIfuIO(implicit p: Parameters) extends XSBundle with HasCircularQueuePtrHelper { 18509c6f1ddSLingrui98 val req = Decoupled(new FetchRequestBundle) 18609c6f1ddSLingrui98 val redirect = Valid(new Redirect) 18709c6f1ddSLingrui98 val flushFromBpu = new Bundle { 18809c6f1ddSLingrui98 // when ifu pipeline is not stalled, 18909c6f1ddSLingrui98 // a packet from bpu s3 can reach f1 at most 19009c6f1ddSLingrui98 val s2 = Valid(new FtqPtr) 191cb4f77ceSLingrui98 val s3 = Valid(new FtqPtr) 19209c6f1ddSLingrui98 def shouldFlushBy(src: Valid[FtqPtr], idx_to_flush: FtqPtr) = { 19309c6f1ddSLingrui98 src.valid && !isAfter(src.bits, idx_to_flush) 
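      // i.e. flush the in-flight request at idx_to_flush when a valid s2/s3 redirect
      // points at an entry that is not younger than it (src.bits <= idx_to_flush);
      // used below as shouldFlushByStage2/shouldFlushByStage3 on the ifu request's ftqIdx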
19409c6f1ddSLingrui98 } 19509c6f1ddSLingrui98 def shouldFlushByStage2(idx: FtqPtr) = shouldFlushBy(s2, idx) 196cb4f77ceSLingrui98 def shouldFlushByStage3(idx: FtqPtr) = shouldFlushBy(s3, idx) 19709c6f1ddSLingrui98 } 19809c6f1ddSLingrui98} 19909c6f1ddSLingrui98 200*c5c5edaeSJeniusclass FtqToICacheIO(implicit p: Parameters) extends XSBundle with HasCircularQueuePtrHelper { 201*c5c5edaeSJenius //NOTE: req.bits must be prepare in T cycle 202*c5c5edaeSJenius // while req.valid is set true in T + 1 cycle 203*c5c5edaeSJenius val req = Decoupled(new FtqToICacheRequestBundle) 204*c5c5edaeSJenius} 205*c5c5edaeSJenius 20609c6f1ddSLingrui98trait HasBackendRedirectInfo extends HasXSParameter { 2072e1be6e1SSteve Gou def numRedirectPcRead = exuParameters.JmpCnt + exuParameters.AluCnt + 1 20809c6f1ddSLingrui98 def isLoadReplay(r: Valid[Redirect]) = r.bits.flushItself() 20909c6f1ddSLingrui98} 21009c6f1ddSLingrui98 21109c6f1ddSLingrui98class FtqToCtrlIO(implicit p: Parameters) extends XSBundle with HasBackendRedirectInfo { 212b56f947eSYinan Xu // write to backend pc mem 213b56f947eSYinan Xu val pc_mem_wen = Output(Bool()) 214b56f947eSYinan Xu val pc_mem_waddr = Output(UInt(log2Ceil(FtqSize).W)) 215b56f947eSYinan Xu val pc_mem_wdata = Output(new Ftq_RF_Components) 216b56f947eSYinan Xu val target = Output(UInt(VAddrBits.W)) 217b56f947eSYinan Xu // predecode correct target 218b56f947eSYinan Xu val pd_redirect_waddr = Valid(UInt(log2Ceil(FtqSize).W)) 219b56f947eSYinan Xu val pd_redirect_target = Output(UInt(VAddrBits.W)) 22009c6f1ddSLingrui98} 22109c6f1ddSLingrui98 22209c6f1ddSLingrui98 22309c6f1ddSLingrui98class FTBEntryGen(implicit p: Parameters) extends XSModule with HasBackendRedirectInfo with HasBPUParameter { 22409c6f1ddSLingrui98 val io = IO(new Bundle { 22509c6f1ddSLingrui98 val start_addr = Input(UInt(VAddrBits.W)) 22609c6f1ddSLingrui98 val old_entry = Input(new FTBEntry) 22709c6f1ddSLingrui98 val pd = Input(new Ftq_pd_Entry) 22809c6f1ddSLingrui98 val cfiIndex = Flipped(Valid(UInt(log2Ceil(PredictWidth).W))) 22909c6f1ddSLingrui98 val target = Input(UInt(VAddrBits.W)) 23009c6f1ddSLingrui98 val hit = Input(Bool()) 23109c6f1ddSLingrui98 val mispredict_vec = Input(Vec(PredictWidth, Bool())) 23209c6f1ddSLingrui98 23309c6f1ddSLingrui98 val new_entry = Output(new FTBEntry) 23409c6f1ddSLingrui98 val new_br_insert_pos = Output(Vec(numBr, Bool())) 23509c6f1ddSLingrui98 val taken_mask = Output(Vec(numBr, Bool())) 23609c6f1ddSLingrui98 val mispred_mask = Output(Vec(numBr+1, Bool())) 23709c6f1ddSLingrui98 23809c6f1ddSLingrui98 // for perf counters 23909c6f1ddSLingrui98 val is_init_entry = Output(Bool()) 24009c6f1ddSLingrui98 val is_old_entry = Output(Bool()) 24109c6f1ddSLingrui98 val is_new_br = Output(Bool()) 24209c6f1ddSLingrui98 val is_jalr_target_modified = Output(Bool()) 24309c6f1ddSLingrui98 val is_always_taken_modified = Output(Bool()) 24409c6f1ddSLingrui98 val is_br_full = Output(Bool()) 24509c6f1ddSLingrui98 }) 24609c6f1ddSLingrui98 24709c6f1ddSLingrui98 // no mispredictions detected at predecode 24809c6f1ddSLingrui98 val hit = io.hit 24909c6f1ddSLingrui98 val pd = io.pd 25009c6f1ddSLingrui98 25109c6f1ddSLingrui98 val init_entry = WireInit(0.U.asTypeOf(new FTBEntry)) 25209c6f1ddSLingrui98 25309c6f1ddSLingrui98 25409c6f1ddSLingrui98 val cfi_is_br = pd.brMask(io.cfiIndex.bits) && io.cfiIndex.valid 25509c6f1ddSLingrui98 val entry_has_jmp = pd.jmpInfo.valid 25609c6f1ddSLingrui98 val new_jmp_is_jal = entry_has_jmp && !pd.jmpInfo.bits(0) && io.cfiIndex.valid 25709c6f1ddSLingrui98 val new_jmp_is_jalr = 
entry_has_jmp && pd.jmpInfo.bits(0) && io.cfiIndex.valid 25809c6f1ddSLingrui98 val new_jmp_is_call = entry_has_jmp && pd.jmpInfo.bits(1) && io.cfiIndex.valid 25909c6f1ddSLingrui98 val new_jmp_is_ret = entry_has_jmp && pd.jmpInfo.bits(2) && io.cfiIndex.valid 26009c6f1ddSLingrui98 val last_jmp_rvi = entry_has_jmp && pd.jmpOffset === (PredictWidth-1).U && !pd.rvcMask.last 261a60a2901SLingrui98 // val last_br_rvi = cfi_is_br && io.cfiIndex.bits === (PredictWidth-1).U && !pd.rvcMask.last 26209c6f1ddSLingrui98 26309c6f1ddSLingrui98 val cfi_is_jal = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jal 26409c6f1ddSLingrui98 val cfi_is_jalr = io.cfiIndex.bits === pd.jmpOffset && new_jmp_is_jalr 26509c6f1ddSLingrui98 266a60a2901SLingrui98 def carryPos = log2Ceil(PredictWidth)+instOffsetBits 26709c6f1ddSLingrui98 def getLower(pc: UInt) = pc(carryPos-1, instOffsetBits) 26809c6f1ddSLingrui98 // if not hit, establish a new entry 26909c6f1ddSLingrui98 init_entry.valid := true.B 27009c6f1ddSLingrui98 // tag is left for ftb to assign 271eeb5ff92SLingrui98 272eeb5ff92SLingrui98 // case br 273eeb5ff92SLingrui98 val init_br_slot = init_entry.getSlotForBr(0) 274eeb5ff92SLingrui98 when (cfi_is_br) { 275eeb5ff92SLingrui98 init_br_slot.valid := true.B 276eeb5ff92SLingrui98 init_br_slot.offset := io.cfiIndex.bits 277b37e4b45SLingrui98 init_br_slot.setLowerStatByTarget(io.start_addr, io.target, numBr == 1) 278eeb5ff92SLingrui98 init_entry.always_taken(0) := true.B // set to always taken on init 279eeb5ff92SLingrui98 } 280eeb5ff92SLingrui98 281eeb5ff92SLingrui98 // case jmp 282eeb5ff92SLingrui98 when (entry_has_jmp) { 283eeb5ff92SLingrui98 init_entry.tailSlot.offset := pd.jmpOffset 284eeb5ff92SLingrui98 init_entry.tailSlot.valid := new_jmp_is_jal || new_jmp_is_jalr 285eeb5ff92SLingrui98 init_entry.tailSlot.setLowerStatByTarget(io.start_addr, Mux(cfi_is_jalr, io.target, pd.jalTarget), isShare=false) 286eeb5ff92SLingrui98 } 287eeb5ff92SLingrui98 28809c6f1ddSLingrui98 val jmpPft = getLower(io.start_addr) +& pd.jmpOffset +& Mux(pd.rvcMask(pd.jmpOffset), 1.U, 2.U) 289a60a2901SLingrui98 init_entry.pftAddr := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft, getLower(io.start_addr)) 290a60a2901SLingrui98 init_entry.carry := Mux(entry_has_jmp && !last_jmp_rvi, jmpPft(carryPos-instOffsetBits), true.B) 29109c6f1ddSLingrui98 init_entry.isJalr := new_jmp_is_jalr 29209c6f1ddSLingrui98 init_entry.isCall := new_jmp_is_call 29309c6f1ddSLingrui98 init_entry.isRet := new_jmp_is_ret 294f4ebc4b2SLingrui98 // that means fall thru points to the middle of an inst 295ae409b75SSteve Gou init_entry.last_may_be_rvi_call := pd.jmpOffset === (PredictWidth-1).U && !pd.rvcMask(pd.jmpOffset) 29609c6f1ddSLingrui98 29709c6f1ddSLingrui98 // if hit, check whether a new cfi(only br is possible) is detected 29809c6f1ddSLingrui98 val oe = io.old_entry 299eeb5ff92SLingrui98 val br_recorded_vec = oe.getBrRecordedVec(io.cfiIndex.bits) 30009c6f1ddSLingrui98 val br_recorded = br_recorded_vec.asUInt.orR 30109c6f1ddSLingrui98 val is_new_br = cfi_is_br && !br_recorded 30209c6f1ddSLingrui98 val new_br_offset = io.cfiIndex.bits 30309c6f1ddSLingrui98 // vec(i) means new br will be inserted BEFORE old br(i) 304eeb5ff92SLingrui98 val allBrSlotsVec = oe.allSlotsForBr 30509c6f1ddSLingrui98 val new_br_insert_onehot = VecInit((0 until numBr).map{ 30609c6f1ddSLingrui98 i => i match { 307eeb5ff92SLingrui98 case 0 => 308eeb5ff92SLingrui98 !allBrSlotsVec(0).valid || new_br_offset < allBrSlotsVec(0).offset 309eeb5ff92SLingrui98 case idx => 310eeb5ff92SLingrui98 
allBrSlotsVec(idx-1).valid && new_br_offset > allBrSlotsVec(idx-1).offset && 311eeb5ff92SLingrui98 (!allBrSlotsVec(idx).valid || new_br_offset < allBrSlotsVec(idx).offset) 31209c6f1ddSLingrui98 } 31309c6f1ddSLingrui98 }) 31409c6f1ddSLingrui98 31509c6f1ddSLingrui98 val old_entry_modified = WireInit(io.old_entry) 31609c6f1ddSLingrui98 for (i <- 0 until numBr) { 317eeb5ff92SLingrui98 val slot = old_entry_modified.allSlotsForBr(i) 318eeb5ff92SLingrui98 when (new_br_insert_onehot(i)) { 319eeb5ff92SLingrui98 slot.valid := true.B 320eeb5ff92SLingrui98 slot.offset := new_br_offset 321b37e4b45SLingrui98 slot.setLowerStatByTarget(io.start_addr, io.target, i == numBr-1) 322eeb5ff92SLingrui98 old_entry_modified.always_taken(i) := true.B 323eeb5ff92SLingrui98 }.elsewhen (new_br_offset > oe.allSlotsForBr(i).offset) { 324eeb5ff92SLingrui98 old_entry_modified.always_taken(i) := false.B 325eeb5ff92SLingrui98 // all other fields remain unchanged 326eeb5ff92SLingrui98 }.otherwise { 327eeb5ff92SLingrui98 // case i == 0, remain unchanged 328eeb5ff92SLingrui98 if (i != 0) { 329b37e4b45SLingrui98 val noNeedToMoveFromFormerSlot = (i == numBr-1).B && !oe.brSlots.last.valid 330eeb5ff92SLingrui98 when (!noNeedToMoveFromFormerSlot) { 331eeb5ff92SLingrui98 slot.fromAnotherSlot(oe.allSlotsForBr(i-1)) 332eeb5ff92SLingrui98 old_entry_modified.always_taken(i) := oe.always_taken(i) 33309c6f1ddSLingrui98 } 334eeb5ff92SLingrui98 } 335eeb5ff92SLingrui98 } 336eeb5ff92SLingrui98 } 33709c6f1ddSLingrui98 338eeb5ff92SLingrui98 // two circumstances: 339eeb5ff92SLingrui98 // 1. oe: | br | j |, new br should be in front of j, thus addr of j should be new pft 340eeb5ff92SLingrui98 // 2. oe: | br | br |, new br could be anywhere between, thus new pft is the addr of either 341eeb5ff92SLingrui98 // the previous last br or the new br 342eeb5ff92SLingrui98 val may_have_to_replace = oe.noEmptySlotForNewBr 343eeb5ff92SLingrui98 val pft_need_to_change = is_new_br && may_have_to_replace 34409c6f1ddSLingrui98 // it should either be the given last br or the new br 34509c6f1ddSLingrui98 when (pft_need_to_change) { 346eeb5ff92SLingrui98 val new_pft_offset = 347710a8720SLingrui98 Mux(!new_br_insert_onehot.asUInt.orR, 348710a8720SLingrui98 new_br_offset, oe.allSlotsForBr.last.offset) 349eeb5ff92SLingrui98 350710a8720SLingrui98 // set jmp to invalid 35109c6f1ddSLingrui98 old_entry_modified.pftAddr := getLower(io.start_addr) + new_pft_offset 35209c6f1ddSLingrui98 old_entry_modified.carry := (getLower(io.start_addr) +& new_pft_offset).head(1).asBool 353f4ebc4b2SLingrui98 old_entry_modified.last_may_be_rvi_call := false.B 35409c6f1ddSLingrui98 old_entry_modified.isCall := false.B 35509c6f1ddSLingrui98 old_entry_modified.isRet := false.B 356eeb5ff92SLingrui98 old_entry_modified.isJalr := false.B 35709c6f1ddSLingrui98 } 35809c6f1ddSLingrui98 35909c6f1ddSLingrui98 val old_entry_jmp_target_modified = WireInit(oe) 360710a8720SLingrui98 val old_target = oe.tailSlot.getTarget(io.start_addr) // may be wrong because we store only 20 lowest bits 361b37e4b45SLingrui98 val old_tail_is_jmp = !oe.tailSlot.sharing 362eeb5ff92SLingrui98 val jalr_target_modified = cfi_is_jalr && (old_target =/= io.target) && old_tail_is_jmp // TODO: pass full jalr target 3633bcae573SLingrui98 when (jalr_target_modified) { 36409c6f1ddSLingrui98 old_entry_jmp_target_modified.setByJmpTarget(io.start_addr, io.target) 36509c6f1ddSLingrui98 old_entry_jmp_target_modified.always_taken := 0.U.asTypeOf(Vec(numBr, Bool())) 36609c6f1ddSLingrui98 } 36709c6f1ddSLingrui98 36809c6f1ddSLingrui98 val 
old_entry_always_taken = WireInit(oe) 36909c6f1ddSLingrui98 val always_taken_modified_vec = Wire(Vec(numBr, Bool())) // whether modified or not 37009c6f1ddSLingrui98 for (i <- 0 until numBr) { 37109c6f1ddSLingrui98 old_entry_always_taken.always_taken(i) := 37209c6f1ddSLingrui98 oe.always_taken(i) && io.cfiIndex.valid && oe.brValids(i) && io.cfiIndex.bits === oe.brOffset(i) 373710a8720SLingrui98 always_taken_modified_vec(i) := oe.always_taken(i) && !old_entry_always_taken.always_taken(i) 37409c6f1ddSLingrui98 } 37509c6f1ddSLingrui98 val always_taken_modified = always_taken_modified_vec.reduce(_||_) 37609c6f1ddSLingrui98 37709c6f1ddSLingrui98 37809c6f1ddSLingrui98 37909c6f1ddSLingrui98 val derived_from_old_entry = 38009c6f1ddSLingrui98 Mux(is_new_br, old_entry_modified, 3813bcae573SLingrui98 Mux(jalr_target_modified, old_entry_jmp_target_modified, old_entry_always_taken)) 38209c6f1ddSLingrui98 38309c6f1ddSLingrui98 38409c6f1ddSLingrui98 io.new_entry := Mux(!hit, init_entry, derived_from_old_entry) 38509c6f1ddSLingrui98 38609c6f1ddSLingrui98 io.new_br_insert_pos := new_br_insert_onehot 38709c6f1ddSLingrui98 io.taken_mask := VecInit((io.new_entry.brOffset zip io.new_entry.brValids).map{ 38809c6f1ddSLingrui98 case (off, v) => io.cfiIndex.bits === off && io.cfiIndex.valid && v 38909c6f1ddSLingrui98 }) 39009c6f1ddSLingrui98 for (i <- 0 until numBr) { 39109c6f1ddSLingrui98 io.mispred_mask(i) := io.new_entry.brValids(i) && io.mispredict_vec(io.new_entry.brOffset(i)) 39209c6f1ddSLingrui98 } 39309c6f1ddSLingrui98 io.mispred_mask.last := io.new_entry.jmpValid && io.mispredict_vec(pd.jmpOffset) 39409c6f1ddSLingrui98 39509c6f1ddSLingrui98 // for perf counters 39609c6f1ddSLingrui98 io.is_init_entry := !hit 3973bcae573SLingrui98 io.is_old_entry := hit && !is_new_br && !jalr_target_modified && !always_taken_modified 39809c6f1ddSLingrui98 io.is_new_br := hit && is_new_br 3993bcae573SLingrui98 io.is_jalr_target_modified := hit && jalr_target_modified 40009c6f1ddSLingrui98 io.is_always_taken_modified := hit && always_taken_modified 401eeb5ff92SLingrui98 io.is_br_full := hit && is_new_br && may_have_to_replace 40209c6f1ddSLingrui98} 40309c6f1ddSLingrui98 404*c5c5edaeSJeniusclass FtqPcMemWrapper(numOtherReads: Int)(implicit p: Parameters) extends XSModule with HasBackendRedirectInfo { 405*c5c5edaeSJenius val io = IO(new Bundle { 406*c5c5edaeSJenius val ifuPtr_w = Input(new FtqPtr) 407*c5c5edaeSJenius val ifuPtrPlus1_w = Input(new FtqPtr) 408*c5c5edaeSJenius val commPtr_w = Input(new FtqPtr) 409*c5c5edaeSJenius val ifuPtr_rdata = Output(new Ftq_RF_Components) 410*c5c5edaeSJenius val ifuPtrPlus1_rdata = Output(new Ftq_RF_Components) 411*c5c5edaeSJenius val commPtr_rdata = Output(new Ftq_RF_Components) 412*c5c5edaeSJenius 413*c5c5edaeSJenius val other_raddrs = Input(Vec(numOtherReads, UInt(log2Ceil(FtqSize).W))) 414*c5c5edaeSJenius val other_rdatas = Output(Vec(numOtherReads, new Ftq_RF_Components)) 415*c5c5edaeSJenius 416*c5c5edaeSJenius val wen = Input(Bool()) 417*c5c5edaeSJenius val waddr = Input(UInt(log2Ceil(FtqSize).W)) 418*c5c5edaeSJenius val wdata = Input(new Ftq_RF_Components) 419*c5c5edaeSJenius }) 420*c5c5edaeSJenius 421*c5c5edaeSJenius val num_pc_read = numOtherReads + 3 422*c5c5edaeSJenius val mem = Module(new SyncDataModuleTemplate(new Ftq_RF_Components, FtqSize, 423*c5c5edaeSJenius num_pc_read, 1, "FtqPC", concatData=false, Some(Seq.tabulate(num_pc_read)(i => false)))) 424*c5c5edaeSJenius mem.io.wen(0) := io.wen 425*c5c5edaeSJenius mem.io.waddr(0) := io.waddr 426*c5c5edaeSJenius mem.io.wdata(0) := 
io.wdata 427*c5c5edaeSJenius 428*c5c5edaeSJenius // read one cycle ahead for ftq loacl reads 429*c5c5edaeSJenius val raddr_vec = VecInit(io.other_raddrs ++ 430*c5c5edaeSJenius Seq(io.ifuPtr_w.value, io.ifuPtrPlus1_w.value, io.commPtr_w.value)) 431*c5c5edaeSJenius 432*c5c5edaeSJenius mem.io.raddr := raddr_vec 433*c5c5edaeSJenius 434*c5c5edaeSJenius io.other_rdatas := mem.io.rdata.dropRight(3) 435*c5c5edaeSJenius io.ifuPtr_rdata := mem.io.rdata.init.init.last 436*c5c5edaeSJenius io.ifuPtrPlus1_rdata := mem.io.rdata.init.last 437*c5c5edaeSJenius io.commPtr_rdata := mem.io.rdata.last 438*c5c5edaeSJenius} 439*c5c5edaeSJenius 44009c6f1ddSLingrui98class Ftq(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper 441e30430c2SJay with HasBackendRedirectInfo with BPUUtils with HasBPUConst with HasPerfEvents 442e30430c2SJay with HasICacheParameters{ 44309c6f1ddSLingrui98 val io = IO(new Bundle { 44409c6f1ddSLingrui98 val fromBpu = Flipped(new BpuToFtqIO) 44509c6f1ddSLingrui98 val fromIfu = Flipped(new IfuToFtqIO) 44609c6f1ddSLingrui98 val fromBackend = Flipped(new CtrlToFtqIO) 44709c6f1ddSLingrui98 44809c6f1ddSLingrui98 val toBpu = new FtqToBpuIO 44909c6f1ddSLingrui98 val toIfu = new FtqToIfuIO 450*c5c5edaeSJenius val toICache = new FtqToICacheIO 45109c6f1ddSLingrui98 val toBackend = new FtqToCtrlIO 45209c6f1ddSLingrui98 4537052722fSJay val toPrefetch = new FtqPrefechBundle 4547052722fSJay 45509c6f1ddSLingrui98 val bpuInfo = new Bundle { 45609c6f1ddSLingrui98 val bpRight = Output(UInt(XLEN.W)) 45709c6f1ddSLingrui98 val bpWrong = Output(UInt(XLEN.W)) 45809c6f1ddSLingrui98 } 45909c6f1ddSLingrui98 }) 46009c6f1ddSLingrui98 io.bpuInfo := DontCare 46109c6f1ddSLingrui98 4622e1be6e1SSteve Gou val backendRedirect = Wire(Valid(new Redirect)) 4632e1be6e1SSteve Gou val backendRedirectReg = RegNext(backendRedirect) 46409c6f1ddSLingrui98 465df5b4b8eSYinan Xu val stage2Flush = backendRedirect.valid 46609c6f1ddSLingrui98 val backendFlush = stage2Flush || RegNext(stage2Flush) 46709c6f1ddSLingrui98 val ifuFlush = Wire(Bool()) 46809c6f1ddSLingrui98 46909c6f1ddSLingrui98 val flush = stage2Flush || RegNext(stage2Flush) 47009c6f1ddSLingrui98 47109c6f1ddSLingrui98 val allowBpuIn, allowToIfu = WireInit(false.B) 47209c6f1ddSLingrui98 val flushToIfu = !allowToIfu 473df5b4b8eSYinan Xu allowBpuIn := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid 474df5b4b8eSYinan Xu allowToIfu := !ifuFlush && !backendRedirect.valid && !backendRedirectReg.valid 47509c6f1ddSLingrui98 476e30430c2SJay val bpuPtr, ifuPtr, ifuWbPtr, commPtr = RegInit(FtqPtr(false.B, 0.U)) 477c9bc5480SLingrui98 val ifuPtrPlus1 = RegInit(FtqPtr(false.B, 1.U)) 478*c5c5edaeSJenius val ifuPtr_write = WireInit(ifuPtr) 479*c5c5edaeSJenius val ifuPtrPlus1_write = WireInit(ifuPtrPlus1) 480*c5c5edaeSJenius val ifuWbPtr_write = WireInit(ifuWbPtr) 481*c5c5edaeSJenius val commPtr_write = WireInit(commPtr) 482*c5c5edaeSJenius ifuPtr := ifuPtr_write 483*c5c5edaeSJenius ifuPtrPlus1 := ifuPtrPlus1_write 484*c5c5edaeSJenius ifuWbPtr := ifuWbPtr_write 485*c5c5edaeSJenius commPtr := commPtr_write 486*c5c5edaeSJenius 48709c6f1ddSLingrui98 val validEntries = distanceBetween(bpuPtr, commPtr) 48809c6f1ddSLingrui98 48909c6f1ddSLingrui98 // ********************************************************************** 49009c6f1ddSLingrui98 // **************************** enq from bpu **************************** 49109c6f1ddSLingrui98 // ********************************************************************** 49209c6f1ddSLingrui98 val new_entry_ready = validEntries < 
FtqSize.U 49309c6f1ddSLingrui98 io.fromBpu.resp.ready := new_entry_ready 49409c6f1ddSLingrui98 49509c6f1ddSLingrui98 val bpu_s2_resp = io.fromBpu.resp.bits.s2 496cb4f77ceSLingrui98 val bpu_s3_resp = io.fromBpu.resp.bits.s3 49709c6f1ddSLingrui98 val bpu_s2_redirect = bpu_s2_resp.valid && bpu_s2_resp.hasRedirect 498cb4f77ceSLingrui98 val bpu_s3_redirect = bpu_s3_resp.valid && bpu_s3_resp.hasRedirect 49909c6f1ddSLingrui98 50009c6f1ddSLingrui98 io.toBpu.enq_ptr := bpuPtr 50109c6f1ddSLingrui98 val enq_fire = io.fromBpu.resp.fire() && allowBpuIn // from bpu s1 502cb4f77ceSLingrui98 val bpu_in_fire = (io.fromBpu.resp.fire() || bpu_s2_redirect || bpu_s3_redirect) && allowBpuIn 50309c6f1ddSLingrui98 504b37e4b45SLingrui98 val bpu_in_resp = io.fromBpu.resp.bits.selectedResp 505b37e4b45SLingrui98 val bpu_in_stage = io.fromBpu.resp.bits.selectedRespIdx 50609c6f1ddSLingrui98 val bpu_in_resp_ptr = Mux(bpu_in_stage === BP_S1, bpuPtr, bpu_in_resp.ftq_idx) 50709c6f1ddSLingrui98 val bpu_in_resp_idx = bpu_in_resp_ptr.value 50809c6f1ddSLingrui98 509*c5c5edaeSJenius // read ports: jumpPc + redirects + loadPred + robFlush + ifuReq1 + ifuReq2 + commitUpdate 510*c5c5edaeSJenius val num_pc_read = 1+numRedirectPcRead+2+1+1+1 511*c5c5edaeSJenius val num_backend_read = 1+numRedirectPcRead+2 512*c5c5edaeSJenius val ftq_pc_mem = Module(new FtqPcMemWrapper(num_backend_read)) 513*c5c5edaeSJenius ftq_pc_mem.io.wen := bpu_in_fire 514*c5c5edaeSJenius ftq_pc_mem.io.waddr := bpu_in_resp_idx 515*c5c5edaeSJenius ftq_pc_mem.io.wdata.fromBranchPrediction(bpu_in_resp) 51609c6f1ddSLingrui98 51709c6f1ddSLingrui98 // ifuRedirect + backendRedirect + commit 51809c6f1ddSLingrui98 val ftq_redirect_sram = Module(new FtqNRSRAM(new Ftq_Redirect_SRAMEntry, 1+1+1)) 51909c6f1ddSLingrui98 // these info is intended to enq at the last stage of bpu 52009c6f1ddSLingrui98 ftq_redirect_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid 52109c6f1ddSLingrui98 ftq_redirect_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value 52209c6f1ddSLingrui98 ftq_redirect_sram.io.wdata.fromBranchPrediction(io.fromBpu.resp.bits.lastStage) 52349cbc998SLingrui98 println(f"ftq redirect SRAM: entry ${ftq_redirect_sram.io.wdata.getWidth} * ${FtqSize} * 3") 52449cbc998SLingrui98 println(f"ftq redirect SRAM: ahead fh ${ftq_redirect_sram.io.wdata.afhob.getWidth} * ${FtqSize} * 3") 52509c6f1ddSLingrui98 52609c6f1ddSLingrui98 val ftq_meta_1r_sram = Module(new FtqNRSRAM(new Ftq_1R_SRAMEntry, 1)) 52709c6f1ddSLingrui98 // these info is intended to enq at the last stage of bpu 52809c6f1ddSLingrui98 ftq_meta_1r_sram.io.wen := io.fromBpu.resp.bits.lastStage.valid 52909c6f1ddSLingrui98 ftq_meta_1r_sram.io.waddr := io.fromBpu.resp.bits.lastStage.ftq_idx.value 53009c6f1ddSLingrui98 ftq_meta_1r_sram.io.wdata.meta := io.fromBpu.resp.bits.meta 53109c6f1ddSLingrui98 // ifuRedirect + backendRedirect + commit 53209c6f1ddSLingrui98 val ftb_entry_mem = Module(new SyncDataModuleTemplate(new FTBEntry, FtqSize, 1+1+1, 1)) 53309c6f1ddSLingrui98 ftb_entry_mem.io.wen(0) := io.fromBpu.resp.bits.lastStage.valid 53409c6f1ddSLingrui98 ftb_entry_mem.io.waddr(0) := io.fromBpu.resp.bits.lastStage.ftq_idx.value 53509c6f1ddSLingrui98 ftb_entry_mem.io.wdata(0) := io.fromBpu.resp.bits.lastStage.ftb_entry 53609c6f1ddSLingrui98 53709c6f1ddSLingrui98 53809c6f1ddSLingrui98 // multi-write 539b37e4b45SLingrui98 val update_target = Reg(Vec(FtqSize, UInt(VAddrBits.W))) // could be taken target or fallThrough 54009c6f1ddSLingrui98 val cfiIndex_vec = Reg(Vec(FtqSize, 
ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))) 54109c6f1ddSLingrui98 val mispredict_vec = Reg(Vec(FtqSize, Vec(PredictWidth, Bool()))) 54209c6f1ddSLingrui98 val pred_stage = Reg(Vec(FtqSize, UInt(2.W))) 54309c6f1ddSLingrui98 54409c6f1ddSLingrui98 val c_invalid :: c_valid :: c_commited :: Nil = Enum(3) 54509c6f1ddSLingrui98 val commitStateQueue = RegInit(VecInit(Seq.fill(FtqSize) { 54609c6f1ddSLingrui98 VecInit(Seq.fill(PredictWidth)(c_invalid)) 54709c6f1ddSLingrui98 })) 54809c6f1ddSLingrui98 54909c6f1ddSLingrui98 val f_to_send :: f_sent :: Nil = Enum(2) 55009c6f1ddSLingrui98 val entry_fetch_status = RegInit(VecInit(Seq.fill(FtqSize)(f_sent))) 55109c6f1ddSLingrui98 55209c6f1ddSLingrui98 val h_not_hit :: h_false_hit :: h_hit :: Nil = Enum(3) 55309c6f1ddSLingrui98 val entry_hit_status = RegInit(VecInit(Seq.fill(FtqSize)(h_not_hit))) 55409c6f1ddSLingrui98 555f63797a4SLingrui98 // modify registers one cycle later to cut critical path 556f63797a4SLingrui98 val last_cycle_bpu_in = RegNext(bpu_in_fire) 557f63797a4SLingrui98 val last_cycle_bpu_in_idx = RegNext(bpu_in_resp_idx) 558f63797a4SLingrui98 val last_cycle_update_target = RegNext(bpu_in_resp.getTarget) 559f63797a4SLingrui98 val last_cycle_cfiIndex = RegNext(bpu_in_resp.cfiIndex) 560f63797a4SLingrui98 val last_cycle_bpu_in_stage = RegNext(bpu_in_stage) 561f63797a4SLingrui98 when (last_cycle_bpu_in) { 562f63797a4SLingrui98 entry_fetch_status(last_cycle_bpu_in_idx) := f_to_send 563f63797a4SLingrui98 commitStateQueue(last_cycle_bpu_in_idx) := VecInit(Seq.fill(PredictWidth)(c_invalid)) 564f63797a4SLingrui98 cfiIndex_vec(last_cycle_bpu_in_idx) := last_cycle_cfiIndex 565f63797a4SLingrui98 mispredict_vec(last_cycle_bpu_in_idx) := WireInit(VecInit(Seq.fill(PredictWidth)(false.B))) 566f63797a4SLingrui98 update_target(last_cycle_bpu_in_idx) := last_cycle_update_target 567f63797a4SLingrui98 pred_stage(last_cycle_bpu_in_idx) := last_cycle_bpu_in_stage 56809c6f1ddSLingrui98 } 56909c6f1ddSLingrui98 570f63797a4SLingrui98 57109c6f1ddSLingrui98 bpuPtr := bpuPtr + enq_fire 572c9bc5480SLingrui98 when (io.toIfu.req.fire && allowToIfu) { 573*c5c5edaeSJenius ifuPtr_write := ifuPtrPlus1 574*c5c5edaeSJenius ifuPtrPlus1_write := ifuPtrPlus1 + 1.U 575c9bc5480SLingrui98 } 57609c6f1ddSLingrui98 57709c6f1ddSLingrui98 // only use ftb result to assign hit status 57809c6f1ddSLingrui98 when (bpu_s2_resp.valid) { 579b37e4b45SLingrui98 entry_hit_status(bpu_s2_resp.ftq_idx.value) := Mux(bpu_s2_resp.full_pred.hit, h_hit, h_not_hit) 58009c6f1ddSLingrui98 } 58109c6f1ddSLingrui98 58209c6f1ddSLingrui98 5832f4a3aa4SLingrui98 io.toIfu.flushFromBpu.s2.valid := bpu_s2_redirect 58409c6f1ddSLingrui98 io.toIfu.flushFromBpu.s2.bits := bpu_s2_resp.ftq_idx 58509c6f1ddSLingrui98 when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect) { 58609c6f1ddSLingrui98 bpuPtr := bpu_s2_resp.ftq_idx + 1.U 58709c6f1ddSLingrui98 // only when ifuPtr runs ahead of bpu s2 resp should we recover it 58809c6f1ddSLingrui98 when (!isBefore(ifuPtr, bpu_s2_resp.ftq_idx)) { 589*c5c5edaeSJenius ifuPtr_write := bpu_s2_resp.ftq_idx 590*c5c5edaeSJenius ifuPtrPlus1_write := bpu_s2_resp.ftq_idx + 1.U 59109c6f1ddSLingrui98 } 59209c6f1ddSLingrui98 } 59309c6f1ddSLingrui98 594cb4f77ceSLingrui98 io.toIfu.flushFromBpu.s3.valid := bpu_s3_redirect 595cb4f77ceSLingrui98 io.toIfu.flushFromBpu.s3.bits := bpu_s3_resp.ftq_idx 596cb4f77ceSLingrui98 when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect) { 597cb4f77ceSLingrui98 bpuPtr := bpu_s3_resp.ftq_idx + 1.U 598cb4f77ceSLingrui98 // only when ifuPtr runs ahead of bpu s2 resp should we 
recover it
    when (!isBefore(ifuPtr, bpu_s3_resp.ftq_idx)) {
      ifuPtr_write := bpu_s3_resp.ftq_idx
      ifuPtrPlus1_write := bpu_s3_resp.ftq_idx + 1.U
    }
  }

  XSError(isBefore(bpuPtr, ifuPtr) && !isFull(bpuPtr, ifuPtr), "\nifuPtr is before bpuPtr!\n")

  // ****************************************************************
  // **************************** to ifu ****************************
  // ****************************************************************
  val bpu_in_bypass_buf = RegEnable(ftq_pc_mem.io.wdata, enable=bpu_in_fire)
  val bpu_in_bypass_ptr = RegNext(bpu_in_resp_ptr)
  val last_cycle_to_ifu_fire = RegNext(io.toIfu.req.fire)

  // read pc and target
  // ftq_pc_mem.io.raddr.init.init.last := ifuPtr.value
  // ftq_pc_mem.io.raddr.init.last := ifuPtrPlus1.value

  // ftq_pc_mem read addr connection: read one cycle ahead by addressing the mem
  // with the next-cycle (write) pointer values, so that rdata lines up with the
  // current pointers (assumed hookup for the one-cycle-ahead read scheme)
  ftq_pc_mem.io.ifuPtr_w      := ifuPtr_write
  ftq_pc_mem.io.ifuPtrPlus1_w := ifuPtrPlus1_write
  ftq_pc_mem.io.commPtr_w     := commPtr_write

  dontTouch(ftq_pc_mem.io.ifuPtr_w)
  dontTouch(ftq_pc_mem.io.ifuPtrPlus1_w)
  dontTouch(ftq_pc_mem.io.commPtr_w)

  io.toIfu.req.bits.ftqIdx := ifuPtr

  val toICachePcBundle = Wire(new Ftq_RF_Components)
  val toIfuPcBundle = Wire(new Ftq_RF_Components)
  val entry_is_to_send = WireInit(entry_fetch_status(ifuPtr.value) === f_to_send)
  val entry_next_addr = WireInit(update_target(ifuPtr.value))
  val entry_ftq_offset = WireInit(cfiIndex_vec(ifuPtr.value))


  // toIfuPcBundle and toICachePcBundle must be driven in every branch below,
  // since both are plain Wires feeding io.toIfu / io.toICache
  when (last_cycle_bpu_in && bpu_in_bypass_ptr === ifuPtr) {
    toIfuPcBundle := bpu_in_bypass_buf
    // this may become a timing-critical path
    toICachePcBundle := ftq_pc_mem.io.wdata
    entry_is_to_send := true.B
    entry_next_addr := last_cycle_update_target
    entry_ftq_offset := last_cycle_cfiIndex
  }.elsewhen (last_cycle_to_ifu_fire) {
    toIfuPcBundle := RegNext(ftq_pc_mem.io.ifuPtrPlus1_rdata)
    toICachePcBundle := ftq_pc_mem.io.ifuPtrPlus1_rdata
    entry_is_to_send := RegNext(entry_fetch_status(ifuPtrPlus1.value) === f_to_send) ||
                        RegNext(last_cycle_bpu_in && bpu_in_bypass_ptr === (ifuPtrPlus1)) // reduce potential bubbles
  }.otherwise {
    toIfuPcBundle := RegNext(ftq_pc_mem.io.ifuPtr_rdata)
    toICachePcBundle := ftq_pc_mem.io.ifuPtr_rdata
    entry_is_to_send := RegNext(entry_fetch_status(ifuPtr.value) === f_to_send)
  }

  io.toIfu.req.valid := entry_is_to_send && ifuPtr =/= bpuPtr
  io.toIfu.req.bits.nextStartAddr := entry_next_addr
  io.toIfu.req.bits.ftqOffset := entry_ftq_offset
  io.toIfu.req.bits.fromFtqPcBundle(toIfuPcBundle)

  io.toICache.req.valid := entry_is_to_send && ifuPtr =/= bpuPtr
  io.toICache.req.bits.fromFtqPcBundle(toICachePcBundle)

  // when fall through is smaller in value than start address, there must be a false hit
  when (toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit) {
    when (io.toIfu.req.fire &&
      !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) &&
      !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr)
    ) {
      entry_hit_status(ifuPtr.value) := h_false_hit
      // XSError(true.B, "FTB false hit by fallThroughError, startAddr: %x, fallTHru: %x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr)
    }
    XSDebug(true.B, "fallThruError! 
start:%x, fallThru:%x\n", io.toIfu.req.bits.startAddr, io.toIfu.req.bits.nextStartAddr) 69109c6f1ddSLingrui98 } 69209c6f1ddSLingrui98 693a60a2901SLingrui98 XSPerfAccumulate(f"fall_through_error_to_ifu", toIfuPcBundle.fallThruError && entry_hit_status(ifuPtr.value) === h_hit && 694a60a2901SLingrui98 io.toIfu.req.fire && !(bpu_s2_redirect && bpu_s2_resp.ftq_idx === ifuPtr) && !(bpu_s3_redirect && bpu_s3_resp.ftq_idx === ifuPtr)) 695a60a2901SLingrui98 69609c6f1ddSLingrui98 val ifu_req_should_be_flushed = 697cb4f77ceSLingrui98 io.toIfu.flushFromBpu.shouldFlushByStage2(io.toIfu.req.bits.ftqIdx) || 698cb4f77ceSLingrui98 io.toIfu.flushFromBpu.shouldFlushByStage3(io.toIfu.req.bits.ftqIdx) 69909c6f1ddSLingrui98 70009c6f1ddSLingrui98 when (io.toIfu.req.fire && !ifu_req_should_be_flushed) { 70109c6f1ddSLingrui98 entry_fetch_status(ifuPtr.value) := f_sent 70209c6f1ddSLingrui98 } 70309c6f1ddSLingrui98 70409c6f1ddSLingrui98 // ********************************************************************* 70509c6f1ddSLingrui98 // **************************** wb from ifu **************************** 70609c6f1ddSLingrui98 // ********************************************************************* 70709c6f1ddSLingrui98 val pdWb = io.fromIfu.pdWb 70809c6f1ddSLingrui98 val pds = pdWb.bits.pd 70909c6f1ddSLingrui98 val ifu_wb_valid = pdWb.valid 71009c6f1ddSLingrui98 val ifu_wb_idx = pdWb.bits.ftqIdx.value 71109c6f1ddSLingrui98 // read ports: commit update 71209c6f1ddSLingrui98 val ftq_pd_mem = Module(new SyncDataModuleTemplate(new Ftq_pd_Entry, FtqSize, 1, 1)) 71309c6f1ddSLingrui98 ftq_pd_mem.io.wen(0) := ifu_wb_valid 71409c6f1ddSLingrui98 ftq_pd_mem.io.waddr(0) := pdWb.bits.ftqIdx.value 71509c6f1ddSLingrui98 ftq_pd_mem.io.wdata(0).fromPdWb(pdWb.bits) 71609c6f1ddSLingrui98 71709c6f1ddSLingrui98 val hit_pd_valid = entry_hit_status(ifu_wb_idx) === h_hit && ifu_wb_valid 71809c6f1ddSLingrui98 val hit_pd_mispred = hit_pd_valid && pdWb.bits.misOffset.valid 71909c6f1ddSLingrui98 val hit_pd_mispred_reg = RegNext(hit_pd_mispred, init=false.B) 720005e809bSJiuyang Liu val pd_reg = RegEnable(pds, pdWb.valid) 721005e809bSJiuyang Liu val start_pc_reg = RegEnable(pdWb.bits.pc(0), pdWb.valid) 722005e809bSJiuyang Liu val wb_idx_reg = RegEnable(ifu_wb_idx, pdWb.valid) 72309c6f1ddSLingrui98 72409c6f1ddSLingrui98 when (ifu_wb_valid) { 72509c6f1ddSLingrui98 val comm_stq_wen = VecInit(pds.map(_.valid).zip(pdWb.bits.instrRange).map{ 72609c6f1ddSLingrui98 case (v, inRange) => v && inRange 72709c6f1ddSLingrui98 }) 72809c6f1ddSLingrui98 (commitStateQueue(ifu_wb_idx) zip comm_stq_wen).map{ 72909c6f1ddSLingrui98 case (qe, v) => when (v) { qe := c_valid } 73009c6f1ddSLingrui98 } 73109c6f1ddSLingrui98 } 73209c6f1ddSLingrui98 733*c5c5edaeSJenius when (ifu_wb_valid) { 734*c5c5edaeSJenius ifuWbPtr_write := ifuWbPtr + 1.U 735*c5c5edaeSJenius } 73609c6f1ddSLingrui98 73709c6f1ddSLingrui98 ftb_entry_mem.io.raddr.head := ifu_wb_idx 73809c6f1ddSLingrui98 val has_false_hit = WireInit(false.B) 73909c6f1ddSLingrui98 when (RegNext(hit_pd_valid)) { 74009c6f1ddSLingrui98 // check for false hit 74109c6f1ddSLingrui98 val pred_ftb_entry = ftb_entry_mem.io.rdata.head 742eeb5ff92SLingrui98 val brSlots = pred_ftb_entry.brSlots 743eeb5ff92SLingrui98 val tailSlot = pred_ftb_entry.tailSlot 74409c6f1ddSLingrui98 // we check cfis that bpu predicted 74509c6f1ddSLingrui98 746eeb5ff92SLingrui98 // bpu predicted branches but denied by predecode 747eeb5ff92SLingrui98 val br_false_hit = 748eeb5ff92SLingrui98 brSlots.map{ 749eeb5ff92SLingrui98 s => s.valid && !(pd_reg(s.offset).valid 
&& pd_reg(s.offset).isBr)
      }.reduce(_||_) ||
      (tailSlot.valid && pred_ftb_entry.tailSlot.sharing &&
        !(pd_reg(tailSlot.offset).valid && pd_reg(tailSlot.offset).isBr))

    val jmpOffset = tailSlot.offset
    val jmp_pd = pd_reg(jmpOffset)
    val jal_false_hit = pred_ftb_entry.jmpValid &&
      ((pred_ftb_entry.isJal && !(jmp_pd.valid && jmp_pd.isJal)) ||
       (pred_ftb_entry.isJalr && !(jmp_pd.valid && jmp_pd.isJalr)) ||
       (pred_ftb_entry.isCall && !(jmp_pd.valid && jmp_pd.isCall)) ||
       (pred_ftb_entry.isRet && !(jmp_pd.valid && jmp_pd.isRet))
      )

    has_false_hit := br_false_hit || jal_false_hit || hit_pd_mispred_reg
    XSDebug(has_false_hit, "FTB false hit by br or jal or hit_pd, startAddr: %x\n", pdWb.bits.pc(0))

    // assert(!has_false_hit)
  }

  when (has_false_hit) {
    entry_hit_status(wb_idx_reg) := h_false_hit
  }


  // **********************************************************************
  // ***************************** to backend *****************************
  // **********************************************************************
  // to backend pc mem / target
  io.toBackend.pc_mem_wen   := RegNext(last_cycle_bpu_in)
  io.toBackend.pc_mem_waddr := RegNext(last_cycle_bpu_in_idx)
  io.toBackend.pc_mem_wdata := RegNext(bpu_in_bypass_buf)
  io.toBackend.target       := RegNext(last_cycle_update_target)
  // backend pc reads are served from the backend's own copy of the pc mem written
  // above, so the extra read ports of ftq_pc_mem stay unused here (assumed tie-off)
  ftq_pc_mem.io.other_raddrs := DontCare

  // *******************************************************************************
  // **************************** redirect from backend ****************************
  // *******************************************************************************

  // redirect read cfiInfo, coupled to redirectGen s2
  ftq_redirect_sram.io.ren.init.last := backendRedirect.valid
  ftq_redirect_sram.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  ftb_entry_mem.io.raddr.init.last := backendRedirect.bits.ftqIdx.value

  val stage3CfiInfo = ftq_redirect_sram.io.rdata.init.last
  val fromBackendRedirect = WireInit(backendRedirectReg)
  val backendRedirectCfi = fromBackendRedirect.bits.cfiUpdate
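  // restore the speculative state saved at prediction time (folded/ahead global
  // history, RAS top and sp, history pointer) so the BPU can recover on this redirect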
80709c6f1ddSLingrui98 backendRedirectCfi.fromFtqRedirectSram(stage3CfiInfo) 80809c6f1ddSLingrui98 80909c6f1ddSLingrui98 val r_ftb_entry = ftb_entry_mem.io.rdata.init.last 81009c6f1ddSLingrui98 val r_ftqOffset = fromBackendRedirect.bits.ftqOffset 81109c6f1ddSLingrui98 81209c6f1ddSLingrui98 when (entry_hit_status(fromBackendRedirect.bits.ftqIdx.value) === h_hit) { 81309c6f1ddSLingrui98 backendRedirectCfi.shift := PopCount(r_ftb_entry.getBrMaskByOffset(r_ftqOffset)) +& 81409c6f1ddSLingrui98 (backendRedirectCfi.pd.isBr && !r_ftb_entry.brIsSaved(r_ftqOffset) && 815eeb5ff92SLingrui98 !r_ftb_entry.newBrCanNotInsert(r_ftqOffset)) 81609c6f1ddSLingrui98 81709c6f1ddSLingrui98 backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr && (r_ftb_entry.brIsSaved(r_ftqOffset) || 818eeb5ff92SLingrui98 !r_ftb_entry.newBrCanNotInsert(r_ftqOffset)) 81909c6f1ddSLingrui98 }.otherwise { 82009c6f1ddSLingrui98 backendRedirectCfi.shift := (backendRedirectCfi.pd.isBr && backendRedirectCfi.taken).asUInt 82109c6f1ddSLingrui98 backendRedirectCfi.addIntoHist := backendRedirectCfi.pd.isBr.asUInt 82209c6f1ddSLingrui98 } 82309c6f1ddSLingrui98 82409c6f1ddSLingrui98 82509c6f1ddSLingrui98 // *************************************************************************** 82609c6f1ddSLingrui98 // **************************** redirect from ifu **************************** 82709c6f1ddSLingrui98 // *************************************************************************** 82809c6f1ddSLingrui98 val fromIfuRedirect = WireInit(0.U.asTypeOf(Valid(new Redirect))) 82909c6f1ddSLingrui98 fromIfuRedirect.valid := pdWb.valid && pdWb.bits.misOffset.valid && !backendFlush 83009c6f1ddSLingrui98 fromIfuRedirect.bits.ftqIdx := pdWb.bits.ftqIdx 83109c6f1ddSLingrui98 fromIfuRedirect.bits.ftqOffset := pdWb.bits.misOffset.bits 83209c6f1ddSLingrui98 fromIfuRedirect.bits.level := RedirectLevel.flushAfter 83309c6f1ddSLingrui98 83409c6f1ddSLingrui98 val ifuRedirectCfiUpdate = fromIfuRedirect.bits.cfiUpdate 83509c6f1ddSLingrui98 ifuRedirectCfiUpdate.pc := pdWb.bits.pc(pdWb.bits.misOffset.bits) 83609c6f1ddSLingrui98 ifuRedirectCfiUpdate.pd := pdWb.bits.pd(pdWb.bits.misOffset.bits) 83709c6f1ddSLingrui98 ifuRedirectCfiUpdate.predTaken := cfiIndex_vec(pdWb.bits.ftqIdx.value).valid 83809c6f1ddSLingrui98 ifuRedirectCfiUpdate.target := pdWb.bits.target 83909c6f1ddSLingrui98 ifuRedirectCfiUpdate.taken := pdWb.bits.cfiOffset.valid 84009c6f1ddSLingrui98 ifuRedirectCfiUpdate.isMisPred := pdWb.bits.misOffset.valid 84109c6f1ddSLingrui98 84209c6f1ddSLingrui98 val ifuRedirectReg = RegNext(fromIfuRedirect, init=0.U.asTypeOf(Valid(new Redirect))) 84309c6f1ddSLingrui98 val ifuRedirectToBpu = WireInit(ifuRedirectReg) 84409c6f1ddSLingrui98 ifuFlush := fromIfuRedirect.valid || ifuRedirectToBpu.valid 84509c6f1ddSLingrui98 84609c6f1ddSLingrui98 ftq_redirect_sram.io.ren.head := fromIfuRedirect.valid 84709c6f1ddSLingrui98 ftq_redirect_sram.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value 84809c6f1ddSLingrui98 84909c6f1ddSLingrui98 ftb_entry_mem.io.raddr.head := fromIfuRedirect.bits.ftqIdx.value 85009c6f1ddSLingrui98 85109c6f1ddSLingrui98 val toBpuCfi = ifuRedirectToBpu.bits.cfiUpdate 85209c6f1ddSLingrui98 toBpuCfi.fromFtqRedirectSram(ftq_redirect_sram.io.rdata.head) 85309c6f1ddSLingrui98 when (ifuRedirectReg.bits.cfiUpdate.pd.isRet) { 85409c6f1ddSLingrui98 toBpuCfi.target := toBpuCfi.rasEntry.retAddr 85509c6f1ddSLingrui98 } 85609c6f1ddSLingrui98 85709c6f1ddSLingrui98 // ********************************************************************* 85809c6f1ddSLingrui98 // 
**************************** wb from exu ****************************
  // *********************************************************************

  backendRedirect := io.fromBackend.redirect

  def extractRedirectInfo(wb: Valid[Redirect]) = {
    val ftqIdx = wb.bits.ftqIdx.value
    val ftqOffset = wb.bits.ftqOffset
    val taken = wb.bits.cfiUpdate.taken
    val mispred = wb.bits.cfiUpdate.isMisPred
    (wb.valid, ftqIdx, ftqOffset, taken, mispred)
  }

  // fix mispredict entry
  val lastIsMispredict = RegNext(
    backendRedirect.valid && backendRedirect.bits.level === RedirectLevel.flushAfter, init = false.B
  )

  def updateCfiInfo(redirect: Valid[Redirect], isBackend: Boolean = true) = {
    val (r_valid, r_idx, r_offset, r_taken, r_mispred) = extractRedirectInfo(redirect)
    val cfiIndex_bits_wen = r_valid && r_taken && r_offset < cfiIndex_vec(r_idx).bits
    val cfiIndex_valid_wen = r_valid && r_offset === cfiIndex_vec(r_idx).bits
    when (cfiIndex_bits_wen || cfiIndex_valid_wen) {
      cfiIndex_vec(r_idx).valid := cfiIndex_bits_wen || cfiIndex_valid_wen && r_taken
    }
    when (cfiIndex_bits_wen) {
      cfiIndex_vec(r_idx).bits := r_offset
    }
    update_target(r_idx) := redirect.bits.cfiUpdate.target
    if (isBackend) {
      mispredict_vec(r_idx)(r_offset) := r_mispred
    }
  }

  // write to backend target vec
  io.toBackend.pd_redirect_waddr.valid := RegNext(fromIfuRedirect.valid)
  io.toBackend.pd_redirect_waddr.bits := RegNext(fromIfuRedirect.bits.ftqIdx.value)
  io.toBackend.pd_redirect_target := RegNext(fromIfuRedirect.bits.cfiUpdate.target)

  when(backendRedirectReg.valid && lastIsMispredict) {
    updateCfiInfo(backendRedirectReg)
  }.elsewhen (ifuRedirectToBpu.valid) {
    updateCfiInfo(ifuRedirectToBpu, isBackend=false)
  }

  // ***********************************************************************************
  // **************************** flush ptr and state queue ****************************
  // ***********************************************************************************

  val redirectVec = VecInit(backendRedirect, fromIfuRedirect)

  // when redirect, we should reset ptrs and status queues
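  // backend redirects (head of redirectVec) take priority over the ifu predecode
  // redirect; commit states are cleared only for backend redirects (notIfu below)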
when(redirectVec.map(r => r.valid).reduce(_||_)){ 9162f4a3aa4SLingrui98 val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits))) 91709c6f1ddSLingrui98 val notIfu = redirectVec.dropRight(1).map(r => r.valid).reduce(_||_) 9182f4a3aa4SLingrui98 val (idx, offset, flushItSelf) = (r.ftqIdx, r.ftqOffset, RedirectLevel.flushItself(r.level)) 91909c6f1ddSLingrui98 val next = idx + 1.U 92009c6f1ddSLingrui98 bpuPtr := next 921*c5c5edaeSJenius ifuPtr_write := next 922*c5c5edaeSJenius ifuWbPtr_write := next 923*c5c5edaeSJenius ifuPtrPlus1_write := idx + 2.U 92409c6f1ddSLingrui98 when (notIfu) { 92509c6f1ddSLingrui98 commitStateQueue(idx.value).zipWithIndex.foreach({ case (s, i) => 92609c6f1ddSLingrui98 when(i.U > offset || i.U === offset && flushItSelf){ 92709c6f1ddSLingrui98 s := c_invalid 92809c6f1ddSLingrui98 } 92909c6f1ddSLingrui98 }) 93009c6f1ddSLingrui98 } 93109c6f1ddSLingrui98 } 93209c6f1ddSLingrui98 93309c6f1ddSLingrui98 // only the valid bit is actually needed 934df5b4b8eSYinan Xu io.toIfu.redirect.bits := backendRedirect.bits 93509c6f1ddSLingrui98 io.toIfu.redirect.valid := stage2Flush 93609c6f1ddSLingrui98 93709c6f1ddSLingrui98 // commit 9389aca92b9SYinan Xu for (c <- io.fromBackend.rob_commits) { 93909c6f1ddSLingrui98 when(c.valid) { 94009c6f1ddSLingrui98 commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset) := c_commited 94188825c5cSYinan Xu // TODO: remove this 94288825c5cSYinan Xu // For instruction fusions, we also update the next instruction 943c3abb8b6SYinan Xu when (c.bits.commitType === 4.U) { 94488825c5cSYinan Xu commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 1.U) := c_commited 945c3abb8b6SYinan Xu }.elsewhen(c.bits.commitType === 5.U) { 94688825c5cSYinan Xu commitStateQueue(c.bits.ftqIdx.value)(c.bits.ftqOffset + 2.U) := c_commited 947c3abb8b6SYinan Xu }.elsewhen(c.bits.commitType === 6.U) { 94888825c5cSYinan Xu val index = (c.bits.ftqIdx + 1.U).value 94988825c5cSYinan Xu commitStateQueue(index)(0) := c_commited 950c3abb8b6SYinan Xu }.elsewhen(c.bits.commitType === 7.U) { 95188825c5cSYinan Xu val index = (c.bits.ftqIdx + 1.U).value 95288825c5cSYinan Xu commitStateQueue(index)(1) := c_commited 95388825c5cSYinan Xu } 95409c6f1ddSLingrui98 } 95509c6f1ddSLingrui98 } 95609c6f1ddSLingrui98 95709c6f1ddSLingrui98 // **************************************************************** 95809c6f1ddSLingrui98 // **************************** to bpu **************************** 95909c6f1ddSLingrui98 // **************************************************************** 96009c6f1ddSLingrui98 96109c6f1ddSLingrui98 io.toBpu.redirect <> Mux(fromBackendRedirect.valid, fromBackendRedirect, ifuRedirectToBpu) 96209c6f1ddSLingrui98 96302f21c16SLingrui98 val may_have_stall_from_bpu = Wire(Bool()) 96402f21c16SLingrui98 val bpu_ftb_update_stall = RegInit(0.U(2.W)) // 2-cycle stall, so we need 3 states 96502f21c16SLingrui98 may_have_stall_from_bpu := bpu_ftb_update_stall =/= 0.U 9665371700eSzoujr val canCommit = commPtr =/= ifuWbPtr && !may_have_stall_from_bpu && 96709c6f1ddSLingrui98 Cat(commitStateQueue(commPtr.value).map(s => { 96809c6f1ddSLingrui98 s === c_invalid || s === c_commited 96909c6f1ddSLingrui98 })).andR() 97009c6f1ddSLingrui98 97109c6f1ddSLingrui98 // commit reads 972*c5c5edaeSJenius // ftq_pc_mem.io.raddr.last := commPtr.value 973*c5c5edaeSJenius val commit_pc_bundle = RegNext(ftq_pc_mem.io.commPtr_rdata) 97409c6f1ddSLingrui98 ftq_pd_mem.io.raddr.last := commPtr.value 97509c6f1ddSLingrui98 val commit_pd = ftq_pd_mem.io.rdata.last 97609c6f1ddSLingrui98 ftq_redirect_sram.io.ren.last 
97609c6f1ddSLingrui98  ftq_redirect_sram.io.ren.last := canCommit
97709c6f1ddSLingrui98  ftq_redirect_sram.io.raddr.last := commPtr.value
97809c6f1ddSLingrui98  val commit_spec_meta = ftq_redirect_sram.io.rdata.last
97909c6f1ddSLingrui98  ftq_meta_1r_sram.io.ren(0) := canCommit
98009c6f1ddSLingrui98  ftq_meta_1r_sram.io.raddr(0) := commPtr.value
98109c6f1ddSLingrui98  val commit_meta = ftq_meta_1r_sram.io.rdata(0)
98209c6f1ddSLingrui98  ftb_entry_mem.io.raddr.last := commPtr.value
98309c6f1ddSLingrui98  val commit_ftb_entry = ftb_entry_mem.io.rdata.last
98409c6f1ddSLingrui98
98509c6f1ddSLingrui98  // need one cycle to read mem and srams
98609c6f1ddSLingrui98  val do_commit_ptr = RegNext(commPtr)
9875371700eSzoujr  val do_commit = RegNext(canCommit, init=false.B)
988*c5c5edaeSJenius  when (canCommit) { commPtr_write := commPtr + 1.U }
98909c6f1ddSLingrui98  val commit_state = RegNext(commitStateQueue(commPtr.value))
9905371700eSzoujr  val can_commit_cfi = WireInit(cfiIndex_vec(commPtr.value))
9915371700eSzoujr  when (commitStateQueue(commPtr.value)(can_commit_cfi.bits) =/= c_commited) {
9925371700eSzoujr    can_commit_cfi.valid := false.B
99309c6f1ddSLingrui98  }
9945371700eSzoujr  val commit_cfi = RegNext(can_commit_cfi)
99509c6f1ddSLingrui98
99609c6f1ddSLingrui98  val commit_mispredict = VecInit((RegNext(mispredict_vec(commPtr.value)) zip commit_state).map {
99709c6f1ddSLingrui98    case (mis, state) => mis && state === c_commited
99809c6f1ddSLingrui98  })
9995371700eSzoujr  val can_commit_hit = entry_hit_status(commPtr.value)
10005371700eSzoujr  val commit_hit = RegNext(can_commit_hit)
100109c6f1ddSLingrui98  val commit_target = RegNext(update_target(commPtr.value))
1002edc18578SLingrui98  val commit_stage = RegNext(pred_stage(commPtr.value))
100309c6f1ddSLingrui98  val commit_valid = commit_hit === h_hit || commit_cfi.valid // hit or taken
100409c6f1ddSLingrui98
10055371700eSzoujr  val to_bpu_hit = can_commit_hit === h_hit || can_commit_hit === h_false_hit
100602f21c16SLingrui98  switch (bpu_ftb_update_stall) {
100702f21c16SLingrui98    is (0.U) {
100802f21c16SLingrui98      when (can_commit_cfi.valid && !to_bpu_hit && canCommit) {
100902f21c16SLingrui98        bpu_ftb_update_stall := 2.U // 2-cycle stall
101002f21c16SLingrui98      }
101102f21c16SLingrui98    }
101202f21c16SLingrui98    is (2.U) {
101302f21c16SLingrui98      bpu_ftb_update_stall := 1.U
101402f21c16SLingrui98    }
101502f21c16SLingrui98    is (1.U) {
101602f21c16SLingrui98      bpu_ftb_update_stall := 0.U
101702f21c16SLingrui98    }
101802f21c16SLingrui98    is (3.U) {
101902f21c16SLingrui98      XSError(true.B, "bpu_ftb_update_stall should be 0, 1 or 2")
102002f21c16SLingrui98    }
102102f21c16SLingrui98  }
102209c6f1ddSLingrui98
102309c6f1ddSLingrui98  io.toBpu.update := DontCare
102409c6f1ddSLingrui98  io.toBpu.update.valid := commit_valid && do_commit
102509c6f1ddSLingrui98  val update = io.toBpu.update.bits
102609c6f1ddSLingrui98  update.false_hit := commit_hit === h_false_hit
102709c6f1ddSLingrui98  update.pc := commit_pc_bundle.startAddr
102809c6f1ddSLingrui98  update.meta := commit_meta.meta
10298ffcd86aSLingrui98  update.full_target := commit_target
1030edc18578SLingrui98  update.from_stage := commit_stage
103109c6f1ddSLingrui98  update.fromFtqRedirectSram(commit_spec_meta)
103209c6f1ddSLingrui98
103309c6f1ddSLingrui98  val commit_real_hit = commit_hit === h_hit
103409c6f1ddSLingrui98  val update_ftb_entry = update.ftb_entry
103509c6f1ddSLingrui98
103609c6f1ddSLingrui98  val ftbEntryGen = Module(new FTBEntryGen).io
103709c6f1ddSLingrui98  ftbEntryGen.start_addr := commit_pc_bundle.startAddr
103809c6f1ddSLingrui98  ftbEntryGen.old_entry := commit_ftb_entry
103909c6f1ddSLingrui98  ftbEntryGen.pd := commit_pd
104009c6f1ddSLingrui98  ftbEntryGen.cfiIndex := commit_cfi
104109c6f1ddSLingrui98  ftbEntryGen.target := commit_target
104209c6f1ddSLingrui98  ftbEntryGen.hit := commit_real_hit
104309c6f1ddSLingrui98  ftbEntryGen.mispredict_vec := commit_mispredict
104409c6f1ddSLingrui98
104509c6f1ddSLingrui98  update_ftb_entry := ftbEntryGen.new_entry
104609c6f1ddSLingrui98  update.new_br_insert_pos := ftbEntryGen.new_br_insert_pos
104709c6f1ddSLingrui98  update.mispred_mask := ftbEntryGen.mispred_mask
104809c6f1ddSLingrui98  update.old_entry := ftbEntryGen.is_old_entry
1049edc18578SLingrui98  update.pred_hit := commit_hit === h_hit || commit_hit === h_false_hit
1050b37e4b45SLingrui98
1051b37e4b45SLingrui98  update.is_minimal := false.B
1052b37e4b45SLingrui98  update.full_pred.fromFtbEntry(ftbEntryGen.new_entry, update.pc)
1053b37e4b45SLingrui98  update.full_pred.br_taken_mask := ftbEntryGen.taken_mask
1054b37e4b45SLingrui98  update.full_pred.jalr_target := commit_target
1055b37e4b45SLingrui98  update.full_pred.hit := true.B
1056b37e4b45SLingrui98  when (update.full_pred.is_jalr) {
1057b37e4b45SLingrui98    update.full_pred.targets.last := commit_target
1058b37e4b45SLingrui98  }
105909c6f1ddSLingrui98
1060e30430c2SJay  // ****************************************************************
1061e30430c2SJay  // *********************** to prefetch ****************************
1062e30430c2SJay  // ****************************************************************
1063e30430c2SJay
1064e30430c2SJay  if(cacheParams.hasPrefetch){
1065e30430c2SJay    val prefetchPtr = RegInit(FtqPtr(false.B, 0.U))
1066e30430c2SJay    prefetchPtr := prefetchPtr + io.toPrefetch.req.fire()
1067e30430c2SJay
1068e30430c2SJay    when (bpu_s2_resp.valid && bpu_s2_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s2_resp.ftq_idx)) {
1069e30430c2SJay      prefetchPtr := bpu_s2_resp.ftq_idx
1070e30430c2SJay    }
1071e30430c2SJay
1072cb4f77ceSLingrui98    when (bpu_s3_resp.valid && bpu_s3_resp.hasRedirect && !isBefore(prefetchPtr, bpu_s3_resp.ftq_idx)) {
1073cb4f77ceSLingrui98      prefetchPtr := bpu_s3_resp.ftq_idx
1074a3c55791SJinYue      // XSError(true.B, "\ns3_redirect mechanism not implemented!\n")
1075cb4f77ceSLingrui98    }
1076de7689fcSJay
1077f63797a4SLingrui98
1078f63797a4SLingrui98    val prefetch_is_to_send = WireInit(entry_fetch_status(prefetchPtr.value) === f_to_send)
1079f63797a4SLingrui98    val prefetch_addr = WireInit(update_target(prefetchPtr.value))
1080f63797a4SLingrui98
1081f63797a4SLingrui98    when (last_cycle_bpu_in && bpu_in_bypass_ptr === prefetchPtr) {
1082f63797a4SLingrui98      prefetch_is_to_send := true.B
1083f63797a4SLingrui98      prefetch_addr := last_cycle_update_target
1084f63797a4SLingrui98    }
1085f63797a4SLingrui98    io.toPrefetch.req.valid := prefetchPtr =/= bpuPtr && prefetch_is_to_send
1086f63797a4SLingrui98    io.toPrefetch.req.bits.target := prefetch_addr
1087de7689fcSJay
1088de7689fcSJay    when(redirectVec.map(r => r.valid).reduce(_||_)){
1089de7689fcSJay      val r = PriorityMux(redirectVec.map(r => (r.valid -> r.bits)))
1090de7689fcSJay      val next = r.ftqIdx + 1.U
1091de7689fcSJay      prefetchPtr := next
1092de7689fcSJay    }
1093de7689fcSJay
1094de7689fcSJay    XSError(isBefore(bpuPtr, prefetchPtr) && !isFull(bpuPtr, prefetchPtr), "\nprefetchPtr is after bpuPtr!\n")
1095e8747464SJenius    XSError(isBefore(prefetchPtr, ifuPtr) && !isFull(ifuPtr, prefetchPtr), "\nprefetchPtr is before ifuPtr!\n")
1096de7689fcSJay  }
1097de7689fcSJay  else {
1098de7689fcSJay    io.toPrefetch.req <> DontCare
1099de7689fcSJay  }
1100de7689fcSJay
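  // Note on the prefetch logic above: prefetchPtr advances as prefetch requests
  // fire, is rewound by BPU s2/s3 redirects and by backend/IFU redirects, and the
  // two XSError checks guard the expected ifuPtr <= prefetchPtr <= bpuPtr ordering.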
110109c6f1ddSLingrui98  // ******************************************************************************
110209c6f1ddSLingrui98  // **************************** commit perf counters ****************************
110309c6f1ddSLingrui98  // ******************************************************************************
110409c6f1ddSLingrui98
110509c6f1ddSLingrui98  val commit_inst_mask = VecInit(commit_state.map(c => c === c_commited && do_commit)).asUInt
110609c6f1ddSLingrui98  val commit_mispred_mask = commit_mispredict.asUInt
110709c6f1ddSLingrui98  val commit_not_mispred_mask = ~commit_mispred_mask
110809c6f1ddSLingrui98
110909c6f1ddSLingrui98  val commit_br_mask = commit_pd.brMask.asUInt
111009c6f1ddSLingrui98  val commit_jmp_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.jmpInfo.valid.asTypeOf(UInt(1.W)))
111109c6f1ddSLingrui98  val commit_cfi_mask = (commit_br_mask | commit_jmp_mask)
111209c6f1ddSLingrui98
111309c6f1ddSLingrui98  val mbpInstrs = commit_inst_mask & commit_cfi_mask
111409c6f1ddSLingrui98
111509c6f1ddSLingrui98  val mbpRights = mbpInstrs & commit_not_mispred_mask
111609c6f1ddSLingrui98  val mbpWrongs = mbpInstrs & commit_mispred_mask
111709c6f1ddSLingrui98
111809c6f1ddSLingrui98  io.bpuInfo.bpRight := PopCount(mbpRights)
111909c6f1ddSLingrui98  io.bpuInfo.bpWrong := PopCount(mbpWrongs)
112009c6f1ddSLingrui98
112109c6f1ddSLingrui98  // Cfi Info
112209c6f1ddSLingrui98  for (i <- 0 until PredictWidth) {
112309c6f1ddSLingrui98    val pc = commit_pc_bundle.startAddr + (i * instBytes).U
112409c6f1ddSLingrui98    val v = commit_state(i) === c_commited
112509c6f1ddSLingrui98    val isBr = commit_pd.brMask(i)
112609c6f1ddSLingrui98    val isJmp = commit_pd.jmpInfo.valid && commit_pd.jmpOffset === i.U
112709c6f1ddSLingrui98    val isCfi = isBr || isJmp
112809c6f1ddSLingrui98    val isTaken = commit_cfi.valid && commit_cfi.bits === i.U
112909c6f1ddSLingrui98    val misPred = commit_mispredict(i)
1130c2ad24ebSLingrui98    // val ghist = commit_spec_meta.ghist.predHist
1131c2ad24ebSLingrui98    val histPtr = commit_spec_meta.histPtr
113209c6f1ddSLingrui98    val predCycle = commit_meta.meta(63, 0)
113309c6f1ddSLingrui98    val target = commit_target
113409c6f1ddSLingrui98
113509c6f1ddSLingrui98    val brIdx = OHToUInt(Reverse(Cat(update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U})))
113609c6f1ddSLingrui98    val inFtbEntry = update_ftb_entry.brValids.zip(update_ftb_entry.brOffset).map{case(v, offset) => v && offset === i.U}.reduce(_||_)
113709c6f1ddSLingrui98    val addIntoHist = ((commit_hit === h_hit) && inFtbEntry) || ((!(commit_hit === h_hit) && i.U === commit_cfi.bits && isBr && commit_cfi.valid))
113809c6f1ddSLingrui98    XSDebug(v && do_commit && isCfi, p"cfi_update: isBr(${isBr}) pc(${Hexadecimal(pc)}) " +
1139c2ad24ebSLingrui98      p"taken(${isTaken}) mispred(${misPred}) cycle($predCycle) hist(${histPtr.value}) " +
114009c6f1ddSLingrui98      p"startAddr(${Hexadecimal(commit_pc_bundle.startAddr)}) AddIntoHist(${addIntoHist}) " +
114109c6f1ddSLingrui98      p"brInEntry(${inFtbEntry}) brIdx(${brIdx}) target(${Hexadecimal(target)})\n")
114209c6f1ddSLingrui98  }
114309c6f1ddSLingrui98
114409c6f1ddSLingrui98  val enq = io.fromBpu.resp
11452e1be6e1SSteve Gou  val perf_redirect = backendRedirect
114609c6f1ddSLingrui98
114709c6f1ddSLingrui98  XSPerfAccumulate("entry", validEntries)
114809c6f1ddSLingrui98  XSPerfAccumulate("bpu_to_ftq_stall", enq.valid && !enq.ready)
114909c6f1ddSLingrui98  XSPerfAccumulate("mispredictRedirect", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level)
115009c6f1ddSLingrui98  XSPerfAccumulate("replayRedirect", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level))
115109c6f1ddSLingrui98  XSPerfAccumulate("predecodeRedirect", fromIfuRedirect.valid)
115209c6f1ddSLingrui98
115309c6f1ddSLingrui98  XSPerfAccumulate("to_ifu_bubble", io.toIfu.req.ready && !io.toIfu.req.valid)
115409c6f1ddSLingrui98
115509c6f1ddSLingrui98  XSPerfAccumulate("to_ifu_stall", io.toIfu.req.valid && !io.toIfu.req.ready)
115609c6f1ddSLingrui98  XSPerfAccumulate("from_bpu_real_bubble", !enq.valid && enq.ready && allowBpuIn)
115712cedb6fSLingrui98  XSPerfAccumulate("bpu_to_ifu_bubble", bpuPtr === ifuPtr)
115809c6f1ddSLingrui98
115909c6f1ddSLingrui98  val from_bpu = io.fromBpu.resp.bits
116009c6f1ddSLingrui98  def in_entry_len_map_gen(resp: BranchPredictionBundle)(stage: String) = {
1161b37e4b45SLingrui98    assert(!resp.is_minimal)
116209c6f1ddSLingrui98    val entry_len = (resp.ftb_entry.getFallThrough(resp.pc) - resp.pc) >> instOffsetBits
116309c6f1ddSLingrui98    val entry_len_recording_vec = (1 to PredictWidth+1).map(i => entry_len === i.U)
116409c6f1ddSLingrui98    val entry_len_map = (1 to PredictWidth+1).map(i =>
116509c6f1ddSLingrui98      f"${stage}_ftb_entry_len_$i" -> (entry_len_recording_vec(i-1) && resp.valid)
116609c6f1ddSLingrui98    ).foldLeft(Map[String, UInt]())(_+_)
116709c6f1ddSLingrui98    entry_len_map
116809c6f1ddSLingrui98  }
116909c6f1ddSLingrui98  val s2_entry_len_map = in_entry_len_map_gen(from_bpu.s2)("s2")
1170cb4f77ceSLingrui98  val s3_entry_len_map = in_entry_len_map_gen(from_bpu.s3)("s3")
117109c6f1ddSLingrui98
117209c6f1ddSLingrui98  val to_ifu = io.toIfu.req.bits
117309c6f1ddSLingrui98
117409c6f1ddSLingrui98
117509c6f1ddSLingrui98
117609c6f1ddSLingrui98  val commit_num_inst_recording_vec = (1 to PredictWidth).map(i => PopCount(commit_inst_mask) === i.U)
117709c6f1ddSLingrui98  val commit_num_inst_map = (1 to PredictWidth).map(i =>
117809c6f1ddSLingrui98    f"commit_num_inst_$i" -> (commit_num_inst_recording_vec(i-1) && do_commit)
117909c6f1ddSLingrui98  ).foldLeft(Map[String, UInt]())(_+_)
118009c6f1ddSLingrui98
118109c6f1ddSLingrui98
118209c6f1ddSLingrui98
118309c6f1ddSLingrui98  val commit_jal_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJal.asTypeOf(UInt(1.W)))
118409c6f1ddSLingrui98  val commit_jalr_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasJalr.asTypeOf(UInt(1.W)))
118509c6f1ddSLingrui98  val commit_call_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasCall.asTypeOf(UInt(1.W)))
118609c6f1ddSLingrui98  val commit_ret_mask = UIntToOH(commit_pd.jmpOffset) & Fill(PredictWidth, commit_pd.hasRet.asTypeOf(UInt(1.W)))
118709c6f1ddSLingrui98
118809c6f1ddSLingrui98
118909c6f1ddSLingrui98  val mbpBRights = mbpRights & commit_br_mask
119009c6f1ddSLingrui98  val mbpJRights = mbpRights & commit_jal_mask
119109c6f1ddSLingrui98  val mbpIRights = mbpRights & commit_jalr_mask
119209c6f1ddSLingrui98  val mbpCRights = mbpRights & commit_call_mask
119309c6f1ddSLingrui98  val mbpRRights = mbpRights & commit_ret_mask
119409c6f1ddSLingrui98
119509c6f1ddSLingrui98  val mbpBWrongs = mbpWrongs & commit_br_mask
119609c6f1ddSLingrui98  val mbpJWrongs = mbpWrongs & commit_jal_mask
119709c6f1ddSLingrui98  val mbpIWrongs = mbpWrongs & commit_jalr_mask
119809c6f1ddSLingrui98  val mbpCWrongs = mbpWrongs & commit_call_mask
119909c6f1ddSLingrui98  val mbpRWrongs = mbpWrongs & commit_ret_mask
120009c6f1ddSLingrui98
12011d7e5011SLingrui98  val commit_pred_stage = RegNext(pred_stage(commPtr.value))
12021d7e5011SLingrui98
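  // The helper below breaks the committed (mis)prediction masks down by the BPU
  // stage recorded in pred_stage at prediction time, producing one perf counter
  // per stage (e.g. mispredict_stage_1 ... mispredict_stage_<numBpStages>).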
12031d7e5011SLingrui98  def pred_stage_map(src: UInt, name: String) = {
12041d7e5011SLingrui98    (0 until numBpStages).map(i =>
12051d7e5011SLingrui98      f"${name}_stage_${i+1}" -> PopCount(src.asBools.map(_ && commit_pred_stage === BP_STAGES(i)))
12061d7e5011SLingrui98    ).foldLeft(Map[String, UInt]())(_+_)
12071d7e5011SLingrui98  }
12081d7e5011SLingrui98
12091d7e5011SLingrui98  val mispred_stage_map = pred_stage_map(mbpWrongs, "mispredict")
12101d7e5011SLingrui98  val br_mispred_stage_map = pred_stage_map(mbpBWrongs, "br_mispredict")
12111d7e5011SLingrui98  val jalr_mispred_stage_map = pred_stage_map(mbpIWrongs, "jalr_mispredict")
12121d7e5011SLingrui98  val correct_stage_map = pred_stage_map(mbpRights, "correct")
12131d7e5011SLingrui98  val br_correct_stage_map = pred_stage_map(mbpBRights, "br_correct")
12141d7e5011SLingrui98  val jalr_correct_stage_map = pred_stage_map(mbpIRights, "jalr_correct")
12151d7e5011SLingrui98
121609c6f1ddSLingrui98  val update_valid = io.toBpu.update.valid
121709c6f1ddSLingrui98  def u(cond: Bool) = update_valid && cond
121809c6f1ddSLingrui98  val ftb_false_hit = u(update.false_hit)
121965fddcf0Szoujr  // assert(!ftb_false_hit)
122009c6f1ddSLingrui98  val ftb_hit = u(commit_hit === h_hit)
122109c6f1ddSLingrui98
122209c6f1ddSLingrui98  val ftb_new_entry = u(ftbEntryGen.is_init_entry)
1223b37e4b45SLingrui98  val ftb_new_entry_only_br = ftb_new_entry && !update_ftb_entry.jmpValid
1224b37e4b45SLingrui98  val ftb_new_entry_only_jmp = ftb_new_entry && !update_ftb_entry.brValids(0)
1225b37e4b45SLingrui98  val ftb_new_entry_has_br_and_jmp = ftb_new_entry && update_ftb_entry.brValids(0) && update_ftb_entry.jmpValid
122609c6f1ddSLingrui98
122709c6f1ddSLingrui98  val ftb_old_entry = u(ftbEntryGen.is_old_entry)
122809c6f1ddSLingrui98
122909c6f1ddSLingrui98  val ftb_modified_entry = u(ftbEntryGen.is_new_br || ftbEntryGen.is_jalr_target_modified || ftbEntryGen.is_always_taken_modified)
123009c6f1ddSLingrui98  val ftb_modified_entry_new_br = u(ftbEntryGen.is_new_br)
123109c6f1ddSLingrui98  val ftb_modified_entry_jalr_target_modified = u(ftbEntryGen.is_jalr_target_modified)
123209c6f1ddSLingrui98  val ftb_modified_entry_br_full = ftb_modified_entry && ftbEntryGen.is_br_full
123309c6f1ddSLingrui98  val ftb_modified_entry_always_taken = ftb_modified_entry && ftbEntryGen.is_always_taken_modified
123409c6f1ddSLingrui98
123509c6f1ddSLingrui98  val ftb_entry_len = (ftbEntryGen.new_entry.getFallThrough(update.pc) - update.pc) >> instOffsetBits
123609c6f1ddSLingrui98  val ftb_entry_len_recording_vec = (1 to PredictWidth+1).map(i => ftb_entry_len === i.U)
123709c6f1ddSLingrui98  val ftb_init_entry_len_map = (1 to PredictWidth+1).map(i =>
123809c6f1ddSLingrui98    f"ftb_init_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_new_entry)
123909c6f1ddSLingrui98  ).foldLeft(Map[String, UInt]())(_+_)
124009c6f1ddSLingrui98  val ftb_modified_entry_len_map = (1 to PredictWidth+1).map(i =>
124109c6f1ddSLingrui98    f"ftb_modified_entry_len_$i" -> (ftb_entry_len_recording_vec(i-1) && ftb_modified_entry)
124209c6f1ddSLingrui98  ).foldLeft(Map[String, UInt]())(_+_)
124309c6f1ddSLingrui98
124409c6f1ddSLingrui98  val ftq_occupancy_map = (0 to FtqSize).map(i =>
124509c6f1ddSLingrui98    f"ftq_has_entry_$i" -> (validEntries === i.U)
124609c6f1ddSLingrui98  ).foldLeft(Map[String, UInt]())(_+_)
124709c6f1ddSLingrui98
124809c6f1ddSLingrui98  val perfCountsMap = Map(
124909c6f1ddSLingrui98    "BpInstr" -> PopCount(mbpInstrs),
125009c6f1ddSLingrui98    "BpBInstr" -> PopCount(mbpBRights | mbpBWrongs),
125109c6f1ddSLingrui98    "BpRight" -> PopCount(mbpRights),
125209c6f1ddSLingrui98    "BpWrong" -> PopCount(mbpWrongs),
125309c6f1ddSLingrui98    "BpBRight" -> PopCount(mbpBRights),
125409c6f1ddSLingrui98    "BpBWrong" -> PopCount(mbpBWrongs),
125509c6f1ddSLingrui98    "BpJRight" -> PopCount(mbpJRights),
125609c6f1ddSLingrui98    "BpJWrong" -> PopCount(mbpJWrongs),
125709c6f1ddSLingrui98    "BpIRight" -> PopCount(mbpIRights),
125809c6f1ddSLingrui98    "BpIWrong" -> PopCount(mbpIWrongs),
125909c6f1ddSLingrui98    "BpCRight" -> PopCount(mbpCRights),
126009c6f1ddSLingrui98    "BpCWrong" -> PopCount(mbpCWrongs),
126109c6f1ddSLingrui98    "BpRRight" -> PopCount(mbpRRights),
126209c6f1ddSLingrui98    "BpRWrong" -> PopCount(mbpRWrongs),
126309c6f1ddSLingrui98
126409c6f1ddSLingrui98    "ftb_false_hit" -> PopCount(ftb_false_hit),
126509c6f1ddSLingrui98    "ftb_hit" -> PopCount(ftb_hit),
126609c6f1ddSLingrui98    "ftb_new_entry" -> PopCount(ftb_new_entry),
126709c6f1ddSLingrui98    "ftb_new_entry_only_br" -> PopCount(ftb_new_entry_only_br),
126809c6f1ddSLingrui98    "ftb_new_entry_only_jmp" -> PopCount(ftb_new_entry_only_jmp),
126909c6f1ddSLingrui98    "ftb_new_entry_has_br_and_jmp" -> PopCount(ftb_new_entry_has_br_and_jmp),
127009c6f1ddSLingrui98    "ftb_old_entry" -> PopCount(ftb_old_entry),
127109c6f1ddSLingrui98    "ftb_modified_entry" -> PopCount(ftb_modified_entry),
127209c6f1ddSLingrui98    "ftb_modified_entry_new_br" -> PopCount(ftb_modified_entry_new_br),
127309c6f1ddSLingrui98    "ftb_jalr_target_modified" -> PopCount(ftb_modified_entry_jalr_target_modified),
127409c6f1ddSLingrui98    "ftb_modified_entry_br_full" -> PopCount(ftb_modified_entry_br_full),
127509c6f1ddSLingrui98    "ftb_modified_entry_always_taken" -> PopCount(ftb_modified_entry_always_taken)
12766d0e92edSLingrui98  ) ++ ftb_init_entry_len_map ++ ftb_modified_entry_len_map ++ s2_entry_len_map ++
1277cb4f77ceSLingrui98    s3_entry_len_map ++ commit_num_inst_map ++ ftq_occupancy_map ++
12781d7e5011SLingrui98    mispred_stage_map ++ br_mispred_stage_map ++ jalr_mispred_stage_map ++
12791d7e5011SLingrui98    correct_stage_map ++ br_correct_stage_map ++ jalr_correct_stage_map
128009c6f1ddSLingrui98
128109c6f1ddSLingrui98  for((key, value) <- perfCountsMap) {
128209c6f1ddSLingrui98    XSPerfAccumulate(key, value)
128309c6f1ddSLingrui98  }
128409c6f1ddSLingrui98
128509c6f1ddSLingrui98  // --------------------------- Debug --------------------------------
128609c6f1ddSLingrui98  // XSDebug(enq_fire, p"enq! " + io.fromBpu.resp.bits.toPrintable)
128709c6f1ddSLingrui98  XSDebug(io.toIfu.req.fire, p"fire to ifu " + io.toIfu.req.bits.toPrintable)
128809c6f1ddSLingrui98  XSDebug(do_commit, p"deq! [ptr] $do_commit_ptr\n")
128909c6f1ddSLingrui98  XSDebug(true.B, p"[bpuPtr] $bpuPtr, [ifuPtr] $ifuPtr, [ifuWbPtr] $ifuWbPtr [commPtr] $commPtr\n")
129009c6f1ddSLingrui98  XSDebug(true.B, p"[in] v:${io.fromBpu.resp.valid} r:${io.fromBpu.resp.ready} " +
129109c6f1ddSLingrui98    p"[out] v:${io.toIfu.req.valid} r:${io.toIfu.req.ready}\n")
129209c6f1ddSLingrui98  XSDebug(do_commit, p"[deq info] cfiIndex: $commit_cfi, $commit_pc_bundle, target: ${Hexadecimal(commit_target)}\n")
129309c6f1ddSLingrui98
129409c6f1ddSLingrui98  // def ubtbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
129509c6f1ddSLingrui98  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
129609c6f1ddSLingrui98  //     case (((valid, pd), ans), taken) =>
129709c6f1ddSLingrui98  //     Mux(valid && pd.isBr,
129809c6f1ddSLingrui98  //       isWrong ^ Mux(ans.hit.asBool,
129909c6f1ddSLingrui98  //         Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
130009c6f1ddSLingrui98  //         !taken),
130109c6f1ddSLingrui98  //       !taken),
130209c6f1ddSLingrui98  //     false.B)
130309c6f1ddSLingrui98  //   }
130409c6f1ddSLingrui98  // }
130509c6f1ddSLingrui98
130609c6f1ddSLingrui98  // def btbCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
130709c6f1ddSLingrui98  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
130809c6f1ddSLingrui98  //     case (((valid, pd), ans), taken) =>
130909c6f1ddSLingrui98  //     Mux(valid && pd.isBr,
131009c6f1ddSLingrui98  //       isWrong ^ Mux(ans.hit.asBool,
131109c6f1ddSLingrui98  //         Mux(ans.taken.asBool, taken && ans.target === commitEntry.target,
131209c6f1ddSLingrui98  //         !taken),
131309c6f1ddSLingrui98  //       !taken),
131409c6f1ddSLingrui98  //     false.B)
131509c6f1ddSLingrui98  //   }
131609c6f1ddSLingrui98  // }
131709c6f1ddSLingrui98
131809c6f1ddSLingrui98  // def tageCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
131909c6f1ddSLingrui98  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
132009c6f1ddSLingrui98  //     case (((valid, pd), ans), taken) =>
132109c6f1ddSLingrui98  //     Mux(valid && pd.isBr,
132209c6f1ddSLingrui98  //       isWrong ^ (ans.taken.asBool === taken),
132309c6f1ddSLingrui98  //     false.B)
132409c6f1ddSLingrui98  //   }
132509c6f1ddSLingrui98  // }
132609c6f1ddSLingrui98
132709c6f1ddSLingrui98  // def loopCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
132809c6f1ddSLingrui98  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
132909c6f1ddSLingrui98  //     case (((valid, pd), ans), taken) =>
133009c6f1ddSLingrui98  //     Mux(valid && (pd.isBr) && ans.hit.asBool,
133109c6f1ddSLingrui98  //       isWrong ^ (!taken),
133209c6f1ddSLingrui98  //     false.B)
133309c6f1ddSLingrui98  //   }
133409c6f1ddSLingrui98  // }
133509c6f1ddSLingrui98
133609c6f1ddSLingrui98  // def rasCheck(commit: FtqEntry, predAns: Seq[PredictorAnswer], isWrong: Bool) = {
133709c6f1ddSLingrui98  //   commit.valids.zip(commit.pd).zip(predAns).zip(commit.takens).map {
133809c6f1ddSLingrui98  //     case (((valid, pd), ans), taken) =>
133909c6f1ddSLingrui98  //     Mux(valid && pd.isRet.asBool /*&& taken*/ && ans.hit.asBool,
134009c6f1ddSLingrui98  //       isWrong ^ (ans.target === commitEntry.target),
134109c6f1ddSLingrui98  //     false.B)
134209c6f1ddSLingrui98  //   }
134309c6f1ddSLingrui98  // }
134409c6f1ddSLingrui98
134509c6f1ddSLingrui98  // val ubtbRights = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), false.B)
134609c6f1ddSLingrui98  // val ubtbWrongs = ubtbCheck(commitEntry, commitEntry.metas.map(_.ubtbAns), true.B)
134709c6f1ddSLingrui98  // // btb and ubtb pred jal and jalr as well
134809c6f1ddSLingrui98  // val btbRights = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), false.B)
134909c6f1ddSLingrui98  // val btbWrongs = btbCheck(commitEntry, commitEntry.metas.map(_.btbAns), true.B)
135009c6f1ddSLingrui98  // val tageRights = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), false.B)
135109c6f1ddSLingrui98  // val tageWrongs = tageCheck(commitEntry, commitEntry.metas.map(_.tageAns), true.B)
135209c6f1ddSLingrui98
135309c6f1ddSLingrui98  // val loopRights = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), false.B)
135409c6f1ddSLingrui98  // val loopWrongs = loopCheck(commitEntry, commitEntry.metas.map(_.loopAns), true.B)
135509c6f1ddSLingrui98
135609c6f1ddSLingrui98  // val rasRights = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), false.B)
135709c6f1ddSLingrui98  // val rasWrongs = rasCheck(commitEntry, commitEntry.metas.map(_.rasAns), true.B)
13581ca0e4f3SYinan Xu
1359cd365d4cSrvcoresjw  val perfEvents = Seq(
1360cd365d4cSrvcoresjw    ("bpu_s2_redirect ", bpu_s2_redirect ),
1361cb4f77ceSLingrui98    ("bpu_s3_redirect ", bpu_s3_redirect ),
1362cd365d4cSrvcoresjw    ("bpu_to_ftq_stall ", enq.valid && ~enq.ready ),
1363cd365d4cSrvcoresjw    ("mispredictRedirect ", perf_redirect.valid && RedirectLevel.flushAfter === perf_redirect.bits.level),
1364cd365d4cSrvcoresjw    ("replayRedirect ", perf_redirect.valid && RedirectLevel.flushItself(perf_redirect.bits.level) ),
1365cd365d4cSrvcoresjw    ("predecodeRedirect ", fromIfuRedirect.valid ),
1366cd365d4cSrvcoresjw    ("to_ifu_bubble ", io.toIfu.req.ready && !io.toIfu.req.valid ),
1367cd365d4cSrvcoresjw    ("from_bpu_real_bubble ", !enq.valid && enq.ready && allowBpuIn ),
1368cd365d4cSrvcoresjw    ("BpInstr ", PopCount(mbpInstrs) ),
1369cd365d4cSrvcoresjw    ("BpBInstr ", PopCount(mbpBRights | mbpBWrongs) ),
1370cd365d4cSrvcoresjw    ("BpRight ", PopCount(mbpRights) ),
1371cd365d4cSrvcoresjw    ("BpWrong ", PopCount(mbpWrongs) ),
1372cd365d4cSrvcoresjw    ("BpBRight ", PopCount(mbpBRights) ),
1373cd365d4cSrvcoresjw    ("BpBWrong ", PopCount(mbpBWrongs) ),
1374cd365d4cSrvcoresjw    ("BpJRight ", PopCount(mbpJRights) ),
1375cd365d4cSrvcoresjw    ("BpJWrong ", PopCount(mbpJWrongs) ),
1376cd365d4cSrvcoresjw    ("BpIRight ", PopCount(mbpIRights) ),
1377cd365d4cSrvcoresjw    ("BpIWrong ", PopCount(mbpIWrongs) ),
1378cd365d4cSrvcoresjw    ("BpCRight ", PopCount(mbpCRights) ),
1379cd365d4cSrvcoresjw    ("BpCWrong ", PopCount(mbpCWrongs) ),
1380cd365d4cSrvcoresjw    ("BpRRight ", PopCount(mbpRRights) ),
1381cd365d4cSrvcoresjw    ("BpRWrong ", PopCount(mbpRWrongs) ),
1382cd365d4cSrvcoresjw    ("ftb_false_hit ", PopCount(ftb_false_hit) ),
1383cd365d4cSrvcoresjw    ("ftb_hit ", PopCount(ftb_hit) ),
1384cd365d4cSrvcoresjw  )
13851ca0e4f3SYinan Xu  generatePerfEvent()
138609c6f1ddSLingrui98}
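
// The sketch below is an illustrative, non-synthesizable software model of the
// bpu_ftb_update_stall counter used in this file: roughly, committing a taken CFI
// that did not hit in the FTB loads the counter with 2, and commits are blocked
// until it counts back down to 0. The object and its name are hypothetical; it is
// not part of the XiangShan design.
object FtbUpdateStallModel {
  // One cycle of the stall counter: load 2 on a trigger in the idle state,
  // otherwise count 2 -> 1 -> 0 and stay at 0.
  def step(stall: Int, trigger: Boolean): Int = stall match {
    case 0     => if (trigger) 2 else 0
    case 2     => 1
    case 1     => 0
    case other => throw new IllegalStateException(s"stall counter should be 0, 1 or 2, got $other")
  }
  // Example: Seq(true, false, false, false).scanLeft(0)(step) yields Vector(0, 2, 1, 0, 0).
}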