/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.rocket.RVCDecoder
import xiangshan._
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

/** Constants for instruction fetch from MMIO (uncached) space. */
trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst{
  // width of the instruction uncache bus, in bits
  def mmioBusWidth = 64
  // same width expressed in bytes
  def mmioBusBytes = mmioBusWidth / 8
  // width (in bits) of the register buffering a fetched MMIO instruction
  def maxInstrLen  = 32
}

/** Constants and address helpers shared by the IFU pipeline. */
trait HasIFUConst extends HasXSParameter{
  def
// Align `addr` down to a multiple of `bytes`: keep bits (highest-1 .. log2Ceil(bytes)), zero the rest.
addrAlign(addr: UInt, bytes: Int, highest: Int): UInt = Cat(addr(highest-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  def fetchQueueSize = 2

  // Slot index derived from the byte distance between `pc` and block start `start`:
  // bits (log2Ceil(PredictWidth) .. instOffsetBits) of (pc - start - instBytes).
  // Used on fallThruAddr to build the predicted instruction-range mask.
  def getBasicBlockIdx( pc: UInt, start: UInt ): UInt = {
    val byteOffset = pc - start
    (byteOffset - instBytes.U)(log2Ceil(PredictWidth), instOffsetBits)
  }
}

/** Predecode write-back channel from IFU to FTQ. */
class IfuToFtqIO(implicit p:Parameters) extends XSBundle {
  val pdWb = Valid(new PredecodeWritebackBundle)
}

/** Bidirectional interface between FTQ and IFU. */
class FtqInterface(implicit p: Parameters) extends XSBundle {
  val fromFtq = Flipped(new FtqToIfuIO)
  val toFtq   = new IfuToFtqIO
}

/** Interface to the instruction uncache (MMIO fetch) unit. */
class UncacheInterface(implicit p: Parameters) extends XSBundle {
  val fromUncache = Flipped(DecoupledIO(new InsUncacheResp))
  val toUncache   = DecoupledIO( new InsUncacheReq )
}

/** Top-level IO of the new IFU. */
class NewIFUIO(implicit p: Parameters) extends XSBundle {
  val ftqInter         = new FtqInterface
  val icacheInter      = Vec(2, Flipped(new ICacheMainPipeBundle))
  val icacheStop       = Output(Bool())
  val icachePerfInfo   = Input(new ICachePerfInfo)
  val toIbuffer        = Decoupled(new FetchToIBuffer)
  val uncacheInter     = new UncacheInterface
  val frontendTrigger  = Flipped(new FrontendTdataDistributeIO)
  val csrTriggerEnable = Input(Vec(4, Bool()))
  val rob_commits      = Flipped(Vec(CommitWidth, Valid(new RobCommitInfo)))
}

// record the situation in which fallThruAddr falls into
// the middle of an RVI inst
class LastHalfInfo(implicit p: Parameters) extends
XSBundle {
  val valid    = Bool()
  val middlePC = UInt(VAddrBits.W)
  // true when the recorded half-instruction belongs to the block starting at `startAddr`
  def matchThisBlock(startAddr: UInt) = valid && middlePC === startAddr
}

/** Payload handed from IFU to the PreDecode module. */
class IfuToPreDecode(implicit p: Parameters) extends XSBundle {
  // raw fetch data: PredictWidth+1 half-words when RVC is enabled, else PredictWidth words
  val data             = if(HasCExtension) Vec(PredictWidth + 1, UInt(16.W)) else Vec(PredictWidth, UInt(32.W))
  val frontendTrigger  = new FrontendTdataDistributeIO
  val csrTriggerEnable = Vec(4, Bool())
  val pc               = Vec(PredictWidth, UInt(VAddrBits.W))
}

/** Payload handed from IFU to the prediction checker (PredChecker). */
class IfuToPredChecker(implicit p: Parameters) extends XSBundle {
  val ftqOffset  = Valid(UInt(log2Ceil(PredictWidth).W))
  val jumpOffset = Vec(PredictWidth, UInt(XLEN.W))
  val target     = UInt(VAddrBits.W)
  val instrRange = Vec(PredictWidth, Bool())
  val instrValid = Vec(PredictWidth, Bool())
  val pds        = Vec(PredictWidth, new PreDecodeInfo)
  val pc         = Vec(PredictWidth, UInt(VAddrBits.W))
}

class NewIFU(implicit p: Parameters) extends XSModule
  with HasICacheParameters
  with HasIFUConst
  with HasPdConst
  with HasCircularQueuePtrHelper
  with HasPerfEvents
{
  println(s"icache ways: ${nWays} sets:${nSets}")
  val io = IO(new NewIFUIO)
  val (toFtq, fromFtq)         = (io.ftqInter.toFtq, io.ftqInter.fromFtq)
  val (toICache, fromICache)   = (VecInit(io.icacheInter.map(_.req)), VecInit(io.icacheInter.map(_.resp)))
  val (toUncache, fromUncache) = (io.uncacheInter.toUncache , io.uncacheInter.fromUncache)

  // true when start and end fall in different cache lines (bit blockOffBits differs)
  def isCrossLineReq(start:
UInt, end: UInt): Bool = start(blockOffBits) ^ end(blockOffBits) 11009c6f1ddSLingrui98 11109c6f1ddSLingrui98 def isLastInCacheline(fallThruAddr: UInt): Bool = fallThruAddr(blockOffBits - 1, 1) === 0.U 11209c6f1ddSLingrui98 1131d8f4dcbSJay class TlbExept(implicit p: Parameters) extends XSBundle{ 1141d8f4dcbSJay val pageFault = Bool() 1151d8f4dcbSJay val accessFault = Bool() 1161d8f4dcbSJay val mmio = Bool() 117b005f7c6SJay } 11809c6f1ddSLingrui98 1192a3050c2SJay val preDecoder = Module(new PreDecode) 1202a3050c2SJay val predChecker = Module(new PredChecker) 1212a3050c2SJay val frontendTrigger = Module(new FrontendTrigger) 1222a3050c2SJay val (preDecoderIn, preDecoderOut) = (preDecoder.io.in, preDecoder.io.out) 1232a3050c2SJay val (checkerIn, checkerOut) = (predChecker.io.in, predChecker.io.out) 1241d8f4dcbSJay 125*58dbdfc2SJay /** 126*58dbdfc2SJay ****************************************************************************** 127*58dbdfc2SJay * IFU Stage 0 128*58dbdfc2SJay * - send cacheline fetch request to ICacheMainPipe 129*58dbdfc2SJay ****************************************************************************** 130*58dbdfc2SJay */ 13109c6f1ddSLingrui98 13209c6f1ddSLingrui98 val f0_valid = fromFtq.req.valid 13309c6f1ddSLingrui98 val f0_ftq_req = fromFtq.req.bits 13409c6f1ddSLingrui98 val f0_situation = VecInit(Seq(isCrossLineReq(f0_ftq_req.startAddr, f0_ftq_req.fallThruAddr), isLastInCacheline(f0_ftq_req.fallThruAddr))) 13509c6f1ddSLingrui98 val f0_doubleLine = f0_situation(0) || f0_situation(1) 13609c6f1ddSLingrui98 val f0_vSetIdx = VecInit(get_idx((f0_ftq_req.startAddr)), get_idx(f0_ftq_req.fallThruAddr)) 13709c6f1ddSLingrui98 val f0_fire = fromFtq.req.fire() 13809c6f1ddSLingrui98 13909c6f1ddSLingrui98 val f0_flush, f1_flush, f2_flush, f3_flush = WireInit(false.B) 14009c6f1ddSLingrui98 val from_bpu_f0_flush, from_bpu_f1_flush, from_bpu_f2_flush, from_bpu_f3_flush = WireInit(false.B) 14109c6f1ddSLingrui98 14209c6f1ddSLingrui98 from_bpu_f0_flush := 
fromFtq.flushFromBpu.shouldFlushByStage2(f0_ftq_req.ftqIdx) || 14309c6f1ddSLingrui98 fromFtq.flushFromBpu.shouldFlushByStage3(f0_ftq_req.ftqIdx) 14409c6f1ddSLingrui98 1452a3050c2SJay val wb_redirect , mmio_redirect, backend_redirect= WireInit(false.B) 1462a3050c2SJay val f3_wb_not_flush = WireInit(false.B) 1472a3050c2SJay 1482a3050c2SJay backend_redirect := fromFtq.redirect.valid 1492a3050c2SJay f3_flush := backend_redirect || (wb_redirect && !f3_wb_not_flush) 1502a3050c2SJay f2_flush := backend_redirect || mmio_redirect || wb_redirect 15109c6f1ddSLingrui98 f1_flush := f2_flush || from_bpu_f1_flush 15209c6f1ddSLingrui98 f0_flush := f1_flush || from_bpu_f0_flush 15309c6f1ddSLingrui98 15409c6f1ddSLingrui98 val f1_ready, f2_ready, f3_ready = WireInit(false.B) 15509c6f1ddSLingrui98 1561d8f4dcbSJay fromFtq.req.ready := toICache(0).ready && toICache(1).ready && f2_ready && GTimer() > 500.U 15709c6f1ddSLingrui98 1582a3050c2SJay toICache(0).valid := fromFtq.req.valid && !f0_flush 1591d8f4dcbSJay toICache(0).bits.vaddr := fromFtq.req.bits.startAddr 1602a3050c2SJay toICache(1).valid := fromFtq.req.valid && f0_doubleLine && !f0_flush 1611d8f4dcbSJay toICache(1).bits.vaddr := fromFtq.req.bits.fallThruAddr 16209c6f1ddSLingrui98 163*58dbdfc2SJay /** <PERF> f0 fetch bubble */ 164f7c29b0aSJinYue 165*58dbdfc2SJay XSPerfAccumulate("fetch_bubble_ftq_not_valid", !f0_valid ) 166*58dbdfc2SJay XSPerfAccumulate("fetch_bubble_pipe_stall", f0_valid && toICache(0).ready && toICache(1).ready && !f1_ready ) 167*58dbdfc2SJay XSPerfAccumulate("fetch_bubble_sram_0_busy", f0_valid && !toICache(0).ready ) 168*58dbdfc2SJay XSPerfAccumulate("fetch_bubble_sram_1_busy", f0_valid && !toICache(1).ready ) 169*58dbdfc2SJay 170*58dbdfc2SJay 171*58dbdfc2SJay /** 172*58dbdfc2SJay ****************************************************************************** 173*58dbdfc2SJay * IFU Stage 1 174*58dbdfc2SJay * - calculate pc/half_pc/cut_ptr for every instruction 175*58dbdfc2SJay 
****************************************************************************** 176*58dbdfc2SJay */ 1771d8f4dcbSJay 1781d8f4dcbSJay val f1_valid = RegInit(false.B) 1791d8f4dcbSJay val f1_ftq_req = RegEnable(next = f0_ftq_req, enable=f0_fire) 1801d8f4dcbSJay val f1_situation = RegEnable(next = f0_situation, enable=f0_fire) 1811d8f4dcbSJay val f1_doubleLine = RegEnable(next = f0_doubleLine, enable=f0_fire) 1821d8f4dcbSJay val f1_vSetIdx = RegEnable(next = f0_vSetIdx, enable=f0_fire) 1831d8f4dcbSJay val f1_fire = f1_valid && f1_ready 1841d8f4dcbSJay 1851d8f4dcbSJay f1_ready := f2_ready || !f1_valid 1861d8f4dcbSJay 1871d8f4dcbSJay from_bpu_f1_flush := fromFtq.flushFromBpu.shouldFlushByStage3(f1_ftq_req.ftqIdx) 1881d8f4dcbSJay 1891d8f4dcbSJay when(f1_flush) {f1_valid := false.B} 1901d8f4dcbSJay .elsewhen(f0_fire && !f0_flush) {f1_valid := true.B} 1911d8f4dcbSJay .elsewhen(f1_fire) {f1_valid := false.B} 1922a3050c2SJay 1932a3050c2SJay val f1_pc = VecInit((0 until PredictWidth).map(i => f1_ftq_req.startAddr + (i * 2).U)) 1942a3050c2SJay val f1_half_snpc = VecInit((0 until PredictWidth).map(i => f1_ftq_req.startAddr + ((i+2) * 2).U)) 1952a3050c2SJay val f1_cut_ptr = if(HasCExtension) VecInit((0 until PredictWidth + 1).map(i => Cat(0.U(1.W), f1_ftq_req.startAddr(blockOffBits-1, 1)) + i.U )) 1962a3050c2SJay else VecInit((0 until PredictWidth).map(i => Cat(0.U(1.W), f1_ftq_req.startAddr(blockOffBits-1, 2)) + i.U )) 1972a3050c2SJay 198*58dbdfc2SJay /** 199*58dbdfc2SJay ****************************************************************************** 200*58dbdfc2SJay * IFU Stage 2 201*58dbdfc2SJay * - icache response data (latched for pipeline stop) 202*58dbdfc2SJay * - generate exceprion bits for every instruciton (page fault/access fault/mmio) 203*58dbdfc2SJay * - generate predicted instruction range (1 means this instruciton is in this fetch packet) 204*58dbdfc2SJay * - cut data from cachlines to packet instruction code 205*58dbdfc2SJay * - instruction predecode and RVC expand 
206*58dbdfc2SJay ****************************************************************************** 207*58dbdfc2SJay */ 208*58dbdfc2SJay 2091d8f4dcbSJay val icacheRespAllValid = WireInit(false.B) 21009c6f1ddSLingrui98 21109c6f1ddSLingrui98 val f2_valid = RegInit(false.B) 21209c6f1ddSLingrui98 val f2_ftq_req = RegEnable(next = f1_ftq_req, enable=f1_fire) 21309c6f1ddSLingrui98 val f2_situation = RegEnable(next = f1_situation, enable=f1_fire) 21409c6f1ddSLingrui98 val f2_doubleLine = RegEnable(next = f1_doubleLine, enable=f1_fire) 2151d8f4dcbSJay val f2_vSetIdx = RegEnable(next = f1_vSetIdx, enable=f1_fire) 2161d8f4dcbSJay val f2_fire = f2_valid && f2_ready 2171d8f4dcbSJay 2181d8f4dcbSJay f2_ready := f3_ready && icacheRespAllValid || !f2_valid 2191d8f4dcbSJay //TODO: addr compare may be timing critical 2201d8f4dcbSJay val f2_icache_all_resp_wire = fromICache(0).valid && (fromICache(0).bits.vaddr === f2_ftq_req.startAddr) && ((fromICache(1).valid && (fromICache(1).bits.vaddr === f2_ftq_req.fallThruAddr)) || !f2_doubleLine) 2211d8f4dcbSJay val f2_icache_all_resp_reg = RegInit(false.B) 2221d8f4dcbSJay 2231d8f4dcbSJay icacheRespAllValid := f2_icache_all_resp_reg || f2_icache_all_resp_wire 2241d8f4dcbSJay 2251d8f4dcbSJay io.icacheStop := !f3_ready 2261d8f4dcbSJay 2271d8f4dcbSJay when(f2_flush) {f2_icache_all_resp_reg := false.B} 2281d8f4dcbSJay .elsewhen(f2_valid && f2_icache_all_resp_wire && !f3_ready) {f2_icache_all_resp_reg := true.B} 2291d8f4dcbSJay .elsewhen(f2_fire && f2_icache_all_resp_reg) {f2_icache_all_resp_reg := false.B} 23009c6f1ddSLingrui98 23109c6f1ddSLingrui98 when(f2_flush) {f2_valid := false.B} 23209c6f1ddSLingrui98 .elsewhen(f1_fire && !f1_flush) {f2_valid := true.B } 23309c6f1ddSLingrui98 .elsewhen(f2_fire) {f2_valid := false.B} 23409c6f1ddSLingrui98 2351d8f4dcbSJay val f2_cache_response_data = ResultHoldBypass(valid = f2_icache_all_resp_wire, data = VecInit(fromICache.map(_.bits.readData))) 23609c6f1ddSLingrui98 2371d8f4dcbSJay val f2_except_pf = 
VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.pageFault)) 2381d8f4dcbSJay val f2_except_af = VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.accessFault)) 239c0b2b8e9Srvcoresjw val f2_mmio = fromICache(0).bits.tlbExcp.mmio && !fromICache(0).bits.tlbExcp.accessFault && 240c0b2b8e9Srvcoresjw !fromICache(0).bits.tlbExcp.pageFault 2410be662e4SJay 2422a3050c2SJay val f2_pc = RegEnable(next = f1_pc, enable = f1_fire) 2432a3050c2SJay val f2_half_snpc = RegEnable(next = f1_half_snpc, enable = f1_fire) 2442a3050c2SJay val f2_cut_ptr = RegEnable(next = f1_cut_ptr, enable = f1_fire) 2452a3050c2SJay 2462a3050c2SJay 2472a3050c2SJay def isNextLine(pc: UInt, startAddr: UInt) = { 2482a3050c2SJay startAddr(blockOffBits) ^ pc(blockOffBits) 2492a3050c2SJay } 2502a3050c2SJay 2512a3050c2SJay def isLastInLine(pc: UInt) = { 2522a3050c2SJay pc(blockOffBits - 1, 0) === "b111110".U 2532a3050c2SJay } 2542a3050c2SJay 2552a3050c2SJay val f2_foldpc = VecInit(f2_pc.map(i => XORFold(i(VAddrBits-1,1), MemPredPCWidth))) 2562a3050c2SJay val f2_jump_range = Fill(PredictWidth, !f2_ftq_req.ftqOffset.valid) | Fill(PredictWidth, 1.U(1.W)) >> ~f2_ftq_req.ftqOffset.bits 2572a3050c2SJay val f2_ftr_range = Fill(PredictWidth, f2_ftq_req.oversize) | Fill(PredictWidth, 1.U(1.W)) >> ~getBasicBlockIdx(f2_ftq_req.fallThruAddr, f2_ftq_req.startAddr) 2582a3050c2SJay val f2_instr_range = f2_jump_range & f2_ftr_range 2592a3050c2SJay val f2_pf_vec = VecInit((0 until PredictWidth).map(i => (!isNextLine(f2_pc(i), f2_ftq_req.startAddr) && f2_except_pf(0) || isNextLine(f2_pc(i), f2_ftq_req.startAddr) && f2_doubleLine && f2_except_pf(1)))) 2602a3050c2SJay val f2_af_vec = VecInit((0 until PredictWidth).map(i => (!isNextLine(f2_pc(i), f2_ftq_req.startAddr) && f2_except_af(0) || isNextLine(f2_pc(i), f2_ftq_req.startAddr) && f2_doubleLine && f2_except_af(1)))) 2612a3050c2SJay 2621d8f4dcbSJay val f2_paddrs = VecInit((0 until PortNumber).map(i => fromICache(i).bits.paddr)) 2631d8f4dcbSJay val 
f2_perf_info = io.icachePerfInfo 26409c6f1ddSLingrui98 2652a3050c2SJay def cut(cacheline: UInt, cutPtr: Vec[UInt]) : Vec[UInt] ={ 26609c6f1ddSLingrui98 if(HasCExtension){ 26709c6f1ddSLingrui98 val result = Wire(Vec(PredictWidth + 1, UInt(16.W))) 26809c6f1ddSLingrui98 val dataVec = cacheline.asTypeOf(Vec(blockBytes * 2/ 2, UInt(16.W))) 26909c6f1ddSLingrui98 (0 until PredictWidth + 1).foreach( i => 2702a3050c2SJay result(i) := dataVec(cutPtr(i)) 27109c6f1ddSLingrui98 ) 27209c6f1ddSLingrui98 result 27309c6f1ddSLingrui98 } else { 27409c6f1ddSLingrui98 val result = Wire(Vec(PredictWidth, UInt(32.W)) ) 27509c6f1ddSLingrui98 val dataVec = cacheline.asTypeOf(Vec(blockBytes * 2/ 4, UInt(32.W))) 27609c6f1ddSLingrui98 (0 until PredictWidth).foreach( i => 2772a3050c2SJay result(i) := dataVec(cutPtr(i)) 27809c6f1ddSLingrui98 ) 27909c6f1ddSLingrui98 result 28009c6f1ddSLingrui98 } 28109c6f1ddSLingrui98 } 28209c6f1ddSLingrui98 2832a3050c2SJay val f2_datas = VecInit((0 until PortNumber).map(i => f2_cache_response_data(i))) 2842a3050c2SJay val f2_cut_data = cut( Cat(f2_datas.map(cacheline => cacheline.asUInt ).reverse).asUInt, f2_cut_ptr ) 2852a3050c2SJay 286*58dbdfc2SJay /** predecode (include RVC expander) */ 2872a3050c2SJay preDecoderIn.data := f2_cut_data 2882a3050c2SJay preDecoderIn.frontendTrigger := io.frontendTrigger 2892a3050c2SJay preDecoderIn.csrTriggerEnable := io.csrTriggerEnable 2902a3050c2SJay preDecoderIn.pc := f2_pc 2912a3050c2SJay 2922a3050c2SJay val f2_expd_instr = preDecoderOut.expInstr 2932a3050c2SJay val f2_pd = preDecoderOut.pd 2942a3050c2SJay val f2_jump_offset = preDecoderOut.jumpOffset 2952a3050c2SJay val f2_hasHalfValid = preDecoderOut.hasHalfValid 2962a3050c2SJay val f2_crossPageFault = VecInit((0 until PredictWidth).map(i => isLastInLine(f2_pc(i)) && !f2_except_pf(0) && f2_doubleLine && f2_except_pf(1) && !f2_pd(i).isRVC )) 2972a3050c2SJay 2981d8f4dcbSJay val predecodeOutValid = WireInit(false.B) 2991d8f4dcbSJay 30009c6f1ddSLingrui98 301*58dbdfc2SJay /** 
302*58dbdfc2SJay ****************************************************************************** 303*58dbdfc2SJay * IFU Stage 3 304*58dbdfc2SJay * - handle MMIO instruciton 305*58dbdfc2SJay * -send request to Uncache fetch Unit 306*58dbdfc2SJay * -every packet include 1 MMIO instruction 307*58dbdfc2SJay * -MMIO instructions will stop fetch pipeline until commiting from RoB 308*58dbdfc2SJay * -flush to snpc (send ifu_redirect to Ftq) 309*58dbdfc2SJay * - Ibuffer enqueue 310*58dbdfc2SJay * - check predict result in Frontend (jalFault/retFault/notCFIFault/invalidTakenFault/targetFault) 311*58dbdfc2SJay * - handle last half RVI instruction 312*58dbdfc2SJay ****************************************************************************** 313*58dbdfc2SJay */ 314*58dbdfc2SJay 31509c6f1ddSLingrui98 val f3_valid = RegInit(false.B) 31609c6f1ddSLingrui98 val f3_ftq_req = RegEnable(next = f2_ftq_req, enable=f2_fire) 31709c6f1ddSLingrui98 val f3_situation = RegEnable(next = f2_situation, enable=f2_fire) 31809c6f1ddSLingrui98 val f3_doubleLine = RegEnable(next = f2_doubleLine, enable=f2_fire) 3191d8f4dcbSJay val f3_fire = io.toIbuffer.fire() 3201d8f4dcbSJay 3211d8f4dcbSJay f3_ready := io.toIbuffer.ready || !f3_valid 32209c6f1ddSLingrui98 32309c6f1ddSLingrui98 val f3_cut_data = RegEnable(next = f2_cut_data, enable=f2_fire) 3241d8f4dcbSJay 32509c6f1ddSLingrui98 val f3_except_pf = RegEnable(next = f2_except_pf, enable = f2_fire) 32609c6f1ddSLingrui98 val f3_except_af = RegEnable(next = f2_except_af, enable = f2_fire) 3270be662e4SJay val f3_mmio = RegEnable(next = f2_mmio , enable = f2_fire) 32809c6f1ddSLingrui98 3292a3050c2SJay val f3_expd_instr = RegEnable(next = f2_expd_instr, enable = f2_fire) 3302a3050c2SJay val f3_pd = RegEnable(next = f2_pd, enable = f2_fire) 3312a3050c2SJay val f3_jump_offset = RegEnable(next = f2_jump_offset, enable = f2_fire) 3322a3050c2SJay val f3_af_vec = RegEnable(next = f2_af_vec, enable = f2_fire) 3332a3050c2SJay val f3_pf_vec = RegEnable(next = f2_pf_vec 
, enable = f2_fire) 3342a3050c2SJay val f3_pc = RegEnable(next = f2_pc, enable = f2_fire) 3352a3050c2SJay val f3_half_snpc = RegEnable(next = f2_half_snpc, enable = f2_fire) 3362a3050c2SJay val f3_instr_range = RegEnable(next = f2_instr_range, enable = f2_fire) 3372a3050c2SJay val f3_foldpc = RegEnable(next = f2_foldpc, enable = f2_fire) 3382a3050c2SJay val f3_crossPageFault = RegEnable(next = f2_crossPageFault, enable = f2_fire) 3392a3050c2SJay val f3_hasHalfValid = RegEnable(next = f2_hasHalfValid, enable = f2_fire) 34009c6f1ddSLingrui98 val f3_except = VecInit((0 until 2).map{i => f3_except_pf(i) || f3_except_af(i)}) 34109c6f1ddSLingrui98 val f3_has_except = f3_valid && (f3_except_af.reduce(_||_) || f3_except_pf.reduce(_||_)) 3421d8f4dcbSJay val f3_pAddrs = RegEnable(next = f2_paddrs, enable = f2_fire) 343a37fbf10SJay 344a1351e5dSJay val f3_oversize_target = f3_pc.last + 2.U 345a1351e5dSJay 3462a3050c2SJay /*** MMIO State Machine***/ 347a37fbf10SJay val f3_mmio_data = Reg(UInt(maxInstrLen.W)) 348a37fbf10SJay 3492a3050c2SJay val mmio_idle :: mmio_send_req :: mmio_w_resp :: mmio_resend :: mmio_resend_w_resp :: mmio_wait_commit :: mmio_commited :: Nil = Enum(7) 350a37fbf10SJay val mmio_state = RegInit(mmio_idle) 351a37fbf10SJay 3529bae7d6eSJay val f3_req_is_mmio = f3_mmio && f3_valid 3532a3050c2SJay val mmio_commit = VecInit(io.rob_commits.map{commit => commit.valid && commit.bits.ftqIdx === f3_ftq_req.ftqIdx && commit.bits.ftqOffset === 0.U}).asUInt.orR 3542a3050c2SJay val f3_mmio_req_commit = f3_req_is_mmio && mmio_state === mmio_commited 355a37fbf10SJay 3562a3050c2SJay val f3_mmio_to_commit = f3_req_is_mmio && mmio_state === mmio_wait_commit 357a37fbf10SJay val f3_mmio_to_commit_next = RegNext(f3_mmio_to_commit) 358a37fbf10SJay val f3_mmio_can_go = f3_mmio_to_commit && !f3_mmio_to_commit_next 359a37fbf10SJay 3609bae7d6eSJay val f3_ftq_flush_self = fromFtq.redirect.valid && RedirectLevel.flushItself(fromFtq.redirect.bits.level) 361167bcd01SJay val 
f3_ftq_flush_by_older = fromFtq.redirect.valid && isBefore(fromFtq.redirect.bits.ftqIdx, f3_ftq_req.ftqIdx) 3629bae7d6eSJay 363167bcd01SJay val f3_need_not_flush = f3_req_is_mmio && fromFtq.redirect.valid && !f3_ftq_flush_self && !f3_ftq_flush_by_older 3649bae7d6eSJay 3659bae7d6eSJay when(f3_flush && !f3_need_not_flush) {f3_valid := false.B} 366a37fbf10SJay .elsewhen(f2_fire && !f2_flush ) {f3_valid := true.B } 367a37fbf10SJay .elsewhen(io.toIbuffer.fire() && !f3_req_is_mmio) {f3_valid := false.B} 368a37fbf10SJay .elsewhen{f3_req_is_mmio && f3_mmio_req_commit} {f3_valid := false.B} 369a37fbf10SJay 370a37fbf10SJay val f3_mmio_use_seq_pc = RegInit(false.B) 371a37fbf10SJay 372a37fbf10SJay val (redirect_ftqIdx, redirect_ftqOffset) = (fromFtq.redirect.bits.ftqIdx,fromFtq.redirect.bits.ftqOffset) 373a37fbf10SJay val redirect_mmio_req = fromFtq.redirect.valid && redirect_ftqIdx === f3_ftq_req.ftqIdx && redirect_ftqOffset === 0.U 374a37fbf10SJay 375a37fbf10SJay when(RegNext(f2_fire && !f2_flush) && f3_req_is_mmio) { f3_mmio_use_seq_pc := true.B } 376a37fbf10SJay .elsewhen(redirect_mmio_req) { f3_mmio_use_seq_pc := false.B } 377a37fbf10SJay 378a37fbf10SJay f3_ready := Mux(f3_req_is_mmio, io.toIbuffer.ready && f3_mmio_req_commit || !f3_valid , io.toIbuffer.ready || !f3_valid) 379a37fbf10SJay 380a37fbf10SJay when(fromUncache.fire()) {f3_mmio_data := fromUncache.bits.data} 381a37fbf10SJay 382a37fbf10SJay 383a37fbf10SJay switch(mmio_state){ 384a37fbf10SJay is(mmio_idle){ 3859bae7d6eSJay when(f3_req_is_mmio){ 386a37fbf10SJay mmio_state := mmio_send_req 387a37fbf10SJay } 388a37fbf10SJay } 389a37fbf10SJay 390a37fbf10SJay is(mmio_send_req){ 391a37fbf10SJay mmio_state := Mux(toUncache.fire(), mmio_w_resp, mmio_send_req ) 392a37fbf10SJay } 393a37fbf10SJay 394a37fbf10SJay is(mmio_w_resp){ 395a37fbf10SJay when(fromUncache.fire()){ 396a37fbf10SJay val isRVC = fromUncache.bits.data(1,0) =/= 3.U 3972a3050c2SJay mmio_state := Mux(isRVC, mmio_resend , mmio_wait_commit) 398a37fbf10SJay } 
399a37fbf10SJay } 400a37fbf10SJay 401a37fbf10SJay is(mmio_resend){ 402a37fbf10SJay mmio_state := Mux(toUncache.fire(), mmio_resend_w_resp, mmio_resend ) 403a37fbf10SJay } 404a37fbf10SJay 405a37fbf10SJay is(mmio_resend_w_resp){ 406a37fbf10SJay when(fromUncache.fire()){ 4072a3050c2SJay mmio_state := mmio_wait_commit 408a37fbf10SJay } 409a37fbf10SJay } 410a37fbf10SJay 4112a3050c2SJay is(mmio_wait_commit){ 4122a3050c2SJay when(mmio_commit){ 4132a3050c2SJay mmio_state := mmio_commited 414a37fbf10SJay } 415a37fbf10SJay } 4162a3050c2SJay 4172a3050c2SJay is(mmio_commited){ 4182a3050c2SJay mmio_state := mmio_idle 4192a3050c2SJay } 420a37fbf10SJay } 421a37fbf10SJay 422167bcd01SJay when(f3_ftq_flush_self || f3_ftq_flush_by_older) { 4239bae7d6eSJay mmio_state := mmio_idle 4249bae7d6eSJay f3_mmio_data := 0.U 4259bae7d6eSJay } 4269bae7d6eSJay 427a37fbf10SJay toUncache.valid := ((mmio_state === mmio_send_req) || (mmio_state === mmio_resend)) && f3_req_is_mmio 428a37fbf10SJay toUncache.bits.addr := Mux((mmio_state === mmio_resend), f3_pAddrs(0) + 2.U, f3_pAddrs(0)) 429a37fbf10SJay fromUncache.ready := true.B 430a37fbf10SJay 43109c6f1ddSLingrui98 4322a3050c2SJay val f3_lastHalf = RegInit(0.U.asTypeOf(new LastHalfInfo)) 43309c6f1ddSLingrui98 43409c6f1ddSLingrui98 val f3_predecode_range = VecInit(preDecoderOut.pd.map(inst => inst.valid)).asUInt 4350be662e4SJay val f3_mmio_range = VecInit((0 until PredictWidth).map(i => if(i ==0) true.B else false.B)) 4362a3050c2SJay val f3_instr_valid = Wire(Vec(PredictWidth, Bool())) 43709c6f1ddSLingrui98 4382a3050c2SJay /*** prediction result check ***/ 4392a3050c2SJay checkerIn.ftqOffset := f3_ftq_req.ftqOffset 4402a3050c2SJay checkerIn.jumpOffset := f3_jump_offset 4412a3050c2SJay checkerIn.target := f3_ftq_req.target 4422a3050c2SJay checkerIn.instrRange := f3_instr_range.asTypeOf(Vec(PredictWidth, Bool())) 4432a3050c2SJay checkerIn.instrValid := f3_instr_valid.asTypeOf(Vec(PredictWidth, Bool())) 4442a3050c2SJay checkerIn.pds := f3_pd 
4452a3050c2SJay checkerIn.pc := f3_pc 4462a3050c2SJay 447*58dbdfc2SJay /*** handle half RVI in the last 2 Bytes ***/ 4482a3050c2SJay 4492a3050c2SJay def hasLastHalf(idx: UInt) = { 4502a3050c2SJay !f3_pd(idx).isRVC && checkerOut.fixedRange(idx) && f3_instr_valid(idx) && !checkerOut.fixedTaken(idx) && !checkerOut.fixedMissPred(idx) && ! f3_req_is_mmio && !f3_ftq_req.oversize 4512a3050c2SJay } 4522a3050c2SJay 4532a3050c2SJay val f3_last_validIdx = ~ParallelPriorityEncoder(checkerOut.fixedRange.reverse) 4542a3050c2SJay 4552a3050c2SJay val f3_hasLastHalf = hasLastHalf((PredictWidth - 1).U) 4562a3050c2SJay val f3_false_lastHalf = hasLastHalf(f3_last_validIdx) 4572a3050c2SJay val f3_false_snpc = f3_half_snpc(f3_last_validIdx) 4582a3050c2SJay 4592a3050c2SJay val f3_lastHalf_mask = VecInit((0 until PredictWidth).map( i => if(i ==0) false.B else true.B )).asUInt() 4602a3050c2SJay 4612a3050c2SJay when (f3_flush) { 4622a3050c2SJay f3_lastHalf.valid := false.B 4632a3050c2SJay }.elsewhen (f3_fire) { 4642a3050c2SJay f3_lastHalf.valid := f3_hasLastHalf 4652a3050c2SJay f3_lastHalf.middlePC := f3_ftq_req.fallThruAddr 4662a3050c2SJay } 4672a3050c2SJay 4682a3050c2SJay f3_instr_valid := Mux(f3_lastHalf.valid,f3_hasHalfValid ,VecInit(f3_pd.map(inst => inst.valid))) 4692a3050c2SJay 4702a3050c2SJay /*** frontend Trigger ***/ 4712a3050c2SJay frontendTrigger.io.pds := f3_pd 4722a3050c2SJay frontendTrigger.io.pc := f3_pc 4732a3050c2SJay frontendTrigger.io.data := f3_cut_data 4742a3050c2SJay 4752a3050c2SJay frontendTrigger.io.frontendTrigger := io.frontendTrigger 4762a3050c2SJay frontendTrigger.io.csrTriggerEnable := io.csrTriggerEnable 4772a3050c2SJay 4782a3050c2SJay val f3_triggered = frontendTrigger.io.triggered 4792a3050c2SJay 4802a3050c2SJay /*** send to Ibuffer ***/ 4812a3050c2SJay 4822a3050c2SJay io.toIbuffer.valid := f3_valid && (!f3_req_is_mmio || f3_mmio_can_go) && !f3_flush 4832a3050c2SJay io.toIbuffer.bits.instrs := f3_expd_instr 4842a3050c2SJay io.toIbuffer.bits.valid := 
f3_instr_valid.asUInt 4852a3050c2SJay io.toIbuffer.bits.enqEnable := checkerOut.fixedRange.asUInt & f3_instr_valid.asUInt 4862a3050c2SJay io.toIbuffer.bits.pd := f3_pd 48709c6f1ddSLingrui98 io.toIbuffer.bits.ftqPtr := f3_ftq_req.ftqIdx 4882a3050c2SJay io.toIbuffer.bits.pc := f3_pc 4892a3050c2SJay io.toIbuffer.bits.ftqOffset.zipWithIndex.map{case(a, i) => a.bits := i.U; a.valid := checkerOut.fixedTaken(i) && !f3_req_is_mmio} 4902a3050c2SJay io.toIbuffer.bits.foldpc := f3_foldpc 4912a3050c2SJay io.toIbuffer.bits.ipf := f3_pf_vec 4922a3050c2SJay io.toIbuffer.bits.acf := f3_af_vec 4932a3050c2SJay io.toIbuffer.bits.crossPageIPFFix := f3_crossPageFault 4942a3050c2SJay io.toIbuffer.bits.triggered := f3_triggered 4952a3050c2SJay 4962a3050c2SJay val lastHalfMask = VecInit((0 until PredictWidth).map(i => if(i ==0) false.B else true.B)) 4972a3050c2SJay when(f3_lastHalf.valid){ 4982a3050c2SJay io.toIbuffer.bits.enqEnable := checkerOut.fixedRange.asUInt & f3_instr_valid.asUInt & lastHalfMask.asUInt 4992a3050c2SJay io.toIbuffer.bits.valid := f3_lastHalf_mask & f3_instr_valid.asUInt 5002a3050c2SJay } 5012a3050c2SJay 5022a3050c2SJay /** external predecode for MMIO instruction */ 5032a3050c2SJay when(f3_req_is_mmio){ 5042a3050c2SJay val inst = Cat(f3_mmio_data(31,16), f3_mmio_data(15,0)) 5052a3050c2SJay val currentIsRVC = isRVC(inst) 5062a3050c2SJay 5072a3050c2SJay val brType::isCall::isRet::Nil = brInfo(inst) 5082a3050c2SJay val jalOffset = jal_offset(inst, currentIsRVC) 5092a3050c2SJay val brOffset = br_offset(inst, currentIsRVC) 5102a3050c2SJay 5112a3050c2SJay io.toIbuffer.bits.instrs (0) := new RVCDecoder(inst, XLEN).decode.bits 5122a3050c2SJay 5132a3050c2SJay io.toIbuffer.bits.pd(0).valid := true.B 5142a3050c2SJay io.toIbuffer.bits.pd(0).isRVC := currentIsRVC 5152a3050c2SJay io.toIbuffer.bits.pd(0).brType := brType 5162a3050c2SJay io.toIbuffer.bits.pd(0).isCall := isCall 5172a3050c2SJay io.toIbuffer.bits.pd(0).isRet := isRet 5182a3050c2SJay 5192a3050c2SJay 
io.toIbuffer.bits.enqEnable := f3_mmio_range.asUInt 5202a3050c2SJay } 5212a3050c2SJay 52209c6f1ddSLingrui98 52309c6f1ddSLingrui98 //Write back to Ftq 524a37fbf10SJay val f3_cache_fetch = f3_valid && !(f2_fire && !f2_flush) 525a37fbf10SJay val finishFetchMaskReg = RegNext(f3_cache_fetch) 526a37fbf10SJay 5272a3050c2SJay val mmioFlushWb = Wire(Valid(new PredecodeWritebackBundle)) 5280be662e4SJay val f3_mmio_missOffset = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))) 529a37fbf10SJay f3_mmio_missOffset.valid := f3_req_is_mmio 5300be662e4SJay f3_mmio_missOffset.bits := 0.U 5310be662e4SJay 5322a3050c2SJay mmioFlushWb.valid := (f3_req_is_mmio && mmio_state === mmio_wait_commit && RegNext(fromUncache.fire()) && f3_mmio_use_seq_pc) 5332a3050c2SJay mmioFlushWb.bits.pc := f3_pc 5342a3050c2SJay mmioFlushWb.bits.pd := f3_pd 5352a3050c2SJay mmioFlushWb.bits.pd.zipWithIndex.map{case(instr,i) => instr.valid := f3_mmio_range(i)} 5362a3050c2SJay mmioFlushWb.bits.ftqIdx := f3_ftq_req.ftqIdx 5372a3050c2SJay mmioFlushWb.bits.ftqOffset := f3_ftq_req.ftqOffset.bits 5382a3050c2SJay mmioFlushWb.bits.misOffset := f3_mmio_missOffset 5392a3050c2SJay mmioFlushWb.bits.cfiOffset := DontCare 5402a3050c2SJay mmioFlushWb.bits.target := Mux((f3_mmio_data(1,0) =/= 3.U), f3_ftq_req.startAddr + 2.U , f3_ftq_req.startAddr + 4.U) 5412a3050c2SJay mmioFlushWb.bits.jalTarget := DontCare 5422a3050c2SJay mmioFlushWb.bits.instrRange := f3_mmio_range 54309c6f1ddSLingrui98 5442a3050c2SJay mmio_redirect := (f3_req_is_mmio && mmio_state === mmio_wait_commit && RegNext(fromUncache.fire()) && f3_mmio_use_seq_pc) 5452a3050c2SJay 546*58dbdfc2SJay /** 547*58dbdfc2SJay ****************************************************************************** 548*58dbdfc2SJay * IFU Write Back Stage 549*58dbdfc2SJay * - write back predecode information to Ftq to update 550*58dbdfc2SJay * - redirect if found fault prediction 551*58dbdfc2SJay * - redirect if has false hit last half (last PC is not start + 32 Bytes, but in the 
midle of an notCFI RVI instruction) 552*58dbdfc2SJay ****************************************************************************** 5532a3050c2SJay */ 554*58dbdfc2SJay 5552a3050c2SJay val wb_valid = RegNext(RegNext(f2_fire && !f2_flush) && !f3_req_is_mmio && !f3_flush) 5562a3050c2SJay val wb_ftq_req = RegNext(f3_ftq_req) 5572a3050c2SJay 5582a3050c2SJay val wb_check_result = RegNext(checkerOut) 5592a3050c2SJay val wb_instr_range = RegNext(io.toIbuffer.bits.enqEnable) 5602a3050c2SJay val wb_pc = RegNext(f3_pc) 5612a3050c2SJay val wb_pd = RegNext(f3_pd) 5622a3050c2SJay val wb_instr_valid = RegNext(f3_instr_valid) 5632a3050c2SJay 5642a3050c2SJay /* false hit lastHalf */ 5652a3050c2SJay val wb_lastIdx = RegNext(f3_last_validIdx) 5662a3050c2SJay val wb_false_lastHalf = RegNext(f3_false_lastHalf) && wb_lastIdx =/= (PredictWidth - 1).U 5672a3050c2SJay val wb_false_target = RegNext(f3_false_snpc) 5682a3050c2SJay 5692a3050c2SJay val wb_half_flush = wb_false_lastHalf 5702a3050c2SJay val wb_half_target = wb_false_target 5712a3050c2SJay 572a1351e5dSJay /* false oversize */ 573a1351e5dSJay val lastIsRVC = wb_instr_range.asTypeOf(Vec(PredictWidth,Bool())).last && wb_pd.last.isRVC 574a1351e5dSJay val lastIsRVI = wb_instr_range.asTypeOf(Vec(PredictWidth,Bool()))(PredictWidth - 2) && !wb_pd(PredictWidth - 2).isRVC 575a1351e5dSJay val lastTaken = wb_check_result.fixedTaken.last 576a1351e5dSJay val wb_false_oversize = wb_valid && wb_ftq_req.oversize && (lastIsRVC || lastIsRVI) && !lastTaken 577a1351e5dSJay val wb_oversize_target = RegNext(f3_oversize_target) 578a1351e5dSJay 579a1351e5dSJay when(wb_valid){ 580a1351e5dSJay assert(!wb_false_oversize || !wb_half_flush, "False oversize and false half should be exclusive. 
") 581a1351e5dSJay } 582a1351e5dSJay 5832a3050c2SJay f3_wb_not_flush := wb_ftq_req.ftqIdx === f3_ftq_req.ftqIdx && f3_valid && wb_valid 5842a3050c2SJay 5852a3050c2SJay val checkFlushWb = Wire(Valid(new PredecodeWritebackBundle)) 5862a3050c2SJay checkFlushWb.valid := wb_valid 5872a3050c2SJay checkFlushWb.bits.pc := wb_pc 5882a3050c2SJay checkFlushWb.bits.pd := wb_pd 5892a3050c2SJay checkFlushWb.bits.pd.zipWithIndex.map{case(instr,i) => instr.valid := wb_instr_valid(i)} 5902a3050c2SJay checkFlushWb.bits.ftqIdx := wb_ftq_req.ftqIdx 5912a3050c2SJay checkFlushWb.bits.ftqOffset := wb_ftq_req.ftqOffset.bits 592a1351e5dSJay checkFlushWb.bits.misOffset.valid := ParallelOR(wb_check_result.fixedMissPred) || wb_half_flush || wb_false_oversize 5932a3050c2SJay checkFlushWb.bits.misOffset.bits := Mux(wb_half_flush, (PredictWidth - 1).U, ParallelPriorityEncoder(wb_check_result.fixedMissPred)) 5942a3050c2SJay checkFlushWb.bits.cfiOffset.valid := ParallelOR(wb_check_result.fixedTaken) 5952a3050c2SJay checkFlushWb.bits.cfiOffset.bits := ParallelPriorityEncoder(wb_check_result.fixedTaken) 596a1351e5dSJay checkFlushWb.bits.target := Mux(wb_false_oversize, wb_oversize_target, 597a1351e5dSJay Mux(wb_half_flush, wb_half_target, wb_check_result.fixedTarget(ParallelPriorityEncoder(wb_check_result.fixedMissPred)))) 5982a3050c2SJay checkFlushWb.bits.jalTarget := wb_check_result.fixedTarget(ParallelPriorityEncoder(VecInit(wb_pd.map{pd => pd.isJal }))) 5992a3050c2SJay checkFlushWb.bits.instrRange := wb_instr_range.asTypeOf(Vec(PredictWidth, Bool())) 6002a3050c2SJay 6012a3050c2SJay toFtq.pdWb := Mux(f3_req_is_mmio, mmioFlushWb, checkFlushWb) 6022a3050c2SJay 6032a3050c2SJay wb_redirect := checkFlushWb.bits.misOffset.valid && wb_valid 60409c6f1ddSLingrui98 6051d8f4dcbSJay 6061d8f4dcbSJay /** performance counter */ 6071d8f4dcbSJay val f3_perf_info = RegEnable(next = f2_perf_info, enable = f2_fire) 6081d8f4dcbSJay val f3_req_0 = io.toIbuffer.fire() 6091d8f4dcbSJay val f3_req_1 = io.toIbuffer.fire() 
&& f3_doubleLine 6101d8f4dcbSJay val f3_hit_0 = io.toIbuffer.fire() && f3_perf_info.bank_hit(0) 6111d8f4dcbSJay val f3_hit_1 = io.toIbuffer.fire() && f3_doubleLine & f3_perf_info.bank_hit(1) 6121d8f4dcbSJay val f3_hit = f3_perf_info.hit 613cd365d4cSrvcoresjw val perfEvents = Seq( 6142a3050c2SJay ("frontendFlush ", wb_redirect ), 615cd365d4cSrvcoresjw ("ifu_req ", io.toIbuffer.fire() ), 6161d8f4dcbSJay ("ifu_miss ", io.toIbuffer.fire() && !f3_perf_info.hit ), 617cd365d4cSrvcoresjw ("ifu_req_cacheline_0 ", f3_req_0 ), 618cd365d4cSrvcoresjw ("ifu_req_cacheline_1 ", f3_req_1 ), 619cd365d4cSrvcoresjw ("ifu_req_cacheline_0_hit ", f3_hit_1 ), 620cd365d4cSrvcoresjw ("ifu_req_cacheline_1_hit ", f3_hit_1 ), 6211d8f4dcbSJay ("only_0_hit ", f3_perf_info.only_0_hit && io.toIbuffer.fire() ), 6221d8f4dcbSJay ("only_0_miss ", f3_perf_info.only_0_miss && io.toIbuffer.fire() ), 6231d8f4dcbSJay ("hit_0_hit_1 ", f3_perf_info.hit_0_hit_1 && io.toIbuffer.fire() ), 6241d8f4dcbSJay ("hit_0_miss_1 ", f3_perf_info.hit_0_miss_1 && io.toIbuffer.fire() ), 6251d8f4dcbSJay ("miss_0_hit_1 ", f3_perf_info.miss_0_hit_1 && io.toIbuffer.fire() ), 6261d8f4dcbSJay ("miss_0_miss_1 ", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire() ), 627cd365d4cSrvcoresjw ("cross_line_block ", io.toIbuffer.fire() && f3_situation(0) ), 628cd365d4cSrvcoresjw ("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1) ), 629cd365d4cSrvcoresjw ) 6301ca0e4f3SYinan Xu generatePerfEvent() 63109c6f1ddSLingrui98 632f7c29b0aSJinYue XSPerfAccumulate("ifu_req", io.toIbuffer.fire() ) 633f7c29b0aSJinYue XSPerfAccumulate("ifu_miss", io.toIbuffer.fire() && !f3_hit ) 634f7c29b0aSJinYue XSPerfAccumulate("ifu_req_cacheline_0", f3_req_0 ) 635f7c29b0aSJinYue XSPerfAccumulate("ifu_req_cacheline_1", f3_req_1 ) 636f7c29b0aSJinYue XSPerfAccumulate("ifu_req_cacheline_0_hit", f3_hit_0 ) 637f7c29b0aSJinYue XSPerfAccumulate("ifu_req_cacheline_1_hit", f3_hit_1 ) 6382a3050c2SJay XSPerfAccumulate("frontendFlush", wb_redirect ) 
6391d8f4dcbSJay XSPerfAccumulate("only_0_hit", f3_perf_info.only_0_hit && io.toIbuffer.fire() ) 6401d8f4dcbSJay XSPerfAccumulate("only_0_miss", f3_perf_info.only_0_miss && io.toIbuffer.fire() ) 6411d8f4dcbSJay XSPerfAccumulate("hit_0_hit_1", f3_perf_info.hit_0_hit_1 && io.toIbuffer.fire() ) 6421d8f4dcbSJay XSPerfAccumulate("hit_0_miss_1", f3_perf_info.hit_0_miss_1 && io.toIbuffer.fire() ) 6431d8f4dcbSJay XSPerfAccumulate("miss_0_hit_1", f3_perf_info.miss_0_hit_1 && io.toIbuffer.fire() ) 6441d8f4dcbSJay XSPerfAccumulate("miss_0_miss_1", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire() ) 645f7c29b0aSJinYue XSPerfAccumulate("cross_line_block", io.toIbuffer.fire() && f3_situation(0) ) 646f7c29b0aSJinYue XSPerfAccumulate("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1) ) 64709c6f1ddSLingrui98} 648