//package xiangshan.frontend
//
//import chisel3._
//import chisel3.util._
//import utils._
//import xiangshan._
//import xiangshan.backend.ALUOpType
//import xiangshan.backend.JumpOpType
//
//class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
//  def tagBits = VAddrBits - idxBits - 1
//
//  val tag = UInt(tagBits.W)
//  val idx = UInt(idxBits.W)
//  val offset = UInt(1.W)
//
//  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
//  def getTag(x: UInt) = fromUInt(x).tag
//  def getIdx(x: UInt) = fromUInt(x).idx
//  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
//  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
//}
//
//class Stage1To2IO extends XSBundle {
//  val pc = Output(UInt(VAddrBits.W))
//  val btb = new Bundle {
//    val hits = Output(UInt(PredictWidth.W))
//    val targets = Output(Vec(PredictWidth, UInt(VAddrBits.W)))
//  }
//  val jbtac = new Bundle {
//    val hitIdx = Output(UInt(PredictWidth.W))
//    val target = Output(UInt(VAddrBits.W))
//  }
//  val tage = new Bundle {
//    val hits = Output(UInt(PredictWidth.W))
//    val takens = Output(Vec(PredictWidth, Bool()))
//  }
//  val hist = Output(Vec(PredictWidth, UInt(HistoryLength.W)))
//  val btbPred = ValidIO(new BranchPrediction)
//}
//
//class BPUStage1 extends XSModule {
//  val io = IO(new Bundle() {
//    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
//    // from backend
//    val redirectInfo = Input(new RedirectInfo)
//    // from Stage3
//    val flush = Input(Bool())
//    val s3RollBackHist = Input(UInt(HistoryLength.W))
//    val s3Taken = Input(Bool())
//    // to ifu, quick prediction result
//    val s1OutPred = ValidIO(new BranchPrediction)
//    // to Stage2
//    val out = Decoupled(new Stage1To2IO)
//  })
//
//  io.in.pc.ready := true.B
//
//  // flush Stage1 when io.flush
//  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)
//  val s1OutPredLatch = RegEnable(io.s1OutPred.bits, RegNext(io.in.pc.fire()))
//  val outLatch = RegEnable(io.out.bits, RegNext(io.in.pc.fire()))
//
//  val s1Valid = RegInit(false.B)
//  when (io.flush) {
//    s1Valid := true.B
//  }.elsewhen (io.in.pc.fire()) {
//    s1Valid := true.B
//  }.elsewhen (io.out.fire()) {
//    s1Valid := false.B
//  }
//  io.out.valid := s1Valid
//
//
//  // global history register
//  val ghr = RegInit(0.U(HistoryLength.W))
//  // modify updateGhr and newGhr when updating ghr
//  val updateGhr = WireInit(false.B)
//  val newGhr = WireInit(0.U(HistoryLength.W))
//  when (updateGhr) { ghr := newGhr }
//  // use hist as global history!!!
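//  // note: `hist` below forwards newGhr in the cycle an update is pending, so TAGE and JBTAC
//  // index with the freshest speculative history instead of the one-cycle-stale ghr register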
//  val hist = Mux(updateGhr, newGhr, ghr)
//
//  // Tage predictor
//  val tage = if(EnableBPD) Module(new Tage) else Module(new FakeTAGE)
//  tage.io.req.valid := io.in.pc.fire()
//  tage.io.req.bits.pc := io.in.pc.bits
//  tage.io.req.bits.hist := hist
//  tage.io.redirectInfo <> io.redirectInfo
//  io.s1OutPred.bits.tageMeta := tage.io.meta
//
//  // latch pc for 1 cycle latency when reading SRAM
//  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.fire())
//  // TODO: pass real mask in
//  // val maskLatch = RegEnable(btb.io.in.mask, io.in.pc.fire())
//  val maskLatch = Fill(PredictWidth, 1.U(1.W))
//
//  val r = io.redirectInfo.redirect
//  val updateFetchpc = r.pc - (r.fetchIdx << 1.U)
//  // BTB
//  val btb = Module(new BTB)
//  btb.io.in.pc <> io.in.pc
//  btb.io.in.pcLatch := pcLatch
//  // TODO: pass real mask in
//  btb.io.in.mask := Fill(PredictWidth, 1.U(1.W))
//  btb.io.redirectValid := io.redirectInfo.valid
//  btb.io.flush := io.flush
//
//  // btb.io.update.fetchPC := updateFetchpc
//  // btb.io.update.fetchIdx := r.fetchIdx
//  btb.io.update.pc := r.pc
//  btb.io.update.hit := r.btbHit
//  btb.io.update.misPred := io.redirectInfo.misPred
//  // btb.io.update.writeWay := r.btbVictimWay
//  btb.io.update.oldCtr := r.btbPredCtr
//  btb.io.update.taken := r.taken
//  btb.io.update.target := r.brTarget
//  btb.io.update.btbType := r.btbType
//  // TODO: add RVC logic
//  btb.io.update.isRVC := r.isRVC
//
//  // val btbHit = btb.io.out.hit
//  val btbTaken = btb.io.out.taken
//  val btbTakenIdx = btb.io.out.takenIdx
//  val btbTakenTarget = btb.io.out.target
//  // val btbWriteWay = btb.io.out.writeWay
//  val btbNotTakens = btb.io.out.notTakens
//  val btbCtrs = VecInit(btb.io.out.dEntries.map(_.pred))
//  val btbValids = btb.io.out.hits
//  val btbTargets = VecInit(btb.io.out.dEntries.map(_.target))
//  val btbTypes = VecInit(btb.io.out.dEntries.map(_.btbType))
//  val btbIsRVCs = VecInit(btb.io.out.dEntries.map(_.isRVC))
//
//
//  val jbtac = Module(new JBTAC)
//  jbtac.io.in.pc <> io.in.pc
//  jbtac.io.in.pcLatch := pcLatch
//  // TODO: pass real mask in
//  jbtac.io.in.mask := Fill(PredictWidth, 1.U(1.W))
//  jbtac.io.in.hist := hist
//  jbtac.io.redirectValid := io.redirectInfo.valid
//  jbtac.io.flush := io.flush
//
//  jbtac.io.update.fetchPC := updateFetchpc
//  jbtac.io.update.fetchIdx := r.fetchIdx
//  jbtac.io.update.misPred := io.redirectInfo.misPred
//  jbtac.io.update.btbType := r.btbType
//  jbtac.io.update.target := r.target
//  jbtac.io.update.hist := r.hist
//  jbtac.io.update.isRVC := r.isRVC
//
//  val jbtacHit = jbtac.io.out.hit
//  val jbtacTarget = jbtac.io.out.target
//  val jbtacHitIdx = jbtac.io.out.hitIdx
//  val jbtacIsRVC = jbtac.io.out.isRVC
//
//  // calculate global history of each instr
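//  // The block below computes, for each slot j, how many predicted-not-taken branches sit at slots
//  // strictly before j; that count is the number of zeroes slot j shifts into its own copy of the history.
//  // Illustrative example (assumes PredictWidth = 4 for brevity): with not-taken branches at slots 0
//  // and 2, shift(0) = [0,1,1,1] and shift(2) = [0,0,0,1], so the per-column sums give
//  // histShift = [0, 1, 1, 2].
//  // Reference sketch in plain Scala (not part of the design), same computation:
//  //   def histShiftModel(notTaken: Seq[Boolean]): Seq[Int] =
//  //     notTaken.indices.map(j => notTaken.take(j).count(identity))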
//  val firstHist = RegNext(hist)
//  val histShift = Wire(Vec(PredictWidth, UInt(log2Up(PredictWidth).W)))
//  val shift = Wire(Vec(PredictWidth, Vec(PredictWidth, UInt(1.W))))
//  (0 until PredictWidth).foreach(i => shift(i) := Mux(!btbNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), PredictWidth)).asTypeOf(Vec(PredictWidth, UInt(1.W))))
//  for (j <- 0 until PredictWidth) {
//    var tmp = 0.U
//    for (i <- 0 until PredictWidth) {
//      tmp = tmp + shift(i)(j)
//    }
//    histShift(j) := tmp
//  }
//
//  // update ghr
//  updateGhr := io.s1OutPred.bits.redirect ||
//    RegNext(io.in.pc.fire) && ~io.s1OutPred.bits.redirect && (btbNotTakens.asUInt & maskLatch).orR || // TODO: use parallel or
//    io.flush
//  val brJumpIdx = Mux(!btbTaken, 0.U, UIntToOH(btbTakenIdx))
//  val indirectIdx = Mux(!jbtacHit, 0.U, UIntToOH(jbtacHitIdx))
//  // if backend redirects, restore history from backend;
//  // if stage3 redirects, restore history from stage3;
//  // if stage1 redirects, speculatively update history;
//  // if none of above happens, check if stage1 has not-taken branches and shift zeroes accordingly
//  newGhr := Mux(io.redirectInfo.flush(), (r.hist << 1.U) | !(r.btbType === BTBtype.B && !r.taken),
//    Mux(io.flush, Mux(io.s3Taken, (io.s3RollBackHist << 1.U) | 1.U, io.s3RollBackHist),
//    Mux(io.s1OutPred.bits.redirect, (PriorityMux(brJumpIdx | indirectIdx, io.s1OutPred.bits.hist) << 1.U | 1.U),
//    io.s1OutPred.bits.hist(0) << PopCount(btbNotTakens.asUInt & maskLatch))))
//
//  def getInstrValid(i: Int): UInt = {
//    val vec = Wire(Vec(PredictWidth, UInt(1.W)))
//    for (j <- 0 until PredictWidth) {
//      if (j <= i)
//        vec(j) := 1.U
//      else
//        vec(j) := 0.U
//    }
//    vec.asUInt
//  }
//
//  // redirect based on BTB and JBTAC
//  val takenIdx = LowestBit(brJumpIdx | indirectIdx, PredictWidth)
//
//  // io.out.valid := RegNext(io.in.pc.fire()) && !io.flush
//
//  // io.s1OutPred.valid := io.out.valid
//  io.s1OutPred.valid := io.out.fire()
//  when (RegNext(io.in.pc.fire())) {
//    io.s1OutPred.bits.redirect := btbTaken || jbtacHit
//    // io.s1OutPred.bits.instrValid := (maskLatch & Fill(PredictWidth, ~io.s1OutPred.bits.redirect || io.s1OutPred.bits.lateJump) |
//    //   PriorityMux(brJumpIdx | indirectIdx, (0 until PredictWidth).map(getInstrValid(_)))).asTypeOf(Vec(PredictWidth, Bool()))
//    io.s1OutPred.bits.instrValid := (maskLatch & Fill(PredictWidth, ~io.s1OutPred.bits.redirect) |
//      PriorityMux(brJumpIdx | indirectIdx, (0 until PredictWidth).map(getInstrValid(_)))).asTypeOf(Vec(PredictWidth, Bool()))
//    for (i <- 0 until (PredictWidth - 1)) {
//      when (!io.s1OutPred.bits.lateJump && (1.U << i) === takenIdx && (!btbIsRVCs(i) && btbValids(i) || !jbtacIsRVC && (1.U << i) === indirectIdx)) {
//        io.s1OutPred.bits.instrValid(i+1) := maskLatch(i+1)
//      }
//    }
//    io.s1OutPred.bits.target := Mux(takenIdx === 0.U, pcLatch + (PopCount(maskLatch) << 1.U), Mux(takenIdx === brJumpIdx, btbTakenTarget, jbtacTarget))
//    io.s1OutPred.bits.lateJump := btb.io.out.isRVILateJump || jbtac.io.out.isRVILateJump
//    (0 until PredictWidth).map(i => io.s1OutPred.bits.hist(i) := firstHist << histShift(i))
//    // io.s1OutPred.bits.btbVictimWay := btbWriteWay
//    io.s1OutPred.bits.predCtr := btbCtrs
//    io.s1OutPred.bits.btbHit := btbValids
//    io.s1OutPred.bits.tageMeta := tage.io.meta // TODO: enableBPD
//    io.s1OutPred.bits.rasSp := DontCare
//    io.s1OutPred.bits.rasTopCtr := DontCare
//  }.otherwise {
//    io.s1OutPred.bits := s1OutPredLatch
//  }
//
//  when (RegNext(io.in.pc.fire())) {
//    io.out.bits.pc := pcLatch
//    io.out.bits.btb.hits := btbValids.asUInt
//    (0 until PredictWidth).map(i => io.out.bits.btb.targets(i) := btbTargets(i))
//    io.out.bits.jbtac.hitIdx := Mux(jbtacHit, UIntToOH(jbtacHitIdx), 0.U) // UIntToOH(jbtacHitIdx)
//    io.out.bits.jbtac.target := jbtacTarget
//    io.out.bits.tage <> tage.io.out
//    // TODO: we don't need this repeatedly!
//    io.out.bits.hist := io.s1OutPred.bits.hist
//    io.out.bits.btbPred := io.s1OutPred
//  }.otherwise {
//    io.out.bits := outLatch
//  }
//
//
//  // debug info
//  XSDebug("in:(%d %d) pc=%x ghr=%b\n", io.in.pc.valid, io.in.pc.ready, io.in.pc.bits, hist)
//  XSDebug("outPred:(%d) pc=0x%x, redirect=%d instrValid=%b tgt=%x\n",
//    io.s1OutPred.valid, pcLatch, io.s1OutPred.bits.redirect, io.s1OutPred.bits.instrValid.asUInt, io.s1OutPred.bits.target)
//  XSDebug(io.flush && io.redirectInfo.flush(),
//    "flush from backend: pc=%x tgt=%x brTgt=%x btbType=%b taken=%d oldHist=%b fetchIdx=%d isExcpt=%d\n",
//    r.pc, r.target, r.brTarget, r.btbType, r.taken, r.hist, r.fetchIdx, r.isException)
//  XSDebug(io.flush && !io.redirectInfo.flush(),
//    "flush from Stage3: s3Taken=%d s3RollBackHist=%b\n", io.s3Taken, io.s3RollBackHist)
//
//}
//
//class Stage2To3IO extends Stage1To2IO {
//}
//
//class BPUStage2 extends XSModule {
//  val io = IO(new Bundle() {
//    // flush from Stage3
//    val flush = Input(Bool())
//    val in = Flipped(Decoupled(new Stage1To2IO))
//    val out = Decoupled(new Stage2To3IO)
//  })
//
//  // flush Stage2 when Stage3 or backend redirects
//  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
//  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
//  when (io.in.fire()) { inLatch := io.in.bits }
//  val validLatch = RegInit(false.B)
//  when (io.flush) {
//    validLatch := false.B
//  }.elsewhen (io.in.fire()) {
//    validLatch := true.B
//  }.elsewhen (io.out.fire()) {
//    validLatch := false.B
//  }
//
//  io.out.valid := !io.flush && !flushS2 && validLatch
//  io.in.ready := !validLatch || io.out.fire()
//
//  // do nothing
//  io.out.bits := inLatch
//
//  // debug info
//  XSDebug("in:(%d %d) pc=%x out:(%d %d) pc=%x\n",
//    io.in.valid, io.in.ready, io.in.bits.pc, io.out.valid, io.out.ready, io.out.bits.pc)
//  XSDebug("validLatch=%d pc=%x\n", validLatch, inLatch.pc)
//  XSDebug(io.flush, "flush!!!\n")
//}
//
//class BPUStage3 extends XSModule {
//  val io = IO(new Bundle() {
//    val flush = Input(Bool())
//    val in = Flipped(Decoupled(new Stage2To3IO))
//    val out = Decoupled(new BranchPrediction)
//    // from icache
//    val predecode = Flipped(ValidIO(new Predecode))
//    // from backend
//    val redirectInfo = Input(new RedirectInfo)
//    // to Stage1 and Stage2
//    val flushBPU = Output(Bool())
//    // to Stage1, restore ghr in stage1 when flushBPU is valid
//    val s1RollBackHist = Output(UInt(HistoryLength.W))
//    val s3Taken = Output(Bool())
//  })
//
//  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
//  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
//  val validLatch = RegInit(false.B)
//  val predecodeLatch = RegInit(0.U.asTypeOf(io.predecode.bits))
//  val predecodeValidLatch = RegInit(false.B)
//  when (io.in.fire()) { inLatch := io.in.bits }
//  when (io.flush) {
//    validLatch := false.B
//  }.elsewhen (io.in.fire()) {
//    validLatch := true.B
//  }.elsewhen (io.out.fire()) {
//    validLatch := false.B
//  }
//
//  when (io.predecode.valid) { predecodeLatch := io.predecode.bits }
//  when (io.flush || io.out.fire()) {
//    predecodeValidLatch := false.B
//  }.elsewhen (io.predecode.valid) {
//    predecodeValidLatch := true.B
//  }
//
//  val predecodeValid = io.predecode.valid || predecodeValidLatch
//  val predecode = Mux(io.predecode.valid, io.predecode.bits, predecodeLatch)
//  io.out.valid := validLatch && predecodeValid && !flushS3 && !io.flush
//  io.in.ready := !validLatch || io.out.fire()
//
//  // RAS
//  // TODO: split retAddr and ctr
//  def rasEntry() = new Bundle {
//    val retAddr = UInt(VAddrBits.W)
//    val ctr = UInt(8.W) // layer of nested call functions
//  }
//  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
//  val sp = Counter(RasSize)
//  val rasTop = ras(sp.value)
//  val rasTopAddr = rasTop.retAddr
//
//  // get the first taken branch/jal/call/jalr/ret in a fetch line
//  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction
//
//  val tageHits = inLatch.tage.hits
//  val tageTakens = inLatch.tage.takens
//  val btbTakens = inLatch.btbPred.bits.predCtr
//
//  val brs = inLatch.btb.hits & Reverse(Cat(predecode.fuOpTypes.map { t => ALUOpType.isBranch(t) }).asUInt) & predecode.mask
//  // val brTakens = brs & inLatch.tage.takens.asUInt
//  val brTakens = if (EnableBPD) {
//    // If tage hits, use tage takens, otherwise keep btbpreds
//    // brs & Reverse(Cat(inLatch.tage.takens.map {t => Fill(2, t.asUInt)}).asUInt)
//    XSDebug("tageHits=%b, tageTakens=%b\n", tageHits, tageTakens.asUInt)
//    brs & Reverse(Cat((0 until PredictWidth).map(i => Mux(tageHits(i), tageTakens(i), btbTakens(i)(1)))))
//  } else {
//    brs & Reverse(Cat(inLatch.btbPred.bits.predCtr.map {c => c(1)}).asUInt)
//  }
//  val jals = inLatch.btb.hits & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.jal }).asUInt) & predecode.mask
//  val calls = inLatch.btb.hits & predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.call }).asUInt)
//  val jalrs = inLatch.jbtac.hitIdx & predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.jalr }).asUInt)
//  val rets = predecode.mask & Reverse(Cat(predecode.fuOpTypes.map { t => t === JumpOpType.ret }).asUInt)
//
//  val brTakenIdx = PriorityMux(brTakens, (0 until PredictWidth).map(_.U))
//  val jalIdx = PriorityMux(jals, (0 until PredictWidth).map(_.U))
//  val callIdx = PriorityMux(calls, (0 until PredictWidth).map(_.U))
//  val jalrIdx = PriorityMux(jalrs, (0 until PredictWidth).map(_.U))
//  val retIdx = PriorityMux(rets, (0 until PredictWidth).map(_.U))
//
//  val jmps = (if (EnableRAS) {brTakens | jals | calls | jalrs | rets} else {brTakens | jals | calls | jalrs})
//  val jmpIdx = MuxCase(0.U, (0 until PredictWidth).map(i => (jmps(i), i.U)))
//  io.s3Taken := MuxCase(false.B, (0 until PredictWidth).map(i => (jmps(i), true.B)))
//
//  // val brNotTakens = VecInit((0 until PredictWidth).map(i => brs(i) && ~inLatch.tage.takens(i) && i.U <= jmpIdx && io.predecode.bits.mask(i)))
//  val brNotTakens = if (EnableBPD) {
//    VecInit((0 until PredictWidth).map(i => brs(i) && i.U <= jmpIdx && Mux(tageHits(i), ~tageTakens(i), ~btbTakens(i)(1)) && predecode.mask(i)))
//  } else {
//    VecInit((0 until PredictWidth).map(i => brs(i) && i.U <= jmpIdx && ~inLatch.btbPred.bits.predCtr(i)(1) && predecode.mask(i)))
//  }
//
//  // TODO: what if if4 and if2 late jump to the same target?
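//  // note: PriorityMux over Reverse(predecode.mask) yields the index of the last valid slot of the
//  // fetch packet, so lateJump flags a taken non-RVC (4-byte) jump whose first half occupies that
//  // last slot while its second half falls into the next fetch packet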
//  // val lateJump = io.s3Taken && PriorityMux(Reverse(predecode.mask), ((PredictWidth - 1) to 0).map(_.U)) === jmpIdx && !predecode.isRVC(jmpIdx)
//  val lateJump = io.s3Taken && PriorityMux(Reverse(predecode.mask), (0 until PredictWidth).map {i => (PredictWidth - 1 - i).U}) === jmpIdx && !predecode.isRVC(jmpIdx)
//  io.out.bits.lateJump := lateJump
//
//  io.out.bits.predCtr := inLatch.btbPred.bits.predCtr
//  io.out.bits.btbHit := inLatch.btbPred.bits.btbHit
//  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
//  //io.out.bits.btbType := Mux(jmpIdx === retIdx, BTBtype.R,
//  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
//  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
//  val firstHist = inLatch.btbPred.bits.hist(0)
//  // there may be several not-taken branches before the first jump instruction,
//  // so we need to calculate how many zeroes each instruction should shift into its global history.
//  // each history is exclusive of the instruction's own jump direction.
//  val histShift = Wire(Vec(PredictWidth, UInt(log2Up(PredictWidth).W)))
//  val shift = Wire(Vec(PredictWidth, Vec(PredictWidth, UInt(1.W))))
//  (0 until PredictWidth).foreach(i => shift(i) := Mux(!brNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), PredictWidth)).asTypeOf(Vec(PredictWidth, UInt(1.W))))
//  for (j <- 0 until PredictWidth) {
//    var tmp = 0.U
//    for (i <- 0 until PredictWidth) {
//      tmp = tmp + shift(i)(j)
//    }
//    histShift(j) := tmp
//  }
//  (0 until PredictWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
//  // save ras checkpoint info
//  io.out.bits.rasSp := sp.value
//  io.out.bits.rasTopCtr := rasTop.ctr
//
//  // flush BPU and redirect when target differs from the target predicted in Stage1
//  val tToNt = inLatch.btbPred.bits.redirect && ~io.s3Taken
//  val ntToT = ~inLatch.btbPred.bits.redirect && io.s3Taken
//  val dirDiffers = tToNt || ntToT
//  val tgtDiffers = inLatch.btbPred.bits.redirect && io.s3Taken && io.out.bits.target =/= inLatch.btbPred.bits.target
//  // io.out.bits.redirect := (if (EnableBPD) {dirDiffers || tgtDiffers} else false.B)
//  io.out.bits.redirect := dirDiffers || tgtDiffers
//  io.out.bits.target := Mux(!io.s3Taken, inLatch.pc + (PopCount(predecode.mask) << 1.U), // TODO: RVC
//    Mux(jmpIdx === retIdx, rasTopAddr,
//    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
//    inLatch.btb.targets(jmpIdx))))
//  // for (i <- 0 until FetchWidth) {
//  //   io.out.bits.instrValid(i) := ((io.s3Taken && i.U <= jmpIdx) || ~io.s3Taken) && io.predecode.bits.mask(i)
//  // }
//  io.out.bits.instrValid := predecode.mask.asTypeOf(Vec(PredictWidth, Bool()))
//  for (i <- (PredictWidth - 1) to 0 by -1) {
//    io.out.bits.instrValid(i) := (io.s3Taken && i.U <= jmpIdx || !io.s3Taken) && predecode.mask(i)
//    if (i != (PredictWidth - 1)) {
//      when (!lateJump && !predecode.isRVC(i) && io.s3Taken && i.U <= jmpIdx) {
//        io.out.bits.instrValid(i+1) := predecode.mask(i+1)
//      }
//    }
//  }
//  io.flushBPU := io.out.bits.redirect && io.out.fire()
//
//  // speculative update RAS
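//  // Each RAS entry pairs a return address with a counter: a call whose return address equals the
//  // current top only increments ctr (nested/recursive calls to the same site), otherwise a new
//  // entry is pushed; a ret decrements ctr, or pops the entry once ctr reaches 1.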
//  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
//  val retAddr = inLatch.pc + (callIdx << 1.U) + Mux(predecode.isRVC(callIdx), 2.U, 4.U)
//  rasWrite.retAddr := retAddr
//  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
//  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
//  val rasWritePosition = Mux(allocNewEntry, sp.value + 1.U, sp.value)
//  when (io.out.fire() && io.s3Taken) {
//    when (jmpIdx === callIdx) {
//      ras(rasWritePosition) := rasWrite
//      when (allocNewEntry) { sp.value := sp.value + 1.U }
//    }.elsewhen (jmpIdx === retIdx) {
//      when (rasTop.ctr === 1.U) {
//        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
//      }.otherwise {
//        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
//      }
//    }
//  }
//  // use checkpoint to recover RAS
//  val recoverSp = io.redirectInfo.redirect.rasSp
//  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
//  when (io.redirectInfo.flush()) {
//    sp.value := recoverSp
//    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
//  }
//
//  // roll back global history in S1 if S3 redirects
//  io.s1RollBackHist := Mux(io.s3Taken, io.out.bits.hist(jmpIdx),
//    io.out.bits.hist(0) << PopCount(brs & predecode.mask & ~Reverse(Cat(inLatch.tage.takens.map {t => Fill(2, t.asUInt)}).asUInt)))
//
//  // debug info
//  XSDebug(io.in.fire(), "in:(%d %d) pc=%x\n", io.in.valid, io.in.ready, io.in.bits.pc)
//  XSDebug(io.out.fire(), "out:(%d %d) pc=%x redirect=%d predcdMask=%b instrValid=%b tgt=%x\n",
//    io.out.valid, io.out.ready, inLatch.pc, io.out.bits.redirect, predecode.mask, io.out.bits.instrValid.asUInt, io.out.bits.target)
//  XSDebug("flushS3=%d\n", flushS3)
//  XSDebug("validLatch=%d predecode.valid=%d\n", validLatch, predecodeValid)
//  XSDebug("brs=%b brTakens=%b brNTakens=%b jals=%b jalrs=%b calls=%b rets=%b\n",
//    brs, brTakens, brNotTakens.asUInt, jals, jalrs, calls, rets)
//  // FIXME: the guarding condition below is wrong
//  // XSDebug(io.in.fire() && callIdx.orR, "[RAS]:pc=0x%x, rasWritePosition=%d, rasWriteAddr=0x%x\n",
//  //   io.in.bits.pc, rasWritePosition, retAddr)
//}
//
//class BPU extends XSModule {
//  val io = IO(new Bundle() {
//    // from backend
//    // flush pipeline if misPred and update bpu based on redirect signals from brq
//    val redirectInfo = Input(new RedirectInfo)
//
//    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }
//
//    val btbOut = ValidIO(new BranchPrediction)
//    val tageOut = Decoupled(new BranchPrediction)
//
//    // predecode info from icache
//    // TODO: simplify this after implementing the predecode unit
//    val predecode = Flipped(ValidIO(new Predecode))
//  })
//
//  val s1 = Module(new BPUStage1)
//  val s2 = Module(new BPUStage2)
//  val s3 = Module(new BPUStage3)
//
//  s1.io.redirectInfo <> io.redirectInfo
//  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
//  s1.io.in.pc.valid := io.in.pc.valid
//  s1.io.in.pc.bits <> io.in.pc.bits
//  io.btbOut <> s1.io.s1OutPred
//  s1.io.s3RollBackHist := s3.io.s1RollBackHist
//  s1.io.s3Taken := s3.io.s3Taken
//
//  s1.io.out <> s2.io.in
//  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
//
//  s2.io.out <> s3.io.in
//  s3.io.flush := io.redirectInfo.flush()
//  s3.io.predecode <> io.predecode
//  io.tageOut <> s3.io.out
//  s3.io.redirectInfo <> io.redirectInfo
//}