package xiangshan.frontend

import chisel3._
import chisel3.util._
import xiangshan._
import utils._

class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 2

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(2.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  val btbPred = ValidIO(new BranchPrediction)
}

class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    // to IFU, quick prediction result
    val btbOut = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  // flush Stage1 when io.flush || io.redirectInfo.valid

  // TODO: delete this!!!
  io.in.pc.ready := true.B
  io.btbOut.valid := false.B
  io.btbOut.bits := DontCare
  io.out.valid := false.B
  io.out.bits := DontCare

}

class Stage2To3IO extends Stage1To2IO {
}

class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // flush Stage2 when Stage3 or the backend redirects
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  io.out.valid := !flushS2 && RegNext(io.in.fire())
  io.in.ready := !io.out.valid || io.out.fire()

  // do nothing
  io.out.bits := RegEnable(io.in.bits, io.in.fire())
}

class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val out = ValidIO(new BranchPrediction)
    // from icache
    val predecode = Flipped(ValidIO(new Predecode))
    // from backend
    val redirectInfo = Flipped(new RedirectInfo)
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore the GHR in Stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
  })

  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  when (io.in.fire()) { inLatch := io.in.bits }
  when (io.in.fire()) {
    validLatch := !io.flush
  }.elsewhen (io.out.valid) {
    validLatch := false.B
  }
  io.out.valid := validLatch && io.predecode.valid && !flushS3
  io.in.ready := !validLatch || io.out.valid

  // RAS
  // TODO: split retAddr and ctr
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // number of nested calls that return to the same address
  }
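  // Illustrative: a run of recursive calls that all push the same return
  // address keeps a single stack entry and bumps ctr instead of allocating
  // new entries (see the speculative update below), so the matching rets
  // count ctr back down and only pop the entry once ctr reaches 1.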
  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
  val sp = Counter(RasSize)
  val rasTop = ras(sp.value)
  val rasTopAddr = rasTop.retAddr

  // for example, getLowerMask("b00101100".U, 8) = "b00111111".U, getLowestBit("b00101100".U, 8) = "b00000100".U
  def getLowerMask(idx: UInt, len: Int) = (0 until len).map(i => idx >> i.U).reduce(_|_)
  def getLowestBit(idx: UInt, len: Int) = Mux(idx(0), 1.U(len.W), Reverse(((0 until len).map(i => Reverse(idx(len - 1, 0)) >> i.U).reduce(_|_) + 1.U) >> 1.U))

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // brTakenIdx/jalIdx/callIdx/jalrIdx/retIdx/jmpIdx are one-hot encoded.
  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction.
  val brIdx = inLatch.btb.hits & VecInit(io.predecode.bits.fuTypes.map { t => ALUOpType.isBranch(t) }).asUInt & io.predecode.bits.mask
  val brTakenIdx = getLowestBit(brIdx & inLatch.tage.takens.asUInt, FetchWidth)
  //val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & getLowerMask(brTakenIdx, FetchWidth)
  val jalIdx = getLowestBit(inLatch.btb.hits & VecInit(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jal }).asUInt & io.predecode.bits.mask, FetchWidth)
  val callIdx = getLowestBit(inLatch.btb.hits & io.predecode.bits.mask & VecInit(io.predecode.bits.fuTypes.map { t => t === ALUOpType.call }).asUInt, FetchWidth)
  val jalrIdx = getLowestBit(inLatch.jbtac.hitIdx & io.predecode.bits.mask & VecInit(io.predecode.bits.fuTypes.map { t => t === ALUOpType.jalr }).asUInt, FetchWidth)
  val retIdx = getLowestBit(io.predecode.bits.mask & VecInit(io.predecode.bits.fuTypes.map { t => t === ALUOpType.ret }).asUInt, FetchWidth)

  val jmpIdx = getLowestBit(brTakenIdx | jalIdx | callIdx | jalrIdx | retIdx, FetchWidth)
  val brNotTakenIdx = brIdx & ~inLatch.tage.takens.asUInt & getLowerMask(jmpIdx, FetchWidth)
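  // Worked example (FetchWidth = 8 assumed): with brIdx = b00101100 (branches
  // in slots 2, 3 and 5), takens = b00100000 and jalIdx = b00010000, we get
  // brTakenIdx = b00100000, jmpIdx = getLowestBit(b00110000, 8) = b00010000
  // (the jal in slot 4 is the first redirecting instruction) and
  // brNotTakenIdx = b00001100: the two not-taken branches in front of it,
  // which shift extra zeroes into the global history of younger slots below.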
  io.out.bits.redirect := jmpIdx.orR.asBool
  io.out.bits.target := Mux(jmpIdx === retIdx, rasTopAddr,
    Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
    Mux(jmpIdx === 0.U, inLatch.pc + 4.U, // TODO: RVC
    PriorityMux(jmpIdx, inLatch.btb.targets))))
  io.out.bits.instrValid := getLowerMask(jmpIdx, FetchWidth).asTypeOf(Vec(FetchWidth, Bool()))
  //io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist(0)
  // There may be several not-taken branches before the first jump instruction,
  // so we need to calculate how many zeroes each instruction should shift into its global history.
  // Each history excludes the instruction's own jump direction.
  val histShift = WireInit(VecInit(Seq.fill(FetchWidth)(0.U(log2Up(FetchWidth).W))))
  (0 until FetchWidth).foreach(i =>
    histShift(i) := PopCount(brNotTakenIdx & (getLowerMask(UIntToOH(i.U), FetchWidth) >> 1))
  )
  (0 until FetchWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
  // save ras checkpoint info
  io.out.bits.rasSp := sp.value
  io.out.bits.rasTopCtr := rasTop.ctr

  // flush BPU and redirect when the target differs from the target predicted in Stage1
  io.out.bits.redirect := inLatch.btbPred.bits.redirect ^ jmpIdx.orR.asBool ||
    inLatch.btbPred.bits.redirect && jmpIdx.orR.asBool && io.out.bits.target =/= inLatch.btbPred.bits.target
  io.flushBPU := io.out.bits.redirect && io.out.valid

  // speculative update of the RAS
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  rasWrite.retAddr := inLatch.pc + (OHToUInt(callIdx) << 2.U) + 4.U
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  when (io.out.valid) {
    when (jmpIdx === callIdx) {
      ras(Mux(allocNewEntry, sp.value + 1.U, sp.value)) := rasWrite
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      when (rasTop.ctr === 1.U) {
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
      }
    }
  }
  // use the checkpoint to recover the RAS
  val recoverSp = io.redirectInfo.redirect.rasSp
  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
    sp.value := recoverSp
    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
  }

  // roll back the global history in S1 if S3 redirects
  io.s1RollBackHist := PriorityMux(jmpIdx, io.out.bits.hist)
}

class BPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    // flush the pipeline on misPred and update the bpu based on redirect signals from the brq
    val redirectInfo = Flipped(new RedirectInfo)

    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after implementing the predecode unit
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  s1.io.redirectInfo <> io.redirectInfo
  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.btbOut
  s1.io.s3RollBackHist := s3.io.s1RollBackHist

  s1.io.out <> s2.io.in
  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

  s2.io.out <> s3.io.in
  s3.io.flush := io.redirectInfo.flush()
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirectInfo <> io.redirectInfo

  // TODO: delete this and put BTB and JBTAC into Stage1
  /*
  val flush = BoolStopWatch(io.redirect.valid, io.in.pc.valid, startHighPriority = true)

  // The BTB makes a quick prediction for branches and direct jumps. It is
  // 4-way set-associative, and each way is divided into 4 banks.
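  // Illustrative address split (VAddrBits = 39 and BtbSets = 256 are assumed
  // values): idxBits = log2Up(BtbSets) = 8, so a pc is viewed as
  // | tag (29 bits) | idx (8 bits) | offset (2 bits) |. With BtbBanks = 4,
  // getBank takes the low 2 bits of idx and getBankIdx the remaining 6 bits,
  // i.e. BtbSets / BtbBanks = 64 sets per bank.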
  val btbAddr = new TableAddr(log2Up(BtbSets), BtbBanks)
  def btbEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(btbAddr.tagBits.W)
    val _type = UInt(2.W)
    val target = UInt(VAddrBits.W)
    val pred = UInt(2.W) // 2-bit saturating counter as a quick predictor
  }

  val btb = List.fill(BtbBanks)(List.fill(BtbWays)(
    Module(new SRAMTemplate(btbEntry(), set = BtbSets / BtbBanks, shouldReset = true, holdRead = true, singlePort = true))))

  // val fetchPkgAligned = btbAddr.getBank(io.in.pc.bits) === 0.U
  val HeadBank = btbAddr.getBank(io.in.pc.bits)
  val TailBank = btbAddr.getBank(io.in.pc.bits + (FetchWidth.U << 2.U) - 4.U)
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).reset := reset.asBool
      btb(b)(w).io.r.req.valid := io.in.pc.valid && Mux(TailBank > HeadBank, b.U >= HeadBank && b.U <= TailBank, b.U >= TailBank || b.U <= HeadBank)
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.in.pc.bits)
    }
  }
  // latch pc for the 1-cycle latency of the SRAM read
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.valid)
  val btbRead = Wire(Vec(BtbBanks, Vec(BtbWays, btbEntry())))
  val btbHits = Wire(Vec(FetchWidth, Bool()))
  val btbTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val btbTypes = Wire(Vec(FetchWidth, UInt(2.W)))
  // val btbPreds = Wire(Vec(FetchWidth, UInt(2.W)))
  val btbTakens = Wire(Vec(FetchWidth, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btbRead(b)(w) := btb(b)(w).io.r.resp.data(0)
    }
  }
  for (i <- 0 until FetchWidth) {
    btbHits(i) := false.B
    for (b <- 0 until BtbBanks) {
      for (w <- 0 until BtbWays) {
        when (b.U === btbAddr.getBank(pcLatch) && btbRead(b)(w).valid && btbRead(b)(w).tag === btbAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + (i.U << 2))) {
          btbHits(i) := !flush && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
          btbTargets(i) := btbRead(b)(w).target
          btbTypes(i) := btbRead(b)(w)._type
          // btbPreds(i) := btbRead(b)(w).pred
          btbTakens(i) := (btbRead(b)(w).pred)(1).asBool
        }.otherwise {
          btbHits(i) := false.B
          btbTargets(i) := DontCare
          btbTypes(i) := DontCare
          btbTakens(i) := DontCare
        }
      }
    }
  }

  // JBTAC, divided into 8 banks, makes predictions for indirect jumps other than ret.
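  // Illustrative (FetchWidth = 8 assumed; the 8 banks come from the comment
  // above): consecutive fetch slots pc, pc+4, ..., pc+28 are interleaved over
  // the 8 banks, so a whole fetch packet can be looked up in one cycle with at
  // most one access per bank; the per-slot hit check below matches the bank
  // selected by jbtacHeadBank + i.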
  val jbtacAddr = new TableAddr(log2Up(JbtacSize), JbtacBanks)
  def jbtacEntry() = new Bundle {
    val valid = Bool()
    // TODO: don't need full length of tag and target
    val tag = UInt(jbtacAddr.tagBits.W)
    val target = UInt(VAddrBits.W)
  }

  val jbtac = List.fill(JbtacBanks)(Module(new SRAMTemplate(jbtacEntry(), set = JbtacSize / JbtacBanks, shouldReset = true, holdRead = true, singlePort = true)))

  (0 until JbtacBanks).map(i => jbtac(i).reset := reset.asBool)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.valid := io.in.pc.valid)
  (0 until JbtacBanks).map(i => jbtac(i).io.r.req.bits.setIdx := jbtacAddr.getBankIdx(Cat((io.in.pc.bits)(VAddrBits - 1, 2), 0.U(2.W)) + (i.U << 2)))

  val jbtacRead = Wire(Vec(JbtacBanks, jbtacEntry()))
  (0 until JbtacBanks).map(i => jbtacRead(i) := jbtac(i).io.r.resp.data(0))
  val jbtacHits = Wire(Vec(FetchWidth, Bool()))
  val jbtacTargets = Wire(Vec(FetchWidth, UInt(VAddrBits.W)))
  val jbtacHeadBank = jbtacAddr.getBank(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)))
  for (i <- 0 until FetchWidth) {
    jbtacHits(i) := false.B
    for (b <- 0 until JbtacBanks) {
      when (jbtacHeadBank + i.U === b.U) {
        jbtacHits(i) := jbtacRead(b).valid && jbtacRead(b).tag === jbtacAddr.getTag(Cat(pcLatch(VAddrBits - 1, 2), 0.U(2.W)) + (i.U << 2)) &&
          !flush && RegNext(jbtac(b).io.r.req.fire(), init = false.B)
        jbtacTargets(i) := jbtacRead(b).target
      }.otherwise {
        jbtacHits(i) := false.B
        jbtacTargets(i) := DontCare
      }
    }
  }

  // redirect based on BTB and JBTAC
  (0 until FetchWidth).map(i => io.predMask(i) := btbHits(i) && Mux(btbTypes(i) === BTBtype.B, btbTakens(i), true.B) || jbtacHits(i))
  (0 until FetchWidth).map(i => io.predTargets(i) := Mux(btbHits(i) && !(btbTypes(i) === BTBtype.B && !btbTakens(i)), btbTargets(i), jbtacTargets(i)))

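  // Illustrative: if slot i has a BTB hit of type B whose counter predicts
  // not-taken, the BTB alone does not redirect; predMask(i) can still be set
  // by a JBTAC hit, and the target mux above then falls through to
  // jbtacTargets(i). A taken B-type or a J-type BTB hit uses btbTargets(i).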
  // update bpu, including BTB, JBTAC...
  // 1. update BTB
  // 1.1 read the selected bank
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      btb(b)(w).io.r.req.valid := io.redirect.valid && btbAddr.getBank(io.redirect.bits.pc) === b.U
      btb(b)(w).io.r.req.bits.setIdx := btbAddr.getBankIdx(io.redirect.bits.pc)
    }
  }

  // 1.2 match the redirect pc tag against the 4 tags of the BTB set to find a way to write
  // val redirectLatch = RegEnable(io.redirect.bits, io.redirect.valid)
  val redirectLatch = RegNext(io.redirect.bits, init = 0.U.asTypeOf(new Redirect))
  val bankLatch = btbAddr.getBank(redirectLatch.pc)
  val btbUpdateRead = Wire(Vec(BtbWays, btbEntry()))
  val btbValids = Wire(Vec(BtbWays, Bool()))
  val btbUpdateTagHits = Wire(Vec(BtbWays, Bool()))
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btbUpdateRead(w) := btb(b)(w).io.r.resp.data(0)
        btbValids(w) := btbUpdateRead(w).valid && RegNext(btb(b)(w).io.r.req.fire(), init = false.B)
      }.otherwise {
        btbUpdateRead(w) := 0.U.asTypeOf(btbEntry())
        btbValids(w) := false.B
      }
    }
  }
  (0 until BtbWays).map(w => btbUpdateTagHits(w) := btbValids(w) && btbUpdateRead(w).tag === btbAddr.getTag(redirectLatch.pc))
  // val btbWriteWay = Wire(Vec(BtbWays, Bool()))
  val btbWriteWay = Wire(UInt(BtbWays.W))
  val btbInvalids = ~btbValids.asUInt
  when (btbUpdateTagHits.asUInt.orR) {
    // tag hits
    btbWriteWay := btbUpdateTagHits.asUInt
  }.elsewhen (!btbValids.asUInt.andR) {
    // no tag hit but there are free entries: pick the highest-numbered invalid way
    btbWriteWay := Mux(btbInvalids >= 8.U, "b1000".U,
      Mux(btbInvalids >= 4.U, "b0100".U,
      Mux(btbInvalids >= 2.U, "b0010".U, "b0001".U)))
  }.otherwise {
    // no tag hit and no free entry: select a victim way at random
    btbWriteWay := UIntToOH(LFSR64()(log2Up(BtbWays) - 1, 0))
  }

  // 1.3 calculate the new 2-bit counter value
  val btbWrite = WireInit(0.U.asTypeOf(btbEntry()))
  btbWrite.valid := true.B
  btbWrite.tag := btbAddr.getTag(redirectLatch.pc)
  btbWrite._type := redirectLatch._type
  btbWrite.target := redirectLatch.brTarget
  val oldPred = WireInit("b01".U)
  oldPred := PriorityMux(btbWriteWay.asTypeOf(Vec(BtbWays, Bool())), btbUpdateRead.map { e => e.pred })
  val newPred = Mux(redirectLatch.taken, Mux(oldPred === "b11".U, "b11".U, oldPred + 1.U),
    Mux(oldPred === "b00".U, "b00".U, oldPred - 1.U))
  btbWrite.pred := Mux(btbUpdateTagHits.asUInt.orR && redirectLatch._type === BTBtype.B, newPred, "b01".U)

  // 1.4 write BTB
  for (b <- 0 until BtbBanks) {
    for (w <- 0 until BtbWays) {
      when (b.U === bankLatch) {
        btb(b)(w).io.w.req.valid := OHToUInt(btbWriteWay) === w.U &&
          RegNext(io.redirect.valid, init = false.B) &&
          (redirectLatch._type === BTBtype.B || redirectLatch._type === BTBtype.J)
        btb(b)(w).io.w.req.bits.setIdx := btbAddr.getBankIdx(redirectLatch.pc)
        btb(b)(w).io.w.req.bits.data := btbWrite
      }.otherwise {
        btb(b)(w).io.w.req.valid := false.B
        btb(b)(w).io.w.req.bits.setIdx := DontCare
        btb(b)(w).io.w.req.bits.data := DontCare
      }
    }
  }

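  // Illustrative 2-bit counter behaviour (pred(1) selects the direction, see
  // btbTakens above): 00/01 predict not-taken, 10/11 predict taken. A taken
  // redirect saturates the counter upwards, a not-taken one saturates it
  // downwards, and a newly allocated or non-B-type entry is reset to the weak
  // value 01.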
  // 2. update JBTAC
  val jbtacWrite = WireInit(0.U.asTypeOf(jbtacEntry()))
  jbtacWrite.valid := true.B
  jbtacWrite.tag := jbtacAddr.getTag(io.redirect.bits.pc)
  jbtacWrite.target := io.redirect.bits.target
  (0 until JbtacBanks).map(b =>
    jbtac(b).io.w.req.valid := io.redirect.valid &&
      b.U === jbtacAddr.getBank(io.redirect.bits.pc) &&
      io.redirect.bits._type === BTBtype.I)
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.setIdx := jbtacAddr.getBankIdx(io.redirect.bits.pc))
  (0 until JbtacBanks).map(b => jbtac(b).io.w.req.bits.data := jbtacWrite)
  */
}
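
// A minimal elaboration sketch (illustrative; assumes the chisel3 3.x Driver
// API and that XSModule needs no extra constructor parameters):
//
//   object BPUGen extends App {
//     chisel3.Driver.execute(args, () => new BPU)
//   }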