package xiangshan.frontend

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.ALUOpType
import xiangshan.backend.JumpOpType
import chisel3.util.experimental.BoringUtils
import xiangshan.backend.decode.XSTrap

/** Address view used by the predictor tables: {tag, idx, offset}.
  * idx is further split into a bank selector (low bits) and an in-bank index.
  */
class TableAddr(val idxBits: Int, val banks: Int) extends XSBundle {
  def tagBits = VAddrBits - idxBits - 1

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(1.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = getIdx(x)(log2Up(banks) - 1, 0)
  def getBankIdx(x: UInt) = getIdx(x)(idxBits - 1, log2Up(banks))
}

/** Payload forwarded from BPU Stage1 to Stage2: raw BTB/JBTAC/TAGE results,
  * per-instruction global history, and the quick (S1) prediction.
  */
class Stage1To2IO extends XSBundle {
  val pc = Output(UInt(VAddrBits.W))
  val btb = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val targets = Output(Vec(FetchWidth, UInt(VAddrBits.W)))
  }
  val jbtac = new Bundle {
    val hitIdx = Output(UInt(FetchWidth.W))
    val target = Output(UInt(VAddrBits.W))
  }
  val tage = new Bundle {
    val hits = Output(UInt(FetchWidth.W))
    val takens = Output(Vec(FetchWidth, Bool()))
  }
  val hist = Output(Vec(FetchWidth, UInt(HistoryLength.W)))
  val btbPred = ValidIO(new BranchPrediction)
}

/** BPU Stage1: looks up BTB/JBTAC/TAGE in parallel, maintains the global
  * history register (ghr), and produces a quick one-cycle prediction for the
  * IFU while forwarding raw results to Stage2.
  */
class BPUStage1 extends XSModule {
  val io = IO(new Bundle() {
    val in = new Bundle { val pc = Flipped(Decoupled(UInt(VAddrBits.W))) }
    // from backend
    val redirectInfo = Input(new RedirectInfo)
    // from Stage3
    val flush = Input(Bool())
    val s3RollBackHist = Input(UInt(HistoryLength.W))
    val s3Taken = Input(Bool())
    // to ifu, quick prediction result
    val s1OutPred = ValidIO(new BranchPrediction)
    // to Stage2
    val out = Decoupled(new Stage1To2IO)
  })

  io.in.pc.ready := true.B

  // flush Stage1 when io.flush
  val flushS1 = BoolStopWatch(io.flush, io.in.pc.fire(), startHighPriority = true)

  // global history register
  val ghr = RegInit(0.U(HistoryLength.W))
  // modify updateGhr and newGhr when updating ghr
  val updateGhr = WireInit(false.B)
  val newGhr = WireInit(0.U(HistoryLength.W))
  when (updateGhr) { ghr := newGhr }
  // use hist as global history!!! (bypass the register on the update cycle)
  val hist = Mux(updateGhr, newGhr, ghr)

  // Tage predictor
  val tage = Module(new FakeTAGE)
  // val tage = if(EnableBPD) Module(new Tage) else Module(new FakeTAGE)
  tage.io.req.valid := io.in.pc.fire()
  tage.io.req.bits.pc := io.in.pc.bits
  tage.io.req.bits.hist := hist
  tage.io.redirectInfo <> io.redirectInfo
  io.out.bits.tage <> tage.io.out
  io.s1OutPred.bits.tageMeta := tage.io.meta

  // latch pc for 1 cycle latency when reading SRAM
  val pcLatch = RegEnable(io.in.pc.bits, io.in.pc.fire())

  val r = io.redirectInfo.redirect
  // reconstruct the fetch-packet pc of the mispredicted instruction
  val updateFetchpc = r.pc - (r.fetchIdx << 2.U)

  // BTB
  val btb = Module(new BTB)
  btb.io.in.pc <> io.in.pc
  btb.io.in.pcLatch := pcLatch
  // TODO: pass real mask in
  btb.io.in.mask := "b1111111111111111".asUInt
  btb.io.redirectValid := io.redirectInfo.valid
  btb.io.flush := io.flush

  // btb.io.update.fetchPC := updateFetchpc
  // btb.io.update.fetchIdx := r.fetchIdx
  btb.io.update.pc := r.pc
  btb.io.update.hit := r.btbHitWay
  btb.io.update.misPred := io.redirectInfo.misPred
  // btb.io.update.writeWay := r.btbVictimWay
  btb.io.update.oldCtr := r.btbPredCtr
  btb.io.update.taken := r.taken
  btb.io.update.target := r.brTarget
  btb.io.update._type := r._type
  // TODO: add RVC logic
  btb.io.update.isRVC := DontCare

  val btbHit = btb.io.out.hit
  val btbTaken = btb.io.out.taken
  val btbTakenIdx = btb.io.out.takenIdx
  val btbTakenTarget = btb.io.out.target
  // val btbWriteWay = btb.io.out.writeWay
  val btbNotTakens = btb.io.out.notTakens
  val btbCtrs = VecInit(btb.io.out.dEntries.map(_.pred))
  val btbValids = btb.io.out.hits
  val btbTargets = VecInit(btb.io.out.dEntries.map(_.target))
  val btbTypes = VecInit(btb.io.out.dEntries.map(_._type))

  // JBTAC: indirect-jump target cache
  val jbtac = Module(new JBTAC)
  jbtac.io.in.pc <> io.in.pc
  jbtac.io.in.pcLatch := pcLatch
  jbtac.io.in.hist := hist
  jbtac.io.redirectValid := io.redirectInfo.valid
  jbtac.io.flush := io.flush

  jbtac.io.update.fetchPC := updateFetchpc
  jbtac.io.update.fetchIdx := r.fetchIdx << 1
  jbtac.io.update.misPred := io.redirectInfo.misPred
  jbtac.io.update._type := r._type
  jbtac.io.update.target := r.target
  jbtac.io.update.hist := r.hist

  val jbtacHit = jbtac.io.out.hit
  val jbtacTarget = jbtac.io.out.target
  val jbtacHitIdx = jbtac.io.out.hitIdx

  // calculate global history of each instr:
  // instruction j's history is firstHist shifted left by the number of
  // not-taken branches that precede j in the fetch packet.
  val firstHist = RegNext(hist)
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
  (0 until FetchWidth).foreach(i => shift(i) := Mux(!btbNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
  for (j <- 0 until FetchWidth) {
    var tmp = 0.U
    for (i <- 0 until FetchWidth) {
      tmp = tmp + shift(i)(j)
    }
    histShift(j) := tmp
  }
  (0 until FetchWidth).foreach(i => io.s1OutPred.bits.hist(i) := firstHist << histShift(i))

  // update ghr
  updateGhr := io.s1OutPred.bits.redirect || io.flush
  val brJumpIdx = Mux(!(btbHit && btbTaken), 0.U, UIntToOH(btbTakenIdx))
  val indirectIdx = Mux(!jbtacHit, 0.U, UIntToOH(jbtacHitIdx))
  //val newTaken = Mux(io.redirectInfo.flush(), !(r._type === BTBtype.B && !r.taken), )
  // priority: backend redirect > Stage3 flush > S1's own redirect > fall-through
  newGhr := Mux(io.redirectInfo.flush(), (r.hist << 1.U) | !(r._type === BTBtype.B && !r.taken),
            Mux(io.flush, Mux(io.s3Taken, (io.s3RollBackHist << 1.U) | 1.U, io.s3RollBackHist),
            Mux(io.s1OutPred.bits.redirect, ((PriorityMux(brJumpIdx | indirectIdx, io.s1OutPred.bits.hist) << 1.U) | 1.U),
            io.s1OutPred.bits.hist(0) << PopCount(btbNotTakens))))

  // redirect based on BTB and JBTAC
  // io.out.valid := RegNext(io.in.pc.fire()) && !flushS1u
  io.out.valid := RegNext(io.in.pc.fire()) && !io.flush

  io.s1OutPred.valid := io.out.valid
  io.s1OutPred.bits.redirect := btbHit && btbTaken || jbtacHit

  // mask with ones up to and including position i (valid instrs before the jump)
  def getInstrValid(i: Int): UInt = {
    val mask = Wire(UInt(FetchWidth.W))
    val vec = Wire(Vec(FetchWidth, UInt(1.W)))
    for (j <- 0 until FetchWidth) {
      if (j <= i)
        vec(j) := 1.U
      else
        vec(j) := 0.U
    }
    mask := vec.asUInt
    mask
  }
  io.s1OutPred.bits.instrValid := (Fill(FetchWidth, ~io.s1OutPred.bits.redirect).asUInt |
    PriorityMux(brJumpIdx | indirectIdx, (0 until FetchWidth).map(getInstrValid(_)))).asTypeOf(Vec(FetchWidth, Bool()))
  // take the BTB target when the branch comes before the indirect jump
  io.s1OutPred.bits.target := Mux(brJumpIdx === LowestBit(brJumpIdx | indirectIdx, FetchWidth), btbTakenTarget, jbtacTarget)
  io.s1OutPred.bits.predCtr := btbCtrs
  io.s1OutPred.bits.btbHitWay := btbHit
  io.s1OutPred.bits.rasSp := DontCare
  io.s1OutPred.bits.rasTopCtr := DontCare

  io.out.bits.pc := pcLatch
  io.out.bits.btb.hits := btbValids.asUInt
  (0 until FetchWidth).foreach(i => io.out.bits.btb.targets(i) := btbTargets(i))
  io.out.bits.jbtac.hitIdx := UIntToOH(jbtacHitIdx)
  io.out.bits.jbtac.target := jbtacTarget
  // TODO: we don't need this repeatedly!
  io.out.bits.hist := io.s1OutPred.bits.hist
  io.out.bits.btbPred := io.s1OutPred

  // debug info
  XSDebug(true.B, "in:(%d %d) pc=%x ghr=%b\n", io.in.pc.valid, io.in.pc.ready, io.in.pc.bits, hist)
  XSDebug(true.B, "outPred:(%d) pc=0x%x, redirect=%d instrValid=%b tgt=%x\n",
    io.s1OutPred.valid, pcLatch, io.s1OutPred.bits.redirect, io.s1OutPred.bits.instrValid.asUInt, io.s1OutPred.bits.target)
  XSDebug(io.flush && io.redirectInfo.flush(),
    "flush from backend: pc=%x tgt=%x brTgt=%x _type=%b taken=%d oldHist=%b fetchIdx=%d isExcpt=%d\n",
    r.pc, r.target, r.brTarget, r._type, r.taken, r.hist, r.fetchIdx, r.isException)
  XSDebug(io.flush && !io.redirectInfo.flush(),
    "flush from Stage3: s3Taken=%d s3RollBackHist=%b\n", io.s3Taken, io.s3RollBackHist)

}

class Stage2To3IO extends Stage1To2IO {
}

/** BPU Stage2: a pure pipeline buffer between Stage1 and Stage3. */
class BPUStage2 extends XSModule {
  val io = IO(new Bundle() {
    // flush from Stage3
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage1To2IO))
    val out = Decoupled(new Stage2To3IO)
  })

  // flush Stage2 when Stage3 or banckend redirects
  val flushS2 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  when (io.in.fire()) { inLatch := io.in.bits }
  val validLatch = RegInit(false.B)
  when (io.flush) {
    validLatch := false.B
  }.elsewhen (io.in.fire()) {
    validLatch := true.B
  }.elsewhen (io.out.fire()) {
    validLatch := false.B
  }

  io.out.valid := !io.flush && !flushS2 && validLatch
  io.in.ready := !validLatch || io.out.fire()

  // do nothing
  io.out.bits := inLatch

  // debug info
  XSDebug(true.B, "in:(%d %d) pc=%x out:(%d %d) pc=%x\n",
    io.in.valid, io.in.ready, io.in.bits.pc, io.out.valid, io.out.ready, io.out.bits.pc)
  XSDebug(true.B, "validLatch=%d pc=%x\n", validLatch, inLatch.pc)
  XSDebug(io.flush, "flush!!!\n")
}

/** BPU Stage3: combines predecode information with the Stage1 table results to
  * find the first taken branch/jal/call/jalr/ret in the fetch packet, manages
  * the RAS, and flushes the earlier stages when its (slower, more accurate)
  * prediction disagrees with Stage1's.
  */
class BPUStage3 extends XSModule {
  val io = IO(new Bundle() {
    val flush = Input(Bool())
    val in = Flipped(Decoupled(new Stage2To3IO))
    val out = ValidIO(new BranchPrediction)
    // from icache
    val predecode = Flipped(ValidIO(new Predecode))
    // from backend
    val redirectInfo = Input(new RedirectInfo)
    // to Stage1 and Stage2
    val flushBPU = Output(Bool())
    // to Stage1, restore ghr in stage1 when flushBPU is valid
    val s1RollBackHist = Output(UInt(HistoryLength.W))
    val s3Taken = Output(Bool())
  })

  val flushS3 = BoolStopWatch(io.flush, io.in.fire(), startHighPriority = true)
  val inLatch = RegInit(0.U.asTypeOf(io.in.bits))
  val validLatch = RegInit(false.B)
  when (io.in.fire()) { inLatch := io.in.bits }
  when (io.flush) {
    validLatch := false.B
  }.elsewhen (io.in.fire()) {
    validLatch := true.B
  }.elsewhen (io.out.valid) {
    validLatch := false.B
  }
  io.out.valid := validLatch && io.predecode.valid && !flushS3 && !io.flush
  io.in.ready := !validLatch || io.out.valid

  // RAS
  // TODO: split retAddr and ctr
  def rasEntry() = new Bundle {
    val retAddr = UInt(VAddrBits.W)
    val ctr = UInt(8.W) // layer of nested call functions
  }
  val ras = RegInit(VecInit(Seq.fill(RasSize)(0.U.asTypeOf(rasEntry()))))
  val sp = Counter(RasSize)
  val rasTop = ras(sp.value)
  val rasTopAddr = rasTop.retAddr

  // get the first taken branch/jal/call/jalr/ret in a fetch line
  // brNotTakenIdx indicates all the not-taken branches before the first jump instruction
  val brs = inLatch.btb.hits & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => ALUOpType.isBranch(t) }).asUInt) & io.predecode.bits.mask
  val brTakenIdx = PriorityMux(brs & inLatch.tage.takens.asUInt, (0 until FetchWidth).map(_.U))
  val jalIdx = PriorityMux(inLatch.btb.hits & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.jal }).asUInt) & io.predecode.bits.mask, (0 until FetchWidth).map(_.U))
  val callIdx = PriorityMux(inLatch.btb.hits & io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.call }).asUInt), (0 until FetchWidth).map(_.U))
  val jalrIdx = PriorityMux(inLatch.jbtac.hitIdx & io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.jalr }).asUInt), (0 until FetchWidth).map(_.U))
  val retIdx = PriorityMux(io.predecode.bits.mask & Reverse(Cat(io.predecode.bits.fuOpTypes.map { t => t === JumpOpType.ret }).asUInt), (0 until FetchWidth).map(_.U))

  // Scan from the highest index down to 0 so that, by Chisel's last-connect
  // semantics, the LOWEST-indexed taken control-flow instruction wins.
  // NOTE(fix): the original used `for (i <- FetchWidth-1 to 0)`, an EMPTY
  // Scala range (missing `by -1`), so the scan never ran and s3Taken was
  // stuck at false. It also reassigned a Scala `var` inside `when`, which is
  // an elaboration-time assignment and is NOT gated by the condition; jmpIdx
  // must be a Wire driven with `:=` inside the `when` block instead.
  val jmpIdx = WireInit(0.U(log2Up(FetchWidth).W))
  io.s3Taken := false.B
  for (i <- FetchWidth - 1 to 0 by -1) {
    val isBrTaken = brs(i) && inLatch.tage.takens(i)
    val isJal = inLatch.btb.hits(i) && io.predecode.bits.fuOpTypes(i) === JumpOpType.jal && io.predecode.bits.mask(i)
    val isCall = inLatch.btb.hits(i) && io.predecode.bits.fuOpTypes(i) === JumpOpType.call && io.predecode.bits.mask(i)
    val isJalr = inLatch.jbtac.hitIdx(i) && io.predecode.bits.fuOpTypes(i) === JumpOpType.jalr && io.predecode.bits.mask(i)
    val isRet = io.predecode.bits.fuOpTypes(i) === JumpOpType.ret && io.predecode.bits.mask(i)
    when (isBrTaken || isJal || isCall || isJalr || isRet) {
      jmpIdx := i.U
      io.s3Taken := true.B
    }
  }
  val brNotTakens = VecInit((0 until FetchWidth).map(i => brs(i) && ~inLatch.tage.takens(i) && i.U <= jmpIdx && io.predecode.bits.mask(i)))

  io.out.bits.target := Mux(!io.s3Taken, inLatch.pc + (PopCount(io.predecode.bits.mask) << 2.U), // TODO: RVC
                        Mux(jmpIdx === retIdx, rasTopAddr,
                        Mux(jmpIdx === jalrIdx, inLatch.jbtac.target,
                        inLatch.btb.targets(jmpIdx))))
  for (i <- 0 until FetchWidth) {
    io.out.bits.instrValid(i) := ~io.s3Taken || i.U <= jmpIdx
  }
  io.out.bits.predCtr := inLatch.btbPred.bits.predCtr
  io.out.bits.btbHitWay := inLatch.btbPred.bits.btbHitWay
  io.out.bits.tageMeta := inLatch.btbPred.bits.tageMeta
  //io.out.bits._type := Mux(jmpIdx === retIdx, BTBtype.R,
  //  Mux(jmpIdx === jalrIdx, BTBtype.I,
  //  Mux(jmpIdx === brTakenIdx, BTBtype.B, BTBtype.J)))
  val firstHist = inLatch.btbPred.bits.hist(0)
  // there may be several notTaken branches before the first jump instruction,
  // so we need to calculate how many zeroes should each instruction shift in its global history.
  // each history is exclusive of instruction's own jump direction.
  val histShift = Wire(Vec(FetchWidth, UInt(log2Up(FetchWidth).W)))
  val shift = Wire(Vec(FetchWidth, Vec(FetchWidth, UInt(1.W))))
  (0 until FetchWidth).foreach(i => shift(i) := Mux(!brNotTakens(i), 0.U, ~LowerMask(UIntToOH(i.U), FetchWidth)).asTypeOf(Vec(FetchWidth, UInt(1.W))))
  for (j <- 0 until FetchWidth) {
    var tmp = 0.U
    for (i <- 0 until FetchWidth) {
      tmp = tmp + shift(i)(j)
    }
    histShift(j) := tmp
  }
  (0 until FetchWidth).foreach(i => io.out.bits.hist(i) := firstHist << histShift(i))
  // save ras checkpoint info
  io.out.bits.rasSp := sp.value
  io.out.bits.rasTopCtr := rasTop.ctr

  // flush BPU and redirect when target differs from the target predicted in Stage1
  io.out.bits.redirect := (if(EnableBPD) (inLatch.btbPred.bits.redirect ^ io.s3Taken ||
    inLatch.btbPred.bits.redirect && io.s3Taken && io.out.bits.target =/= inLatch.btbPred.bits.target)
    else false.B)
  io.flushBPU := io.out.bits.redirect && io.out.valid

  // speculative update RAS
  val rasWrite = WireInit(0.U.asTypeOf(rasEntry()))
  rasWrite.retAddr := inLatch.pc + (callIdx << 2.U) + 4.U
  val allocNewEntry = rasWrite.retAddr =/= rasTopAddr
  // recursive calls to the same site just bump the counter instead of pushing
  rasWrite.ctr := Mux(allocNewEntry, 1.U, rasTop.ctr + 1.U)
  when (io.out.valid) {
    when (jmpIdx === callIdx) {
      ras(Mux(allocNewEntry, sp.value + 1.U, sp.value)) := rasWrite
      when (allocNewEntry) { sp.value := sp.value + 1.U }
    }.elsewhen (jmpIdx === retIdx) {
      when (rasTop.ctr === 1.U) {
        sp.value := Mux(sp.value === 0.U, 0.U, sp.value - 1.U)
      }.otherwise {
        ras(sp.value) := Cat(rasTop.ctr - 1.U, rasTopAddr).asTypeOf(rasEntry())
      }
    }
  }
  // use checkpoint to recover RAS
  val recoverSp = io.redirectInfo.redirect.rasSp
  val recoverCtr = io.redirectInfo.redirect.rasTopCtr
  when (io.redirectInfo.valid && io.redirectInfo.misPred) {
    sp.value := recoverSp
    ras(recoverSp) := Cat(recoverCtr, ras(recoverSp).retAddr).asTypeOf(rasEntry())
  }

  // roll back global history in S1 if S3 redirects
  io.s1RollBackHist := Mux(io.s3Taken, io.out.bits.hist(jmpIdx), io.out.bits.hist(0) << PopCount(brs & ~inLatch.tage.takens.asUInt))

  // debug info
  XSDebug(io.in.fire(), "in:(%d %d) pc=%x\n", io.in.valid, io.in.ready, io.in.bits.pc)
  XSDebug(io.out.valid, "out:%d pc=%x redirect=%d predcdMask=%b instrValid=%b tgt=%x\n",
    io.out.valid, inLatch.pc, io.out.bits.redirect, io.predecode.bits.mask, io.out.bits.instrValid.asUInt, io.out.bits.target)
  XSDebug(true.B, "flushS3=%d\n", flushS3)
  XSDebug(true.B, "validLatch=%d predecode.valid=%d\n", validLatch, io.predecode.valid)
  XSDebug(true.B, "brs=%b brTakenIdx=%d brNTakens=%b jalIdx=%d jalrIdx=%d callIdx=%d retIdx=%d\n",
    brs, brTakenIdx, brNotTakens.asUInt, jalIdx, jalrIdx, callIdx, retIdx)

  // BPU's TEMP Perf Cnt
  BoringUtils.addSource(io.out.valid, "MbpS3Cnt")
  BoringUtils.addSource(io.out.valid && io.out.bits.redirect, "MbpS3TageRed")
  BoringUtils.addSource(io.out.valid && (inLatch.btbPred.bits.redirect ^ io.s3Taken), "MbpS3TageRedDir")
  BoringUtils.addSource(io.out.valid && (inLatch.btbPred.bits.redirect
    && io.s3Taken && (io.out.bits.target =/= inLatch.btbPred.bits.target)), "MbpS3TageRedTar")
}

/** Top-level BPU: wires the three stages together and hosts the temporary
  * BoringUtils-based performance counters.
  */
class BPU extends XSModule {
  val io = IO(new Bundle() {
    // from backend
    // flush pipeline if misPred and update bpu based on redirect signals from brq
    val redirectInfo = Input(new RedirectInfo)

    val in = new Bundle { val pc = Flipped(Valid(UInt(VAddrBits.W))) }

    val btbOut = ValidIO(new BranchPrediction)
    val tageOut = ValidIO(new BranchPrediction)

    // predecode info from icache
    // TODO: simplify this after implement predecode unit
    val predecode = Flipped(ValidIO(new Predecode))
  })

  val s1 = Module(new BPUStage1)
  val s2 = Module(new BPUStage2)
  val s3 = Module(new BPUStage3)

  s1.io.redirectInfo <> io.redirectInfo
  s1.io.flush := s3.io.flushBPU || io.redirectInfo.flush()
  s1.io.in.pc.valid := io.in.pc.valid
  s1.io.in.pc.bits <> io.in.pc.bits
  io.btbOut <> s1.io.s1OutPred
  s1.io.s3RollBackHist := s3.io.s1RollBackHist
  s1.io.s3Taken := s3.io.s3Taken

  s1.io.out <> s2.io.in
  s2.io.flush := s3.io.flushBPU || io.redirectInfo.flush()

  s2.io.out <> s3.io.in
  s3.io.flush := io.redirectInfo.flush()
  s3.io.predecode <> io.predecode
  io.tageOut <> s3.io.out
  s3.io.redirectInfo <> io.redirectInfo

  // TODO: temp and ugly code, when perf counters is added( may after adding CSR), please mv the below counter
  val bpuPerfCntList = List(
    ("MbpInstr","        "),
    ("MbpRight","        "),
    ("MbpWrong","        "),
    ("MbpBRight","       "),
    ("MbpBWrong","       "),
    ("MbpJRight","       "),
    ("MbpJWrong","       "),
    ("MbpIRight","       "),
    ("MbpIWrong","       "),
    ("MbpRRight","       "),
    ("MbpRWrong","       "),
    ("MbpS3Cnt","        "),
    ("MbpS3TageRed","    "),
    ("MbpS3TageRedDir"," "),
    ("MbpS3TageRedTar"," ")
  )

  val bpuPerfCnts = List.fill(bpuPerfCntList.length)(RegInit(0.U(XLEN.W)))
  val bpuPerfCntConds = List.fill(bpuPerfCntList.length)(WireInit(false.B))
  (bpuPerfCnts zip bpuPerfCntConds) map { case (cnt, cond) => { when (cond) { cnt := cnt + 1.U }}}

  for(i <- bpuPerfCntList.indices) {
    BoringUtils.addSink(bpuPerfCntConds(i), bpuPerfCntList(i)._1)
  }

  val xsTrap = WireInit(false.B)
  BoringUtils.addSink(xsTrap, "XSTRAP_BPU")

  // if (!p.FPGAPlatform) {
    when (xsTrap) {
      printf("=================BPU's PerfCnt================\n")
      for(i <- bpuPerfCntList.indices) {
        printf(bpuPerfCntList(i)._1 + bpuPerfCntList(i)._2 + " <- " + "%d\n", bpuPerfCnts(i))
      }
    }
  // }
}