/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.ExceptionNO._

class IBufPtr(implicit p: Parameters) extends CircularQueuePtr[IBufPtr](
  p => p(XSCoreParamsKey).IBufSize
) {
}

class IBufInBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufInBankPtr](
  p => p(XSCoreParamsKey).IBufSize / p(XSCoreParamsKey).IBufNBank
) {
}

class IBufBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufBankPtr](
  p => p(XSCoreParamsKey).IBufNBank
) {
}

class IBufferIO(implicit p: Parameters) extends XSBundle {
  val flush = Input(Bool())
  val ControlRedirect = Input(Bool())
  val ControlBTBMissBubble = Input(Bool())
  val TAGEMissBubble = Input(Bool())
  val SCMissBubble = Input(Bool())
  val ITTAGEMissBubble = Input(Bool())
  val RASMissBubble = Input(Bool())
  val MemVioRedirect = Input(Bool())
  val in = Flipped(DecoupledIO(new FetchToIBuffer))
  val out = Vec(DecodeWidth, DecoupledIO(new CtrlFlow))
  val full = Output(Bool())
  val decodeCanAccept = Input(Bool())
  val stallReason = new StallReasonIO(DecodeWidth)
}

class IBufEntry(implicit p: Parameters) extends XSBundle {
  val inst = UInt(32.W)
  val pc = UInt(VAddrBits.W)
  val foldpc = UInt(MemPredPCWidth.W)
  val pd = new PreDecodeInfo
  val pred_taken = Bool()
  val ftqPtr = new FtqPtr
  val ftqOffset = UInt(log2Ceil(PredictWidth).W)
  val exceptionType = IBufferExceptionType()
  val exceptionFromBackend = Bool()
  val triggered = TriggerAction()

  def fromFetch(fetch: FetchToIBuffer, i: Int): IBufEntry = {
    inst := fetch.instrs(i)
    pc := fetch.pc(i)
    foldpc := fetch.foldpc(i)
    pd := fetch.pd(i)
    pred_taken := fetch.ftqOffset(i).valid
    ftqPtr := fetch.ftqPtr
    ftqOffset := fetch.ftqOffset(i).bits
    exceptionType := IBufferExceptionType.cvtFromFetchExcpAndCrossPageAndRVCII(
      fetch.exceptionType(i),
      fetch.crossPageIPFFix(i),
      fetch.illegalInstr(i),
    )
    exceptionFromBackend := fetch.exceptionFromBackend(i)
    triggered := fetch.triggered(i)
    this
  }

  def toCtrlFlow: CtrlFlow = {
    val cf = Wire(new CtrlFlow)
    cf.instr := inst
    cf.pc := pc
    cf.foldpc := foldpc
    cf.exceptionVec := 0.U.asTypeOf(ExceptionVec())
    cf.exceptionVec(instrPageFault)      := IBufferExceptionType.isPF (this.exceptionType)
    cf.exceptionVec(instrGuestPageFault) := IBufferExceptionType.isGPF(this.exceptionType)
    cf.exceptionVec(instrAccessFault)    := IBufferExceptionType.isAF (this.exceptionType)
    cf.exceptionVec(EX_II)               := IBufferExceptionType.isRVCII(this.exceptionType)
    cf.exceptionFromBackend := exceptionFromBackend
    cf.trigger := triggered
    cf.pd := pd
    cf.pred_taken := pred_taken
    cf.crossPageIPFFix := IBufferExceptionType.isCrossPage(this.exceptionType)
    cf.storeSetHit := DontCare
    cf.waitForRobIdx := DontCare
    cf.loadWaitBit := DontCare
    cf.loadWaitStrict := DontCare
    cf.ssid := DontCare
    cf.ftqPtr := ftqPtr
    cf.ftqOffset := ftqOffset
    cf
  }

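  // Encoding note (derived from the constants and helpers below): bits [1:0] select the
  // exception kind (00 = none, 01 = page fault, 10 = guest page fault, 11 = access fault);
  // bit [2] marks a cross-page exception when bits [1:0] are non-zero, and an RVC illegal
  // instruction when bits [1:0] are zero.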
  object IBufferExceptionType extends NamedUInt(3) {
    def None        = "b000".U
    def NonCrossPF  = "b001".U
    def NonCrossGPF = "b010".U
    def NonCrossAF  = "b011".U
    // illegal instruction
    def rvcII       = "b100".U
    def CrossPF     = "b101".U
    def CrossGPF    = "b110".U
    def CrossAF     = "b111".U

    def cvtFromFetchExcpAndCrossPageAndRVCII(fetchExcp: UInt, crossPage: Bool, rvcIll: Bool): UInt = {
      require(
        fetchExcp.getWidth == ExceptionType.width,
        s"The width(${fetchExcp.getWidth}) of fetchExcp should be equal to " +
          s"the width(${ExceptionType.width}) of frontend.ExceptionType."
      )
      MuxCase(0.U, Seq(
        crossPage     -> Cat(1.U(1.W), fetchExcp),
        fetchExcp.orR -> fetchExcp,
        rvcIll        -> this.rvcII,
      ))
    }

    def isRVCII(uint: UInt): Bool = {
      this.checkInputWidth(uint)
      uint(2) && uint(1, 0) === 0.U
    }

    def isCrossPage(uint: UInt): Bool = {
      this.checkInputWidth(uint)
      uint(2) && uint(1, 0) =/= 0.U
    }

    def isPF (uint: UInt): Bool = uint(1, 0) === this.NonCrossPF (1, 0)
    def isGPF(uint: UInt): Bool = uint(1, 0) === this.NonCrossGPF(1, 0)
    def isAF (uint: UInt): Bool = uint(1, 0) === this.NonCrossAF (1, 0)
  }
}

class IBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper with HasPerfEvents {
  val io = IO(new IBufferIO)

  // io alias
  private val decodeCanAccept = io.decodeCanAccept

  // Parameter Check
  private val bankSize = IBufSize / IBufNBank
  require(IBufSize % IBufNBank == 0, s"IBufNBank should divide IBufSize, IBufNBank: $IBufNBank, IBufSize: $IBufSize")
  require(IBufNBank >= DecodeWidth,
    s"IBufNBank should be equal to or larger than DecodeWidth, IBufNBank: $IBufNBank, DecodeWidth: $DecodeWidth")

  // IBuffer is organized as raw registers.
  // Because the IBuffer is a large queue, the read & write port logic should be precisely controlled.
  //   . + + E E E - .
  //   . + + E E E - .
  //   . . + E E E - .
  //   . . + E E E E -
  // As shown above, + means enqueue, - means dequeue, E is current content.
  // On dequeue, the read ports are organized like a banked FIFO:
  // a dequeue reads no more than 1 entry from each bank sequentially, which can be exploited to reduce area.
  // Enqueue writes cannot benefit from this characteristic unless an SRAM is used.
  // For details, see Enqueue and Dequeue below.
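  // Physical-to-banked index mapping used by bankedIBufView below:
  //   ibuf(bankID + inBankOffset * IBufNBank)
  // For illustration only (assuming, say, IBufSize = 48 and IBufNBank = 6): physical entry 13
  // lives in bank 1 at in-bank offset 2, so consecutive entries fall into consecutive banks.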
  private val ibuf: Vec[IBufEntry] = RegInit(VecInit.fill(IBufSize)(0.U.asTypeOf(new IBufEntry)))
  private val bankedIBufView: Vec[Vec[IBufEntry]] = VecInit.tabulate(IBufNBank)(
    bankID => VecInit.tabulate(bankSize)(
      inBankOffset => ibuf(bankID + inBankOffset * IBufNBank)
    )
  )


  // Bypass wire
  private val bypassEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Normal read wire
  private val deqEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Output register
  private val outputEntries = RegInit(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  private val outputEntriesValidNum =
    PriorityMuxDefault(outputEntries.map(_.valid).zip(Seq.range(1, DecodeWidth).map(_.U)).reverse.toSeq, 0.U)

  // Between Bank
  private val deqBankPtrVec: Vec[IBufBankPtr] = RegInit(VecInit.tabulate(DecodeWidth)(_.U.asTypeOf(new IBufBankPtr)))
  private val deqBankPtr: IBufBankPtr = deqBankPtrVec(0)
  private val deqBankPtrVecNext = Wire(deqBankPtrVec.cloneType)
  // Inside Bank
  private val deqInBankPtr: Vec[IBufInBankPtr] = RegInit(VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr)))
  private val deqInBankPtrNext = Wire(deqInBankPtr.cloneType)

  val deqPtr = RegInit(0.U.asTypeOf(new IBufPtr))
  val deqPtrNext = Wire(deqPtr.cloneType)

  val enqPtrVec = RegInit(VecInit.tabulate(PredictWidth)(_.U.asTypeOf(new IBufPtr)))
  val enqPtr = enqPtrVec(0)

  val numTryEnq = WireDefault(0.U)
  val numEnq = Mux(io.in.fire, numTryEnq, 0.U)

  // empty and decode can accept insts
  val useBypass = enqPtr === deqPtr && decodeCanAccept
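  // When useBypass is set and a fetch fires, up to DecodeWidth newly fetched insts are latched
  // directly into outputEntries through bypassEntries, skipping the queue registers; only insts
  // beyond the first DecodeWidth are actually enqueued (see numTryEnq / numBypass below).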
  // The number of insts accepted by decode.
  // Since decode promises to accept insts in order, use a priority encoder to simplify the accumulation.
  private val numOut = Wire(UInt(log2Ceil(DecodeWidth).W))
  private val numDeq = numOut

  // count current number of valid entries
  val numValid = distanceBetween(enqPtr, deqPtr)
  val numValidAfterDeq = numValid - numDeq
  // count next number of valid entries
  val numValidNext = numValid + numEnq - numDeq
  val allowEnq = RegInit(true.B)
  val numFromFetch = Mux(io.in.valid, PopCount(io.in.bits.enqEnable), 0.U)

  allowEnq := (IBufSize - PredictWidth).U >= numValidNext // Disable when almost full

  val enqOffset = VecInit.tabulate(PredictWidth)(i => PopCount(io.in.bits.valid.asBools.take(i)))
  val enqData = VecInit.tabulate(PredictWidth)(i => Wire(new IBufEntry).fromFetch(io.in.bits, i))

  val outputEntriesIsNotFull = !outputEntries(DecodeWidth - 1).valid
  when(decodeCanAccept) {
    numOut := Mux(numValid >= DecodeWidth.U, DecodeWidth.U, numValid)
  }.elsewhen(outputEntriesIsNotFull) {
    numOut := Mux(numValid >= DecodeWidth.U - outputEntriesValidNum, DecodeWidth.U - outputEntriesValidNum, numValid)
  }.otherwise {
    numOut := 0.U
  }
  val numBypass = Wire(UInt(log2Ceil(DecodeWidth).W))
  // when using bypass, bypassed entries do not enqueue
  when(useBypass) {
    when(numFromFetch >= DecodeWidth.U) {
      numTryEnq := numFromFetch - DecodeWidth.U
      numBypass := DecodeWidth.U
    } .otherwise {
      numTryEnq := 0.U
      numBypass := numFromFetch
    }
  } .otherwise {
    numTryEnq := numFromFetch
    numBypass := 0.U
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Bypass
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  bypassEntries.zipWithIndex.foreach {
    case (entry, idx) =>
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          io.in.bits.valid(i) &&
          io.in.bits.enqEnable(i) &&
          enqOffset(i) === idx.asUInt
      } // Should be OneHot
      entry.valid := validOH.reduce(_ || _) && io.in.fire && !io.flush
      entry.bits := Mux1H(validOH, enqData)

      // Debug Assertion
      XSError(io.in.valid && PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
  }

  // => Decode Output
  // clean register output
  io.out zip outputEntries foreach {
    case (io, reg) =>
      io.valid := reg.valid
      io.bits := reg.bits.toCtrlFlow
  }
  (outputEntries zip bypassEntries).zipWithIndex.foreach {
    case ((out, bypass), i) =>
      when(decodeCanAccept) {
        when(useBypass && io.in.valid) {
          out := bypass
        }.otherwise {
          out := deqEntries(i)
        }
      }.elsewhen(outputEntriesIsNotFull) {
        out.valid := deqEntries(i).valid
        out.bits := Mux(i.U < outputEntriesValidNum, out.bits, VecInit(deqEntries.take(i + 1).map(_.bits))(i.U - outputEntriesValidNum))
      }
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Enqueue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  io.in.ready := allowEnq
  // Data
  ibuf.zipWithIndex.foreach {
    case (entry, idx) => {
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          val useBypassMatch = enqOffset(i) >= DecodeWidth.U &&
            enqPtrVec(enqOffset(i) - DecodeWidth.U).value === idx.asUInt
          val normalMatch = enqPtrVec(enqOffset(i)).value === idx.asUInt
          val m = Mux(useBypass, useBypassMatch, normalMatch) // when using bypass, bypassed entries do not enqueue

          io.in.bits.valid(i) && io.in.bits.enqEnable(i) && m
      } // Should be OneHot
      val wen = validOH.reduce(_ || _) && io.in.fire && !io.flush

      // Write port
      // Each IBuffer entry has a PredictWidth -> 1 Mux
      val writeEntry = Mux1H(validOH, enqData)
      entry := Mux(wen, writeEntry, entry)

      // Debug Assertion
      XSError(io.in.valid && PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
    }
  }
  // Pointer maintenance
  when (io.in.fire && !io.flush) {
    enqPtrVec := VecInit(enqPtrVec.map(_ + numTryEnq))
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Dequeue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  val outputEntriesValidNumNext = Wire(UInt(log2Ceil(DecodeWidth).W))
  XSError(outputEntriesValidNumNext > DecodeWidth.U, "Ibuffer: outputEntriesValidNumNext > DecodeWidth.U")
  val validVec = UIntToMask(outputEntriesValidNumNext(log2Ceil(DecodeWidth) - 1, 0), DecodeWidth)
  when(decodeCanAccept) {
    outputEntriesValidNumNext := Mux(useBypass, numBypass, numDeq)
  }.elsewhen(outputEntriesIsNotFull) {
    outputEntriesValidNumNext := outputEntriesValidNum + numDeq
  }.otherwise {
    outputEntriesValidNumNext := outputEntriesValidNum
  }
  // Data
  // Read port
  // 2-stage: IBufNBank * (bankSize -> 1) + IBufNBank -> 1
  // Should be better than IBufSize -> 1 in area, with no significant latency increase
  private val readStage1: Vec[IBufEntry] = VecInit.tabulate(IBufNBank)(
    bankID => Mux1H(UIntToOH(deqInBankPtr(bankID).value), bankedIBufView(bankID))
  )
  for (i <- 0 until DecodeWidth) {
    deqEntries(i).valid := validVec(i)
    deqEntries(i).bits := Mux1H(UIntToOH(deqBankPtrVec(i).value), readStage1)
  }
  // Pointer maintenance
  deqBankPtrVecNext := VecInit(deqBankPtrVec.map(_ + numDeq))
  deqPtrNext := deqPtr + numDeq
  deqInBankPtrNext.zip(deqInBankPtr).zipWithIndex.foreach {
    case ((ptrNext, ptr), idx) => {
      // validVec[k] == bankValid[deqBankPtr + k]
      // So bankValid[n] == validVec[n - deqBankPtr]
      val validIdx = Mux(idx.asUInt >= deqBankPtr.value,
        idx.asUInt - deqBankPtr.value,
        ((idx + IBufNBank).asUInt - deqBankPtr.value)(log2Ceil(IBufNBank) - 1, 0)
      )(log2Ceil(DecodeWidth) - 1, 0)
      val bankAdvance = numOut > validIdx
      ptrNext := Mux(bankAdvance, ptr + 1.U, ptr)
    }
  }

  // Flush
  when (io.flush) {
    allowEnq := true.B
    enqPtrVec := enqPtrVec.indices.map(_.U.asTypeOf(new IBufPtr))
    deqBankPtrVec := deqBankPtrVec.indices.map(_.U.asTypeOf(new IBufBankPtr))
    deqInBankPtr := VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr))
    deqPtr := 0.U.asTypeOf(new IBufPtr())
    outputEntries.foreach(_.valid := false.B)
  }.otherwise {
    deqPtr := deqPtrNext
    deqInBankPtr := deqInBankPtrNext
    deqBankPtrVec := deqBankPtrVecNext
  }
  io.full := !allowEnq

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // TopDown
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  val topdown_stage = RegInit(0.U.asTypeOf(new FrontendTopDownBundle))
  topdown_stage := io.in.bits.topdown_info
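  // On a flush, record which redirect source caused it, so the top-down stall counters below can
  // attribute the resulting front-end bubbles to the corresponding reason.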
  when(io.flush) {
    when(io.ControlRedirect) {
      when(io.ControlBTBMissBubble) {
        topdown_stage.reasons(TopDownCounters.BTBMissBubble.id) := true.B
      }.elsewhen(io.TAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.TAGEMissBubble.id) := true.B
      }.elsewhen(io.SCMissBubble) {
        topdown_stage.reasons(TopDownCounters.SCMissBubble.id) := true.B
      }.elsewhen(io.ITTAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.ITTAGEMissBubble.id) := true.B
      }.elsewhen(io.RASMissBubble) {
        topdown_stage.reasons(TopDownCounters.RASMissBubble.id) := true.B
      }
    }.elsewhen(io.MemVioRedirect) {
      topdown_stage.reasons(TopDownCounters.MemVioRedirectBubble.id) := true.B
    }.otherwise {
      topdown_stage.reasons(TopDownCounters.OtherRedirectBubble.id) := true.B
    }
  }


  val matchBubble = Wire(UInt(log2Up(TopDownCounters.NumStallReasons.id).W))
  val deqValidCount = PopCount(validVec.asBools)
  val deqWasteCount = DecodeWidth.U - deqValidCount
  matchBubble := (TopDownCounters.NumStallReasons.id - 1).U - PriorityEncoder(topdown_stage.reasons.reverse)

  io.stallReason.reason.foreach(_ := 0.U)
  for (i <- 0 until DecodeWidth) {
    when(i.U < deqWasteCount) {
      io.stallReason.reason(DecodeWidth - i - 1) := matchBubble
    }
  }

  when(!(deqWasteCount === DecodeWidth.U || topdown_stage.reasons.asUInt.orR)) {
    // should set reason for FetchFragmentationStall
    // topdown_stage.reasons(TopDownCounters.FetchFragmentationStall.id) := true.B
    for (i <- 0 until DecodeWidth) {
      when(i.U < deqWasteCount) {
        io.stallReason.reason(DecodeWidth - i - 1) := TopDownCounters.FetchFragBubble.id.U
      }
    }
  }

  when(io.stallReason.backReason.valid) {
    io.stallReason.reason.foreach(_ := io.stallReason.backReason.bits)
  }

  // Debug info
  XSError(
    deqPtr.value =/= deqBankPtr.value + deqInBankPtr(deqBankPtr.value).value * IBufNBank.asUInt,
    "Dequeue PTR mismatch"
  )
  XSError(isBefore(enqPtr, deqPtr) && !isFull(enqPtr, deqPtr), "\ndeqPtr is older than enqPtr!\n")

  XSDebug(io.flush, "IBuffer Flushed\n")

  when(io.in.fire) {
    XSDebug("Enqueue:\n")
    XSDebug(p"MASK=${Binary(io.in.bits.valid)}\n")
    for (i <- 0 until PredictWidth) {
      XSDebug(p"PC=${Hexadecimal(io.in.bits.pc(i))} ${Hexadecimal(io.in.bits.instrs(i))}\n")
    }
  }

  for (i <- 0 until DecodeWidth) {
    XSDebug(io.out(i).fire,
      p"deq: ${Hexadecimal(io.out(i).bits.instr)} PC=${Hexadecimal(io.out(i).bits.pc)} " +
      p"v=${io.out(i).valid} r=${io.out(i).ready} " +
      p"excpVec=${Binary(io.out(i).bits.exceptionVec.asUInt)} crossPageIPF=${io.out(i).bits.crossPageIPFFix}\n")
  }

  XSDebug(p"numValid: ${numValid}\n")
  XSDebug(p"EnqNum: ${numEnq}\n")
  XSDebug(p"DeqNum: ${numDeq}\n")

  val afterInit = RegInit(false.B)
  val headBubble = RegInit(false.B)
  when (io.in.fire) { afterInit := true.B }
  when (io.flush) {
    headBubble := true.B
  } .elsewhen(numValid =/= 0.U) {
    headBubble := false.B
  }
  val instrHungry = afterInit && (numValid === 0.U) && !headBubble

  QueuePerf(IBufSize, numValid, !allowEnq)
  XSPerfAccumulate("flush", io.flush)
  XSPerfAccumulate("hungry", instrHungry)

  val ibuffer_IDWidth_hvButNotFull = afterInit && (numValid =/= 0.U) && (numValid < DecodeWidth.U) && !headBubble
  XSPerfAccumulate("ibuffer_IDWidth_hvButNotFull", ibuffer_IDWidth_hvButNotFull)

  val FrontBubble = Mux(decodeCanAccept, DecodeWidth.U - numOut, 0.U)

  val perfEvents = Seq(
    ("IBuffer_Flushed  ", io.flush),
    ("IBuffer_hungry   ", instrHungry),
("IBuffer_1_4_valid", (numValid > (0 * (IBufSize / 4)).U) & (numValid < (1 * (IBufSize / 4)).U)), 484 ("IBuffer_2_4_valid", (numValid >= (1 * (IBufSize / 4)).U) & (numValid < (2 * (IBufSize / 4)).U)), 485 ("IBuffer_3_4_valid", (numValid >= (2 * (IBufSize / 4)).U) & (numValid < (3 * (IBufSize / 4)).U)), 486 ("IBuffer_4_4_valid", (numValid >= (3 * (IBufSize / 4)).U) & (numValid < (4 * (IBufSize / 4)).U)), 487 ("IBuffer_full ", numValid.andR), 488 ("Front_Bubble ", FrontBubble) 489 ) 490 generatePerfEvent() 491} 492