// xref: /XiangShan/src/main/scala/xiangshan/mem/lsqueue/LoadExceptionBuffer.scala (revision 0466583513e4c1ddbbb566b866b8963635acb20f)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
17package xiangshan.mem
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import utils._
23import utility._
24import xiangshan._
25import xiangshan.backend.fu.fpu.FPU
26import xiangshan.backend.rob.RobLsqIO
27import xiangshan.cache._
28import xiangshan.frontend.FtqPtr
29import xiangshan.ExceptionNO._
30import xiangshan.cache.wpu.ReplayCarry
31import xiangshan.backend.rob.RobPtr
32
/** Single-entry buffer that tracks the oldest load instruction (by ROB index)
  * which raised a load-unit exception, and exposes its virtual address so the
  * ROB can report the faulting address at commit time.
  *
  * Enqueue is a two-stage pipeline: s1 latches requests from the load
  * pipelines; s2 (one cycle later) filters out requests squashed by a
  * redirect and keeps only those carrying a load exception. Among the
  * surviving requests and the currently buffered one, the oldest wins.
  */
class LqExceptionBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val redirect      = Flipped(Valid(new Redirect))                       // flush notification
    val req           = Vec(LoadPipelineWidth, Flipped(Valid(new LqWriteBundle))) // per-pipeline writeback
    val exceptionAddr = new ExceptionAddrIO                                // faulting-address output
  })

  // The single buffered entry and its valid bit.
  val req_valid = RegInit(false.B)
  val req = Reg(new LqWriteBundle)

  // --- enqueue pipeline ---
  // s1: capture requests from all load pipelines.
  val s1_req = VecInit(io.req.map(_.bits))
  val s1_valid = VecInit(io.req.map(x => x.valid))

  // s2: one cycle later. A request survives only if it is not flushed by the
  // redirect of the previous cycle (RegNext(io.redirect)) nor by the redirect
  // arriving this cycle.
  val s2_req = RegNext(s1_req)
  val s2_valid = (0 until LoadPipelineWidth).map(i =>
    RegNext(s1_valid(i)) &&
    !s2_req(i).uop.robIdx.needFlush(RegNext(io.redirect)) &&
    !s2_req(i).uop.robIdx.needFlush(io.redirect)
  )
  // Only load-unit exceptions (selected against lduCfg) are buffered here.
  val s2_has_exception = s2_req.map(x => ExceptionNO.selectByFu(x.uop.cf.exceptionVec, lduCfg).asUInt.orR)

  val s2_enqueue = Wire(Vec(LoadPipelineWidth, Bool()))
  for (w <- 0 until LoadPipelineWidth) {
    s2_enqueue(w) := s2_valid(w) && s2_has_exception(w)
  }

  // Valid-bit update:
  // - if the buffered entry is flushed this cycle, it stays valid only when a
  //   new request enqueues in the same cycle (the newcomer takes its place);
  // - otherwise, any enqueue marks the buffer valid.
  when (req_valid && req.uop.robIdx.needFlush(io.redirect)) {
    req_valid := s2_enqueue.asUInt.orR
  } .elsewhen (s2_enqueue.asUInt.orR) {
    // was `req_valid || true.B`, which is constant true — written directly.
    req_valid := true.B
  }

  /** Tournament reduction that selects the oldest valid request by ROB index.
    *
    * @param valid per-candidate valid bits
    * @param bits  per-candidate payloads (must parallel `valid`)
    * @return a (valid, bits) pair of length 1 holding the oldest candidate;
    *         when exactly one input is valid, that input is chosen; when none
    *         is valid, the result's valid bit is false.
    */
  def selectOldest[T <: LqWriteBundle](valid: Seq[Bool], bits: Seq[T]): (Seq[Bool], Seq[T]) = {
    assert(valid.length == bits.length)
    if (valid.length == 0 || valid.length == 1) {
      (valid, bits)
    } else if (valid.length == 2) {
      val res = Seq.fill(2)(Wire(ValidIO(chiselTypeOf(bits(0)))))
      for (i <- res.indices) {
        res(i).valid := valid(i)
        res(i).bits := bits(i)
      }
      // Both valid: keep the one with the older (smaller) robIdx.
      // One valid: keep it. None valid: result is simply invalid.
      val oldest = Mux(valid(0) && valid(1), Mux(isAfter(bits(0).uop.robIdx, bits(1).uop.robIdx), res(1), res(0)), Mux(valid(0) && !valid(1), res(0), res(1)))
      (Seq(oldest.valid), Seq(oldest.bits))
    } else {
      // Divide and conquer: select the oldest of each half, then of the pair.
      val left = selectOldest(valid.take(valid.length / 2), bits.take(bits.length / 2))
      val right = selectOldest(valid.takeRight(valid.length - (valid.length / 2)), bits.takeRight(bits.length - (bits.length / 2)))
      selectOldest(left._1 ++ right._1, left._2 ++ right._2)
    }
  }

  val reqSel = selectOldest(s2_enqueue, s2_req)

  // Entry update: replace the buffered entry when the newly selected request
  // is older than it; first enqueue into an empty buffer takes it directly.
  // NOTE(review): when the buffered entry is being flushed this cycle
  // (req_valid still high), the age comparison below still uses the stale
  // entry's robIdx — confirm a younger incoming request cannot be wrongly
  // rejected in that corner case. Behavior preserved as-is here.
  when (req_valid) {
    req := Mux(reqSel._1(0) && isAfter(req.uop.robIdx, reqSel._2(0).uop.robIdx), reqSel._2(0), req)
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req := reqSel._2(0)
  }

  // Expose the buffered faulting virtual address. NOTE(review): only vaddr is
  // driven here — confirm remaining ExceptionAddrIO fields are driven elsewhere.
  io.exceptionAddr.vaddr := req.vaddr
  // Count cycles where the buffer transitions from empty to holding an exception.
  XSPerfAccumulate("exception", !RegNext(req_valid) && req_valid)

  // end
}