xref: /XiangShan/src/main/scala/xiangshan/mem/lsqueue/LoadExceptionBuffer.scala (revision 082b30d1b017bd096bce2e5fc66bac2ab749293a)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.fu.fpu.FPU
import xiangshan.backend.rob.RobLsqIO
import xiangshan.cache._
import xiangshan.frontend.FtqPtr
import xiangshan.ExceptionNO._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.backend.rob.RobPtr

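/**
 * Single-entry buffer that records the oldest load (from the scalar or vector load
 * pipelines) that reported an exception. The buffered request's virtual address,
 * guest-physical address and vector state (vstart/vl) are exposed through
 * `io.exceptionAddr`, presumably so the backend can report the faulting address
 * when the exception is taken.
 */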
class LqExceptionBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val redirect      = Flipped(Valid(new Redirect))
    val req           = Vec(LoadPipelineWidth + VecLoadPipelineWidth, Flipped(Valid(new LqWriteBundle)))
    val exceptionAddr = new ExceptionAddrIO
  })

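  // Single buffered entry: `req` holds the oldest excepting load seen so far and
  // `req_valid` indicates whether the buffer currently holds one.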
  val req_valid = RegInit(false.B)
  val req = Reg(new LqWriteBundle)

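  // Enqueue path: s1 samples the write-backs from the load pipelines; s2, one cycle
  // later, drops anything flushed by a redirect in either cycle and keeps only
  // requests whose exception vector, restricted to load-unit exceptions (LduCfg),
  // is non-empty.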
  // enqueue
  // s1:
  val s1_req = VecInit(io.req.map(_.bits))
  val s1_valid = VecInit(io.req.map(x => x.valid))

  // s2: delay 1 cycle
  val s2_req = (0 until LoadPipelineWidth + VecLoadPipelineWidth).map(i => {
    RegEnable(s1_req(i), s1_valid(i))})
  val s2_valid = (0 until LoadPipelineWidth + VecLoadPipelineWidth).map(i =>
    RegNext(s1_valid(i)) &&
    !s2_req(i).uop.robIdx.needFlush(RegNext(io.redirect)) &&
    !s2_req(i).uop.robIdx.needFlush(io.redirect)
  )
  val s2_has_exception = s2_req.map(x => ExceptionNO.selectByFu(x.uop.exceptionVec, LduCfg).asUInt.orR)

  val s2_enqueue = Wire(Vec(LoadPipelineWidth + VecLoadPipelineWidth, Bool()))
  for (w <- 0 until LoadPipelineWidth + VecLoadPipelineWidth) {
    s2_enqueue(w) := s2_valid(w) && s2_has_exception(w)
  }

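  // Maintain the valid bit: a redirect that flushes the buffered request clears it
  // unless a new excepting load enqueues in the same cycle; otherwise any enqueue
  // sets it.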
  when (req_valid && req.uop.robIdx.needFlush(io.redirect)) {
    req_valid := s2_enqueue.asUInt.orR
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req_valid := true.B
  }

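  // Reduction tree that selects the oldest valid request: compare robIdx first and
  // use uopIdx as a tie-breaker for uops belonging to the same (vector) instruction.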
  def selectOldest[T <: LqWriteBundle](valid: Seq[Bool], bits: Seq[T]): (Seq[Bool], Seq[T]) = {
    assert(valid.length == bits.length)
    if (valid.length == 0 || valid.length == 1) {
      (valid, bits)
    } else if (valid.length == 2) {
      val res = Seq.fill(2)(Wire(ValidIO(chiselTypeOf(bits(0)))))
      for (i <- res.indices) {
        res(i).valid := valid(i)
        res(i).bits := bits(i)
      }
      val oldest = Mux(valid(0) && valid(1),
        Mux(isAfter(bits(0).uop.robIdx, bits(1).uop.robIdx) ||
          (isNotBefore(bits(0).uop.robIdx, bits(1).uop.robIdx) && bits(0).uop.uopIdx > bits(1).uop.uopIdx), res(1), res(0)),
        Mux(valid(0) && !valid(1), res(0), res(1)))
      (Seq(oldest.valid), Seq(oldest.bits))
    } else {
      val left = selectOldest(valid.take(valid.length / 2), bits.take(bits.length / 2))
      val right = selectOldest(valid.takeRight(valid.length - (valid.length / 2)), bits.takeRight(bits.length - (bits.length / 2)))
      selectOldest(left._1 ++ right._1, left._2 ++ right._2)
    }
  }

  val reqSel = selectOldest(s2_enqueue, s2_req)

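  // Replace the buffered request only if the newly selected candidate is older than
  // the one currently held; if the buffer is empty, take the candidate directly.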
  when (req_valid) {
    req := Mux(
      reqSel._1(0) && (isAfter(req.uop.robIdx, reqSel._2(0).uop.robIdx) || (isNotBefore(req.uop.robIdx, reqSel._2(0).uop.robIdx) && req.uop.uopIdx > reqSel._2(0).uop.uopIdx)),
      reqSel._2(0),
      req)
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req := reqSel._2(0)
  }

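  // Expose the faulting load's addresses and vector state; the perf counter below
  // counts cycles in which a new exception is captured (rising edge of req_valid).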
  io.exceptionAddr.vaddr := req.vaddr
  io.exceptionAddr.vstart := req.uop.vpu.vstart
  io.exceptionAddr.vl     := req.uop.vpu.vl
  io.exceptionAddr.gpaddr := req.gpaddr
  XSPerfAccumulate("exception", !RegNext(req_valid) && req_valid)

  // end
}