xref: /XiangShan/src/main/scala/xiangshan/mem/lsqueue/LoadExceptionBuffer.scala (revision 870f462d572cd0ef6bf86c91dcda5a5fab6e99d3)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.FuConfig._
import xiangshan.backend.fu.fpu.FPU
import xiangshan.backend.rob.RobLsqIO
import xiangshan.cache._
import xiangshan.frontend.FtqPtr
import xiangshan.ExceptionNO._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.backend.rob.RobPtr
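/**
 * Single-entry buffer that tracks, among the loads written back from the load pipelines,
 * the oldest one (by robIdx) that raised an exception. Its virtual address is exposed
 * through io.exceptionAddr so that the backend can report the faulting address
 * (e.g. to the *tval CSRs) when the exception is taken.
 */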
class LqExceptionBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val redirect      = Flipped(Valid(new Redirect))
    val req           = Vec(LoadPipelineWidth, Flipped(Valid(new LqWriteBundle)))
    val exceptionAddr = new ExceptionAddrIO
  })
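  // the buffer holds at most one request: the oldest excepting load seen so far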
  val req_valid = RegInit(false.B)
  val req = Reg(new LqWriteBundle)
  // enqueue
  // s1: requests arriving from the load pipelines this cycle
  val s1_req = VecInit(io.req.map(_.bits))
  val s1_valid = VecInit(io.req.map(x => x.valid))
  // s2: delay 1 cycle
  val s2_req = RegNext(s1_req)
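  // a request is dropped if its robIdx is flushed by either the redirect seen in s1
  // (now RegNext(io.redirect)) or the redirect arriving in s2 (io.redirect)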
  val s2_valid = (0 until LoadPipelineWidth).map(i =>
    RegNext(s1_valid(i)) &&
    !s2_req(i).uop.robIdx.needFlush(RegNext(io.redirect)) &&
    !s2_req(i).uop.robIdx.needFlush(io.redirect)
  )
  val s2_has_exception = s2_req.map(x => ExceptionNO.selectByFu(x.uop.exceptionVec, LduCfg).asUInt.orR)
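  // a lane enqueues only if it survived the redirect checks and its uop carries a load exception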
  val s2_enqueue = Wire(Vec(LoadPipelineWidth, Bool()))
  for (w <- 0 until LoadPipelineWidth) {
    s2_enqueue(w) := s2_valid(w) && s2_has_exception(w)
  }
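  // req_valid update: if the pending request is flushed, keep the buffer valid only when a
  // new excepting load enqueues this cycle; otherwise any enqueue sets it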
  when (req_valid && req.uop.robIdx.needFlush(io.redirect)) {
    req_valid := s2_enqueue.asUInt.orR
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req_valid := true.B
  }
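  // selectOldest: tournament reduction over the per-lane requests. It recursively splits the
  // inputs in half, picks the entry with the older robIdx (earlier in program order, via isAfter)
  // from each pair, and returns a single (valid, bits) pair. If only one side of a pair is valid,
  // that side wins; if neither is valid, the result is simply marked invalid.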
  def selectOldest[T <: LqWriteBundle](valid: Seq[Bool], bits: Seq[T]): (Seq[Bool], Seq[T]) = {
    assert(valid.length == bits.length)
    if (valid.length == 0 || valid.length == 1) {
      (valid, bits)
    } else if (valid.length == 2) {
      val res = Seq.fill(2)(Wire(ValidIO(chiselTypeOf(bits(0)))))
      for (i <- res.indices) {
        res(i).valid := valid(i)
        res(i).bits := bits(i)
      }
      val oldest = Mux(valid(0) && valid(1),
        Mux(isAfter(bits(0).uop.robIdx, bits(1).uop.robIdx), res(1), res(0)),
        Mux(valid(0) && !valid(1), res(0), res(1)))
      (Seq(oldest.valid), Seq(oldest.bits))
    } else {
      val left = selectOldest(valid.take(valid.length / 2), bits.take(bits.length / 2))
      val right = selectOldest(valid.takeRight(valid.length - (valid.length / 2)), bits.takeRight(bits.length - (bits.length / 2)))
      selectOldest(left._1 ++ right._1, left._2 ++ right._2)
    }
  }
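  // reqSel._1(0): whether any lane enqueues a new excepting load this cycle
  // reqSel._2(0): the oldest such request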
  val reqSel = selectOldest(s2_enqueue, s2_req)
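  // if a request is already buffered, replace it only when the newly selected one is older;
  // otherwise capture the new request on enqueue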
  when (req_valid) {
    req := Mux(reqSel._1(0) && isAfter(req.uop.robIdx, reqSel._2(0).uop.robIdx), reqSel._2(0), req)
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req := reqSel._2(0)
  }
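  // expose the virtual address of the tracked excepting load; the backend reads it
  // (e.g. for the *tval CSRs) when the exception is actually taken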
  io.exceptionAddr.vaddr := req.vaddr
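  // count rising edges of req_valid, i.e. newly captured load exceptions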
  XSPerfAccumulate("exception", !RegNext(req_valid) && req_valid)
  // end
}
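// Illustrative sketch (not part of the original file): one possible way a parent module,
// such as the load queue, could wire up this buffer. The wrapper class and its port names
// (loadWriteback, etc.) are assumptions made for this example; only the types it uses
// (Redirect, LqWriteBundle, ExceptionAddrIO) come from the file above.
class LqExceptionBufferUsageSketch(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val redirect      = Flipped(Valid(new Redirect))
    val loadWriteback = Vec(LoadPipelineWidth, Flipped(Valid(new LqWriteBundle)))
    val exceptionAddr = new ExceptionAddrIO
  })

  val exceptionBuffer = Module(new LqExceptionBuffer)
  // forward the redirect and the per-pipeline writeback requests
  exceptionBuffer.io.redirect := io.redirect
  for (i <- 0 until LoadPipelineWidth) {
    exceptionBuffer.io.req(i) := io.loadWriteback(i)
  }
  // pass the exception address port through to the outside
  io.exceptionAddr <> exceptionBuffer.io.exceptionAddr
}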