xref: /XiangShan/src/main/scala/xiangshan/mem/vector/VSegmentUnit.scala (revision 9394f0e7e316bec8d37d0ac5b7b26a9ac68e89fc)
/***************************************************************************************
  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
  * Copyright (c) 2020-2021 Peng Cheng Laboratory
  *
  * XiangShan is licensed under Mulan PSL v2.
  * You can use this software according to the terms and conditions of the Mulan PSL v2.
  * You may obtain a copy of Mulan PSL v2 at:
  *          http://license.coscl.org.cn/MulanPSL2
  *
  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
  *
  * See the Mulan PSL v2 for more details.
  ***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.Bundles._
import xiangshan.mem._
import xiangshan.backend.fu.FuType
import freechips.rocketchip.diplomacy.BufferParams
import xiangshan.cache.mmu._
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.backend.fu.util.SdtrigExt
import xiangshan.ExceptionNO._
import xiangshan.backend.fu.vector.Bundles.VConfig
import xiangshan.backend.fu.vector.Utils.VecDataToMaskDataVec

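/**
 * Per-instruction state latched for the segment instruction currently in flight:
 * base vaddr, translated paddr of the element being accessed, flow mask, element
 * width (alignedType), vl, and the vstart/vaddr/flags recorded when an exception
 * is detected.
 */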
class VSegmentBundle(implicit p: Parameters) extends VLSUBundle
{
  val vaddr            = UInt(VAddrBits.W)
  val uop              = new DynInst
  val paddr            = UInt(PAddrBits.W)
  val mask             = UInt(VLEN.W)
  val valid            = Bool()
  val alignedType      = UInt(alignTypeBits.W)
  val vl               = UInt(elemIdxBits.W)
  val vlmaxInVd        = UInt(elemIdxBits.W)
  val vlmaxMaskInVd    = UInt(elemIdxBits.W)
  // for exception
  val vstart           = UInt(elemIdxBits.W)
  val exceptionvaddr   = UInt(VAddrBits.W)
  val exception_va     = Bool()
  val exception_pa     = Bool()
}

class VSegmentUnit (implicit p: Parameters) extends VLSUModule
  with HasDCacheParameters
  with MemoryOpConstants
  with SdtrigExt
  with HasLoadHelper
{
  val io               = IO(new VSegmentUnitIO)

  val maxSize          = VSegmentBufferSize

  class VSegUPtr(implicit p: Parameters) extends CircularQueuePtr[VSegUPtr](maxSize){
  }

  object VSegUPtr {
    def apply(f: Bool, v: UInt)(implicit p: Parameters): VSegUPtr = {
      val ptr           = Wire(new VSegUPtr)
      ptr.flag         := f
      ptr.value        := v
      ptr
    }
  }

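  // Buffer state: instMicroOp holds the microOp shared by the whole segment
  // instruction; data/pdest/uopIdx/stride are per-uop entries indexed by the
  // circular pointers below (enqPtr for enqueue, deqPtr for writeback,
  // stridePtr/splitPtr for selecting the stride/index and load/store data).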
  // buffer uop
  val instMicroOp       = Reg(new VSegmentBundle)
  val data              = Reg(Vec(maxSize, UInt(VLEN.W)))
  val pdest             = Reg(Vec(maxSize, UInt(PhyRegIdxWidth.W)))
  val uopIdx            = Reg(Vec(maxSize, UopIdx()))
  val stride            = Reg(Vec(maxSize, UInt(VLEN.W)))
  val allocated         = RegInit(VecInit(Seq.fill(maxSize)(false.B)))
  val enqPtr            = RegInit(0.U.asTypeOf(new VSegUPtr))
  val deqPtr            = RegInit(0.U.asTypeOf(new VSegUPtr))
  val stridePtr         = WireInit(0.U.asTypeOf(new VSegUPtr)) // for selecting stride/index

  val segmentIdx        = RegInit(0.U(elemIdxBits.W))
  val fieldIdx          = RegInit(0.U(fieldBits.W))
  val segmentOffset     = RegInit(0.U(VAddrBits.W))
  val splitPtr          = RegInit(0.U.asTypeOf(new VSegUPtr)) // for selecting load/store data
  val splitPtrNext      = WireInit(0.U.asTypeOf(new VSegUPtr))

  val exception_va      = WireInit(false.B)
  val exception_pa      = WireInit(false.B)

  val maxSegIdx         = instMicroOp.vl - 1.U
  val maxNfields        = instMicroOp.uop.vpu.nf

  XSError(segmentIdx > maxSegIdx, s"segmentIdx > vl, something is wrong!\n")
  XSError(fieldIdx > maxNfields, s"fieldIdx > nfields, something is wrong!\n")

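  // Per-element geometry derived from the latched microOp: the effective EMUL
  // (issueEmul), the element index within the current vd (elemIdxInVd), the element
  // index within the current index register (issueIndexIdx), and whether the current
  // segment element is active under the flow mask (segmentActive).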
  // MicroOp
  val baseVaddr                       = instMicroOp.vaddr
  val alignedType                     = instMicroOp.alignedType
  val fuType                          = instMicroOp.uop.fuType
  val mask                            = instMicroOp.mask
  val exceptionVec                    = instMicroOp.uop.exceptionVec
  val issueEew                        = instMicroOp.uop.vpu.veew
  val issueLmul                       = instMicroOp.uop.vpu.vtype.vlmul
  val issueSew                        = instMicroOp.uop.vpu.vtype.vsew
  val issueEmul                       = EewLog2(issueEew) - issueSew + issueLmul
  val elemIdxInVd                     = segmentIdx & instMicroOp.vlmaxMaskInVd
  val issueInstType                   = Cat(true.B, instMicroOp.uop.fuOpType(6, 5)) // always a segment instruction
  val issueVLMAXLog2                  = GenVLMAXLog2(
    Mux(issueLmul.asSInt > 0.S, 0.U, issueLmul),
    Mux(isIndexed(issueInstType), issueSew(1, 0), issueEew(1, 0))
  ) // log2 of the max element number in vd
  val issueVlMax                      = instMicroOp.vlmaxInVd // max elementIdx in vd
  val issueMaxIdxInIndex              = GenVLMAX(Mux(issueEmul.asSInt > 0.S, 0.U, issueEmul), issueEew) // max element number in an index register
  val issueMaxIdxInIndexMask          = UIntToMask(issueMaxIdxInIndex, elemIdxBits)
  val issueMaxIdxInIndexLog2          = GenVLMAXLog2(Mux(issueEmul.asSInt > 0.S, 0.U, issueEmul), issueEew)
  val issueIndexIdx                   = segmentIdx & issueMaxIdxInIndexMask
  val segmentActive                   = (mask & UIntToOH(elemIdxInVd)).orR

  // Segment instruction's FSM
  /*
  * s_idle: wait for a request
  * s_flush_sbuffer_req: flush the sbuffer
  * s_wait_flush_sbuffer_resp: wait until the sbuffer is empty
  * s_tlb_req: send the tlb request
  * s_wait_tlb_resp: wait for the tlb resp
  * s_pm: check pmp
  * s_cache_req: send the dcache request
  * s_cache_resp: wait for the dcache resp
  * s_latch_and_merge_data: merge the loaded data
  * s_send_data: send the store data to the sbuffer
  * s_finish: wait for uop writeback
  * */
  val s_idle :: s_flush_sbuffer_req :: s_wait_flush_sbuffer_resp :: s_tlb_req :: s_wait_tlb_resp :: s_pm :: s_cache_req :: s_cache_resp :: s_latch_and_merge_data :: s_send_data :: s_finish :: Nil = Enum(11)
  val state             = RegInit(s_idle)
  val stateNext         = WireInit(s_idle)
  val sbufferEmpty      = io.flush_sbuffer.empty

  /**
   * state update
   */
  state  := stateNext

  /**
   * state transition
   */
  when(state === s_idle){
    stateNext := Mux(isAfter(enqPtr, deqPtr), s_flush_sbuffer_req, s_idle)
  }.elsewhen(state === s_flush_sbuffer_req){
    stateNext := Mux(sbufferEmpty, s_tlb_req, s_wait_flush_sbuffer_resp) // if the sbuffer is already empty, go straight to the tlb query

  }.elsewhen(state === s_wait_flush_sbuffer_resp){
    stateNext := Mux(sbufferEmpty, s_tlb_req, s_wait_flush_sbuffer_resp)

  }.elsewhen(state === s_tlb_req){
    stateNext := Mux(segmentActive, s_wait_tlb_resp, Mux(FuType.isVLoad(instMicroOp.uop.fuType), s_latch_and_merge_data, s_send_data))

  }.elsewhen(state === s_wait_tlb_resp){
    stateNext := Mux(!io.dtlb.resp.bits.miss && io.dtlb.resp.fire, s_pm, s_tlb_req)

  }.elsewhen(state === s_pm){
    /* a vector store sends its data to the sbuffer, so it does not need to query the dcache */
    stateNext := Mux(exception_pa || exception_va,
                     s_finish,
                     Mux(FuType.isVLoad(instMicroOp.uop.fuType), s_cache_req, s_send_data))

  }.elsewhen(state === s_cache_req){
    stateNext := Mux(io.rdcache.req.fire, s_cache_resp, s_cache_req)

  }.elsewhen(state === s_cache_resp){
    when(io.rdcache.resp.fire) {
      when(io.rdcache.resp.bits.miss) {
        stateNext := s_cache_req
      }.otherwise {
        stateNext := Mux(FuType.isVLoad(instMicroOp.uop.fuType), s_latch_and_merge_data, s_send_data)
      }
    }.otherwise{
      stateNext := s_cache_resp
    }

  }.elsewhen(state === s_latch_and_merge_data) {
    when((segmentIdx === maxSegIdx) && (fieldIdx === maxNfields)) {
      stateNext := s_finish // segment instruction finished
    }.otherwise {
      stateNext := s_tlb_req // need to continue
    }

  }.elsewhen(state === s_send_data) { // when the sbuffer accepts the data
    when(!io.sbuffer.fire && segmentActive) {
      stateNext := s_send_data
    }.elsewhen((segmentIdx === maxSegIdx) && (fieldIdx === maxNfields)) {
      stateNext := s_finish // segment instruction finished
    }.otherwise {
      stateNext := s_tlb_req // need to continue
    }
  }.elsewhen(state === s_finish){ // writeback uop
    stateNext := Mux(distanceBetween(enqPtr, deqPtr) === 0.U, s_idle, s_finish)

  }.otherwise{
    stateNext := s_idle
    XSError(true.B, s"Unknown state!\n")
  }

  /*************************************************************************
   *                            enqueue logic
   *************************************************************************/
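  // io.in.ready is tied high; the buffer (VSegmentBufferSize entries) is presumably
  // sized so that every uop of one segment instruction can be accepted. The first uop
  // latches the shared microOp and flow mask; every uop also latches its vs3 data,
  // stride/index operand, uopIdx and pdest into the entry selected by enqPtr.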
  io.in.ready                         := true.B
  val fuOpType                         = io.in.bits.uop.fuOpType
  val vtype                            = io.in.bits.uop.vpu.vtype
  val mop                              = fuOpType(6, 5)
  val instType                         = Cat(true.B, mop)
  val eew                              = io.in.bits.uop.vpu.veew
  val sew                              = vtype.vsew
  val lmul                             = vtype.vlmul
  val vl                               = instMicroOp.vl
  val vm                               = instMicroOp.uop.vpu.vm
  val vstart                           = instMicroOp.uop.vpu.vstart
  val srcMask                          = GenFlowMask(Mux(vm, Fill(VLEN, 1.U(1.W)), io.in.bits.src_mask), vstart, vl, true)
  // on the first uop enqueue, latch the microOp of the segment instruction
  when(io.in.fire && !instMicroOp.valid){
    val vlmaxInVd                      = GenVLMAX(Mux(lmul.asSInt > 0.S, 0.U, lmul), Mux(isIndexed(instType), sew(1, 0), eew(1, 0))) // element number in a vd
    instMicroOp.vaddr                 := io.in.bits.src_rs1(VAddrBits - 1, 0)
    instMicroOp.valid                 := true.B // set by the first uop
    instMicroOp.alignedType           := Mux(isIndexed(instType), sew(1, 0), eew(1, 0))
    instMicroOp.uop                   := io.in.bits.uop
    instMicroOp.mask                  := srcMask
    instMicroOp.vstart                := 0.U
    instMicroOp.vlmaxInVd             := vlmaxInVd
    instMicroOp.vlmaxMaskInVd         := UIntToMask(vlmaxInVd, elemIdxBits) // for merging data
    instMicroOp.vl                    := io.in.bits.src_vl.asTypeOf(VConfig()).vl
    segmentOffset                     := 0.U
  }
  // latch data
  when(io.in.fire){
    data(enqPtr.value)                := io.in.bits.src_vs3
    stride(enqPtr.value)              := io.in.bits.src_stride
    uopIdx(enqPtr.value)              := io.in.bits.uop.vpu.vuopIdx
    pdest(enqPtr.value)               := io.in.bits.uop.pdest
  }

  // update enqPtr; only one enqueue port
  when(io.in.fire){
    enqPtr                            := enqPtr + 1.U
  }

  /*************************************************************************
   *                            output logic
   *************************************************************************/

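  // Element address: base vaddr + field offset (fieldIdx << element width) + segment
  // offset. For indexed accesses the segment offset comes from the index register
  // selected by stridePtr; otherwise it is the offset accumulated in segmentOffset.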
  val indexStride                     = IndexAddr( // index offset for indexed instructions
                                                    index = stride(stridePtr.value),
                                                    flow_inner_idx = issueIndexIdx,
                                                    eew = issueEew
                                                  )
  val realSegmentOffset               = Mux(isIndexed(issueInstType),
                                            indexStride,
                                            segmentOffset)
  val vaddr                           = baseVaddr + (fieldIdx << alignedType).asUInt + realSegmentOffset
  /**
   * tlb req and tlb resp
   */

  // query DTLB IO Assign
  io.dtlb.req                         := DontCare
  io.dtlb.resp.ready                  := true.B
  io.dtlb.req.valid                   := state === s_tlb_req && segmentActive
  io.dtlb.req.bits.cmd                := Mux(FuType.isVLoad(fuType), TlbCmd.read, TlbCmd.write)
  io.dtlb.req.bits.vaddr              := vaddr
  io.dtlb.req.bits.size               := instMicroOp.alignedType(2,0)
  io.dtlb.req.bits.memidx.is_ld       := FuType.isVLoad(fuType)
  io.dtlb.req.bits.memidx.is_st       := FuType.isVStore(fuType)
  io.dtlb.req.bits.debug.robIdx       := instMicroOp.uop.robIdx
  io.dtlb.req.bits.no_translate       := false.B
  io.dtlb.req.bits.debug.pc           := instMicroOp.uop.pc
  io.dtlb.req.bits.debug.isFirstIssue := DontCare
  io.dtlb.req_kill                    := false.B

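  // The TLB response reports page/access faults, which are latched into the uop's
  // exceptionVec; the translated paddr is captured only when the response is a hit.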
  // tlb resp
  when(io.dtlb.resp.fire && state === s_wait_tlb_resp){
      exceptionVec(storePageFault)    := io.dtlb.resp.bits.excp(0).pf.st
      exceptionVec(loadPageFault)     := io.dtlb.resp.bits.excp(0).pf.ld
      exceptionVec(storeAccessFault)  := io.dtlb.resp.bits.excp(0).af.st
      exceptionVec(loadAccessFault)   := io.dtlb.resp.bits.excp(0).af.ld
      when(!io.dtlb.resp.bits.miss){
        instMicroOp.paddr             := io.dtlb.resp.bits.paddr(0)
      }
  }
  // pmp
  // NOTE: only load/store exceptions are handled here; if another exception happens, do not send it here
  val pmp = WireInit(io.pmpResp)
  when(state === s_pm){
    exception_va := exceptionVec(storePageFault) || exceptionVec(loadPageFault) ||
    exceptionVec(storeAccessFault) || exceptionVec(loadAccessFault)
    exception_pa := pmp.st || pmp.ld

    instMicroOp.exception_pa       := exception_pa
    instMicroOp.exception_va       := exception_va
    // update access fault bits with the pmp result
    exceptionVec(loadAccessFault)  := exceptionVec(loadAccessFault) || pmp.ld
    exceptionVec(storeAccessFault) := exceptionVec(storeAccessFault) || pmp.st

    when(exception_va || exception_pa){
      instMicroOp.exceptionvaddr     := vaddr
      instMicroOp.vl                 := segmentIdx // for exception
      instMicroOp.vstart             := segmentIdx // for exception
    }
  }

  /**
   * flush sbuffer IO Assign
   */
  io.flush_sbuffer.valid           := !sbufferEmpty && (state === s_flush_sbuffer_req)


  /**
   * merge data for load
   */
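  // Select the load data from the 128-bit dcache response: vaddr(3,0) gives the byte
  // offset within the line data, and the matching byte-aligned slice of data_delayed
  // is picked out below.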
  val cacheData = LookupTree(vaddr(3,0), List(
    "b0000".U -> io.rdcache.resp.bits.data_delayed(63,    0),
    "b0001".U -> io.rdcache.resp.bits.data_delayed(63,    8),
    "b0010".U -> io.rdcache.resp.bits.data_delayed(63,   16),
    "b0011".U -> io.rdcache.resp.bits.data_delayed(63,   24),
    "b0100".U -> io.rdcache.resp.bits.data_delayed(63,   32),
    "b0101".U -> io.rdcache.resp.bits.data_delayed(63,   40),
    "b0110".U -> io.rdcache.resp.bits.data_delayed(63,   48),
    "b0111".U -> io.rdcache.resp.bits.data_delayed(63,   56),
    "b1000".U -> io.rdcache.resp.bits.data_delayed(127,  64),
    "b1001".U -> io.rdcache.resp.bits.data_delayed(127,  72),
    "b1010".U -> io.rdcache.resp.bits.data_delayed(127,  80),
    "b1011".U -> io.rdcache.resp.bits.data_delayed(127,  88),
    "b1100".U -> io.rdcache.resp.bits.data_delayed(127,  96),
    "b1101".U -> io.rdcache.resp.bits.data_delayed(127, 104),
    "b1110".U -> io.rdcache.resp.bits.data_delayed(127, 112),
    "b1111".U -> io.rdcache.resp.bits.data_delayed(127, 120)
  ))
  val pickData  = rdataVecHelper(alignedType(1,0), cacheData)
  val mergedData = mergeDataWithElemIdx(
    oldData = data(splitPtr.value),
    newData = Seq(pickData),
    alignedType = alignedType(1,0),
    elemIdx = Seq(elemIdxInVd),
    valids = Seq(true.B)
  )
  when(state === s_latch_and_merge_data && segmentActive){
    data(splitPtr.value) := mergedData
  }
  /**
   * split data for store
   * */
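  // Extract the element at elemIdxInVd from the buffered data entry and build the
  // byte write mask; the mask is forced to zero when the segment element is inactive.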
  val splitData = genVSData(
    data = data(splitPtr.value),
    elemIdx = elemIdxInVd,
    alignedType = alignedType
  )
  val flowData  = genVWdata(splitData, alignedType) // TODO: connect vstd, pass vector data
  val wmask     = genVWmask(vaddr, alignedType(1, 0)) & Fill(VLENB, segmentActive)

  /**
   * rdcache req; a store request does not need to query the dcache, because store elements are written to the sbuffer
   */
  io.rdcache.req                    := DontCare
  io.rdcache.req.valid              := state === s_cache_req && FuType.isVLoad(fuType)
  io.rdcache.req.bits.cmd           := MemoryOpConstants.M_XRD
  io.rdcache.req.bits.vaddr         := vaddr
  io.rdcache.req.bits.mask          := mask
  io.rdcache.req.bits.data          := flowData
  io.rdcache.pf_source              := LOAD_SOURCE.U
  io.rdcache.req.bits.id            := DontCare
  io.rdcache.resp.ready             := true.B
  io.rdcache.s1_paddr_dup_lsu       := instMicroOp.paddr
  io.rdcache.s1_paddr_dup_dcache    := instMicroOp.paddr
  io.rdcache.s1_kill                := false.B
  io.rdcache.s2_kill                := false.B
  if (env.FPGAPlatform){
    io.rdcache.s0_pc                := DontCare
    io.rdcache.s1_pc                := DontCare
    io.rdcache.s2_pc                := DontCare
  }else{
    io.rdcache.s0_pc                := instMicroOp.uop.pc
    io.rdcache.s1_pc                := instMicroOp.uop.pc
    io.rdcache.s2_pc                := instMicroOp.uop.pc
  }
  io.rdcache.replacementUpdated     := false.B
  io.rdcache.is128Req               := false.B


  /**
   * write data to sbuffer
   * */

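  // Each active store element is written to the sbuffer as a single flow with its own
  // virtual address, physical address and byte mask; vecValid mirrors valid here.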
  io.sbuffer.bits                  := DontCare
  io.sbuffer.valid                 := state === s_send_data && segmentActive
  io.sbuffer.bits.vecValid         := state === s_send_data && segmentActive
  io.sbuffer.bits.mask             := wmask
  io.sbuffer.bits.data             := flowData
  io.sbuffer.bits.vaddr            := vaddr
  io.sbuffer.bits.cmd              := MemoryOpConstants.M_XWR
  io.sbuffer.bits.id               := DontCare
  io.sbuffer.bits.addr             := instMicroOp.paddr

  /**
   * update ptr
   * */

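  // splitPtrOffset is the number of buffer entries spanned by one field's register
  // group: one entry when LMUL is fractional, 2^lmul entries otherwise. When the last
  // field of a segment completes, splitPtrNext points back at the entry holding the
  // next segment's first field (deqPtr plus the vd index); otherwise it advances to
  // the next field's entry.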
  val splitPtrOffset = Mux(lmul.asSInt < 0.S, 1.U, (1.U << lmul).asUInt)
  splitPtrNext :=
    Mux(fieldIdx === maxNfields,
     (deqPtr + ((segmentIdx +& 1.U) >> issueVLMAXLog2).asUInt), // segment finished
     (splitPtr + splitPtrOffset)) // next field
  dontTouch(issueVLMAXLog2)
  dontTouch(splitPtrNext)
  dontTouch(stridePtr)

  // update splitPtr
  when(state === s_latch_and_merge_data || state === s_send_data){
    splitPtr := splitPtrNext
  }.elsewhen(io.in.fire && !instMicroOp.valid){
    splitPtr := deqPtr // initial splitPtr
  }

  // update stridePtr, only used by indexed accesses
  val strideOffset = Mux(isIndexed(issueInstType), (segmentIdx +& 1.U) >> issueMaxIdxInIndexLog2, 0.U)
  stridePtr       := deqPtr + strideOffset

  // update fieldIdx
  when(io.in.fire && !instMicroOp.valid){
    fieldIdx := 0.U
  }.elsewhen(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data)){
    fieldIdx := 0.U
  }.elsewhen((state === s_latch_and_merge_data || state === s_send_data)){
    fieldIdx := fieldIdx + 1.U
  }.elsewhen(!segmentActive){ // if the segment is inactive, skip it
    fieldIdx := maxNfields
  }
  // update segmentIdx
  when(io.in.fire && !instMicroOp.valid){
    segmentIdx := 0.U
  }.elsewhen(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data) && segmentIdx =/= maxSegIdx){
    segmentIdx := segmentIdx + 1.U
  }

  // update segmentOffset
  when(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data)){
    segmentOffset := segmentOffset + Mux(isUnitStride(issueInstType), (maxNfields +& 1.U) << issueEew, stride(stridePtr.value))
  }

  // update deqPtr
  when(io.uopwriteback.fire){
    deqPtr := deqPtr + 1.U
  }

  /*************************************************************************
   *                            dequeue logic
   *************************************************************************/
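  // Writeback: one uop per cycle from deqPtr while in s_finish. vdIdxInField selects
  // which slice of the instruction's flow mask belongs to this destination register.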
  val uopIdxInField = GenUopIdxInField(instType, issueEmul, issueLmul, uopIdx(deqPtr.value))
  val vdIdxInField  = GenVdIdxInField(instType, issueEmul, issueLmul, uopIdxInField) // for merging old vd
  /* select the mask of vd; may be removed in the future */
  val realEw        = Mux(isIndexed(issueInstType), issueSew(1, 0), issueEew(1, 0))
  val maskDataVec: Vec[UInt] = VecDataToMaskDataVec(instMicroOp.mask, realEw)
  val maskUsed      = maskDataVec(vdIdxInField)

  when(stateNext === s_idle){
    instMicroOp.valid := false.B
  }
  io.uopwriteback.valid               := (state === s_finish) && distanceBetween(enqPtr, deqPtr) =/= 0.U
  io.uopwriteback.bits.uop            := instMicroOp.uop
  io.uopwriteback.bits.mask.get       := instMicroOp.mask
  io.uopwriteback.bits.data           := data(deqPtr.value)
  io.uopwriteback.bits.vdIdx.get      := vdIdxInField
  io.uopwriteback.bits.uop.vpu.vl     := instMicroOp.vl
  io.uopwriteback.bits.uop.vpu.vstart := instMicroOp.vstart
  io.uopwriteback.bits.uop.vpu.vmask  := maskUsed
  io.uopwriteback.bits.uop.pdest      := pdest(deqPtr.value)
  io.uopwriteback.bits.debug          := DontCare
  io.uopwriteback.bits.vdIdxInField.get := DontCare

  // to RS
  io.feedback.valid                   := state === s_finish
  io.feedback.bits.hit                := true.B
  io.feedback.bits.robIdx             := instMicroOp.uop.robIdx
  io.feedback.bits.sourceType         := DontCare
  io.feedback.bits.flushState         := DontCare
  io.feedback.bits.dataInvalidSqIdx   := DontCare
  io.feedback.bits.uopIdx.get         := uopIdx(deqPtr.value)

  // exception
  io.exceptionAddr                    := DontCare // TODO: fix it when handling exceptions
}