// xref: /XiangShan/src/main/scala/xiangshan/mem/vector/VSegmentUnit.scala (revision 88afa79dd5e45b8c505e64170ddb3c3632710a3a)
/***************************************************************************************
  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
  * Copyright (c) 2020-2021 Peng Cheng Laboratory
  *
  * XiangShan is licensed under Mulan PSL v2.
  * You can use this software according to the terms and conditions of the Mulan PSL v2.
  * You may obtain a copy of Mulan PSL v2 at:
  *          http://license.coscl.org.cn/MulanPSL2
  *
  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
  *
  * See the Mulan PSL v2 for more details.
  ***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.Bundles._
import xiangshan.mem._
import xiangshan.backend.fu.FuType
import freechips.rocketchip.diplomacy.BufferParams
import xiangshan.cache.mmu._
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.backend.fu.util.SdtrigExt
import xiangshan.ExceptionNO._
import xiangshan.backend.fu.vector.Bundles.VConfig
import xiangshan.backend.fu.vector.Utils.VecDataToMaskDataVec

class VSegmentBundle(implicit p: Parameters) extends VLSUBundle
{
  val vaddr            = UInt(VAddrBits.W)
  val uop              = new DynInst
  val paddr            = UInt(PAddrBits.W)
  val mask             = UInt(VLEN.W)
  val valid            = Bool()
  val alignedType      = UInt(alignTypeBits.W)
  val vl               = UInt(elemIdxBits.W)
  val vlmaxInVd        = UInt(elemIdxBits.W)
  val vlmaxMaskInVd    = UInt(elemIdxBits.W)
  // for exception
  val vstart           = UInt(elemIdxBits.W)
  val exceptionvaddr   = UInt(VAddrBits.W)
  val exception_va     = Bool()
  val exception_pa     = Bool()
}

class VSegmentUnit (implicit p: Parameters) extends VLSUModule
  with HasDCacheParameters
  with MemoryOpConstants
  with SdtrigExt
  with HasLoadHelper
{
  val io               = IO(new VSegmentUnitIO)

  val maxSize          = VSegmentBufferSize

  class VSegUPtr(implicit p: Parameters) extends CircularQueuePtr[VSegUPtr](maxSize){
  }
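  // Note (illustrative): CircularQueuePtr carries a (flag, value) pair; the flag
  // toggles on each wrap-around so that isAfter/distanceBetween can tell a full
  // queue from an empty one even when the two pointer values are equal.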

  object VSegUPtr {
    def apply(f: Bool, v: UInt)(implicit p: Parameters): VSegUPtr = {
      val ptr           = Wire(new VSegUPtr)
      ptr.flag         := f
      ptr.value        := v
      ptr
    }
  }

  // buffer uop
  val instMicroOp       = Reg(new VSegmentBundle)
  val data              = Reg(Vec(maxSize, UInt(VLEN.W)))
  val pdest             = Reg(Vec(maxSize, UInt(PhyRegIdxWidth.W)))
  val uopIdx            = Reg(Vec(maxSize, UopIdx()))
  val stride            = Reg(Vec(maxSize, UInt(VLEN.W)))
  val allocated         = RegInit(VecInit(Seq.fill(maxSize)(false.B)))
  val enqPtr            = RegInit(0.U.asTypeOf(new VSegUPtr))
  val deqPtr            = RegInit(0.U.asTypeOf(new VSegUPtr))
  val stridePtr         = WireInit(0.U.asTypeOf(new VSegUPtr)) // selects the stride/index entry

  val segmentIdx        = RegInit(0.U(elemIdxBits.W))
  val fieldIdx          = RegInit(0.U(fieldBits.W))
  val segmentOffset     = RegInit(0.U(VAddrBits.W))
  val splitPtr          = RegInit(0.U.asTypeOf(new VSegUPtr)) // selects the load/store data entry
  val splitPtrNext      = WireInit(0.U.asTypeOf(new VSegUPtr))

  val exception_va      = WireInit(false.B)
  val exception_pa      = WireInit(false.B)

  val maxSegIdx         = instMicroOp.vl - 1.U
  val maxNfields        = instMicroOp.uop.vpu.nf
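  // Note: vpu.nf encodes (number of fields - 1), so each segment contains
  // (maxNfields + 1) fields and fieldIdx counts from 0 up to maxNfields.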

  XSError(segmentIdx > maxSegIdx, "segmentIdx > vl, something went wrong!\n")
  XSError(fieldIdx > maxNfields, "fieldIdx > nfields, something went wrong!\n")

  // MicroOp
  val baseVaddr                       = instMicroOp.vaddr
  val alignedType                     = instMicroOp.alignedType
  val fuType                          = instMicroOp.uop.fuType
  val mask                            = instMicroOp.mask
  val exceptionVec                    = instMicroOp.uop.exceptionVec
  val issueEew                        = instMicroOp.uop.vpu.veew
  val issueLmul                       = instMicroOp.uop.vpu.vtype.vlmul
  val issueSew                        = instMicroOp.uop.vpu.vtype.vsew
  val issueEmul                       = EewLog2(issueEew) - issueSew + issueLmul
  val elemIdxInVd                     = segmentIdx & instMicroOp.vlmaxMaskInVd
  val issueInstType                   = Cat(true.B, instMicroOp.uop.fuOpType(6, 5)) // always a segment instruction
  val issueVLMAXLog2                  = GenVLMAXLog2(
    Mux(issueLmul.asSInt > 0.S, 0.U, issueLmul),
    Mux(isIndexed(issueInstType), issueSew(1, 0), issueEew(1, 0))
  ) // log2 of the max element number in a vd
  val issueVlMax                      = instMicroOp.vlmaxInVd // max elementIdx in vd
  val issueMaxIdxInIndex              = GenVLMAX(Mux(issueEmul.asSInt > 0.S, 0.U, issueEmul), issueEew) // max element number in an index register
  val issueMaxIdxInIndexMask          = UIntToMask(issueMaxIdxInIndex, elemIdxBits)
  val issueMaxIdxInIndexLog2          = GenVLMAXLog2(Mux(issueEmul.asSInt > 0.S, 0.U, issueEmul), issueEew)
  val issueIndexIdx                   = segmentIdx & issueMaxIdxInIndexMask
  val segmentActive                   = (mask & UIntToOH(elemIdxInVd)).orR

  // Segment instruction's FSM
  /*
  * s_idle: wait for a request
  * s_flush_sbuffer_req: request a sbuffer flush
  * s_wait_flush_sbuffer_resp: wait until the sbuffer is empty
  * s_tlb_req: send the tlb request
  * s_wait_tlb_resp: wait for the tlb response
  * s_pm: check pmp
  * s_cache_req: send the cache request
  * s_cache_resp: wait for the cache response
  * s_latch_and_merge_data: latch and merge load data
  * s_send_data: send store data to the sbuffer
  * s_finish: write back the uop
  * */
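  // Typical per-element flow (illustrative): s_idle -> s_flush_sbuffer_req
  // (-> s_wait_flush_sbuffer_resp) -> s_tlb_req -> s_wait_tlb_resp -> s_pm
  // -> s_cache_req -> s_cache_resp -> s_latch_and_merge_data (load) or
  // s_send_data (store), then back to s_tlb_req for the next field/segment,
  // or s_finish once the last field of the last segment is done.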
  val s_idle :: s_flush_sbuffer_req :: s_wait_flush_sbuffer_resp :: s_tlb_req :: s_wait_tlb_resp :: s_pm :: s_cache_req :: s_cache_resp :: s_latch_and_merge_data :: s_send_data :: s_finish :: Nil = Enum(11)
  val state             = RegInit(s_idle)
  val stateNext         = WireInit(s_idle)
  val sbufferEmpty      = io.flush_sbuffer.empty

  /**
   * state update
   */
  state  := stateNext

  /**
   * state transfer
   */
  when(state === s_idle){
    stateNext := Mux(isAfter(enqPtr, deqPtr), s_flush_sbuffer_req, s_idle)
  }.elsewhen(state === s_flush_sbuffer_req){
    stateNext := Mux(sbufferEmpty, s_tlb_req, s_wait_flush_sbuffer_resp) // if the sbuffer is already empty, query the tlb directly

  }.elsewhen(state === s_wait_flush_sbuffer_resp){
    stateNext := Mux(sbufferEmpty, s_tlb_req, s_wait_flush_sbuffer_resp)

  }.elsewhen(state === s_tlb_req){
    stateNext := Mux(segmentActive, s_wait_tlb_resp, Mux(FuType.isVLoad(instMicroOp.uop.fuType), s_latch_and_merge_data, s_send_data))

  }.elsewhen(state === s_wait_tlb_resp){
    stateNext := Mux(!io.dtlb.resp.bits.miss && io.dtlb.resp.fire, s_pm, s_tlb_req)

  }.elsewhen(state === s_pm){
    stateNext := Mux(exception_pa || exception_va, s_finish, s_cache_req)

  }.elsewhen(state === s_cache_req){
    stateNext := Mux(io.wdcache.req.fire || io.rdcache.req.fire, s_cache_resp, s_cache_req)

  }.elsewhen(state === s_cache_resp){
    when(io.wdcache.resp.fire || io.rdcache.resp.fire) {
      when(io.wdcache.resp.bits.miss && io.rdcache.resp.bits.miss) {
        stateNext := s_cache_req
      }.otherwise {
        stateNext := Mux(FuType.isVLoad(instMicroOp.uop.fuType), s_latch_and_merge_data, s_send_data)
      }
    }.otherwise{
      stateNext := s_cache_resp
    }

  }.elsewhen(state === s_latch_and_merge_data) {
    when((segmentIdx === maxSegIdx) && (fieldIdx === maxNfields)) {
      stateNext := s_finish // segment instruction finishes
    }.otherwise {
      stateNext := s_tlb_req // continue with the next element
    }

  }.elsewhen(state === s_send_data) { // wait until the sbuffer accepts the data
    when(!io.sbuffer.fire) {
      stateNext := s_send_data
    }.elsewhen((segmentIdx === maxSegIdx) && (fieldIdx === maxNfields)) {
      stateNext := s_finish // segment instruction finishes
    }.otherwise {
      stateNext := s_tlb_req // continue with the next element
    }
  }.elsewhen(state === s_finish){ // writeback uop
    stateNext := Mux(distanceBetween(enqPtr, deqPtr) === 0.U, s_idle, s_finish)

  }.otherwise{
    stateNext := s_idle
    XSError(true.B, "Unknown state!\n")
  }


  /*************************************************************************
   *                            enqueue logic
   *************************************************************************/
  io.in.ready                         := true.B
  val fuOpType                         = io.in.bits.uop.fuOpType
  val vtype                            = io.in.bits.uop.vpu.vtype
  val mop                              = fuOpType(6, 5)
  val instType                         = Cat(true.B, mop)
  val eew                              = io.in.bits.uop.vpu.veew
  val sew                              = vtype.vsew
  val lmul                             = vtype.vlmul
  val vl                               = instMicroOp.vl
  val vm                               = instMicroOp.uop.vpu.vm
  val vstart                           = instMicroOp.uop.vpu.vstart
  val srcMask                          = GenFlowMask(Mux(vm, Fill(VLEN, 1.U(1.W)), io.in.bits.src_mask), vstart, vl, true)
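  // srcMask (illustrative note): when vm is set the access is unmasked, so an
  // all-ones mask is used; otherwise the v0 source mask applies. GenFlowMask
  // additionally bounds the active elements by vstart and vl.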
  // when the first uop enqueues, latch the microOp of the segment instruction
  when(io.in.fire && !instMicroOp.valid){
    val vlmaxInVd                      = GenVLMAX(Mux(lmul.asSInt > 0.S, 0.U, lmul), Mux(isIndexed(instType), sew(1, 0), eew(1, 0))) // element number in a vd
    instMicroOp.vaddr                 := io.in.bits.src_rs1(VAddrBits - 1, 0)
    instMicroOp.valid                 := true.B // set only by the first uop
    instMicroOp.alignedType           := Mux(isIndexed(instType), sew(1, 0), eew(1, 0))
    instMicroOp.uop                   := io.in.bits.uop
    instMicroOp.mask                  := srcMask
    instMicroOp.vstart                := 0.U
    instMicroOp.vlmaxInVd             := vlmaxInVd
    instMicroOp.vlmaxMaskInVd         := UIntToMask(vlmaxInVd, elemIdxBits) // for merging data
    instMicroOp.vl                    := io.in.bits.src_vl.asTypeOf(VConfig()).vl
    segmentOffset                     := 0.U
  }
  // latch data
  when(io.in.fire){
    data(enqPtr.value)                := io.in.bits.src_vs3
    stride(enqPtr.value)              := io.in.bits.src_stride
    uopIdx(enqPtr.value)              := io.in.bits.uop.vpu.vuopIdx
    pdest(enqPtr.value)               := io.in.bits.uop.pdest
  }

  // update enqPtr; there is only one enqueue port
  when(io.in.fire){
    enqPtr                            := enqPtr + 1.U
  }

  /*************************************************************************
   *                            output logic
   *************************************************************************/

  val indexStride                     = IndexAddr( // index offset for indexed instructions
                                                    index = stride(stridePtr.value),
                                                    flow_inner_idx = issueIndexIdx,
                                                    eew = issueEew
                                                  )
  val realSegmentOffset               = Mux(isIndexed(issueInstType),
                                            indexStride,
                                            segmentOffset)
  val vaddr                           = baseVaddr + (fieldIdx << alignedType).asUInt + realSegmentOffset
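  // Worked example (illustrative): a unit-stride segment load with nf field = 1
  // (two fields) and eew = 2 (32-bit elements) accesses segment i, field f at
  //   base + i * 8 + f * 4
  // since segmentOffset advances by (nf + 1) << eew = 8 bytes per segment and
  // each field adds (fieldIdx << alignedType) = f * 4 bytes within the segment.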
  /**
   * tlb req and tlb resp
   */

  // query DTLB IO Assign
  io.dtlb.req                         := DontCare
  io.dtlb.resp.ready                  := true.B
  io.dtlb.req.valid                   := state === s_tlb_req && segmentActive
  io.dtlb.req.bits.cmd                := Mux(FuType.isVLoad(fuType), TlbCmd.read, TlbCmd.write)
  io.dtlb.req.bits.vaddr              := vaddr
  io.dtlb.req.bits.size               := instMicroOp.alignedType(2,0)
  io.dtlb.req.bits.memidx.is_ld       := FuType.isVLoad(fuType)
  io.dtlb.req.bits.memidx.is_st       := FuType.isVStore(fuType)
  io.dtlb.req.bits.debug.robIdx       := instMicroOp.uop.robIdx
  io.dtlb.req.bits.no_translate       := false.B
  io.dtlb.req.bits.debug.pc           := instMicroOp.uop.pc
  io.dtlb.req.bits.debug.isFirstIssue := DontCare
  io.dtlb.req_kill                    := false.B

  // tlb resp
  when(io.dtlb.resp.fire && state === s_wait_tlb_resp){
      exceptionVec(storePageFault)    := io.dtlb.resp.bits.excp(0).pf.st
      exceptionVec(loadPageFault)     := io.dtlb.resp.bits.excp(0).pf.ld
      exceptionVec(storeAccessFault)  := io.dtlb.resp.bits.excp(0).af.st
      exceptionVec(loadAccessFault)   := io.dtlb.resp.bits.excp(0).af.ld
      when(!io.dtlb.resp.bits.miss){
        instMicroOp.paddr             := io.dtlb.resp.bits.paddr(0)
      }
  }
  // pmp
  // NOTE: only load/store exceptions are handled here; other exceptions must not be routed to this unit
  val pmp = WireInit(io.pmpResp)
  when(state === s_pm){
    exception_va := exceptionVec(storePageFault) || exceptionVec(loadPageFault) ||
    exceptionVec(storeAccessFault) || exceptionVec(loadAccessFault)
    exception_pa := pmp.st || pmp.ld

    instMicroOp.exception_pa       := exception_pa
    instMicroOp.exception_va       := exception_va
    // merge pmp faults into the access-fault bits
    exceptionVec(loadAccessFault)  := exceptionVec(loadAccessFault) || pmp.ld
    exceptionVec(storeAccessFault) := exceptionVec(storeAccessFault) || pmp.st

    when(exception_va || exception_pa){
      instMicroOp.exceptionvaddr     := vaddr
      instMicroOp.vl                 := segmentIdx // for exception
      instMicroOp.vstart             := segmentIdx // for exception
    }
  }
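  // Note (illustrative): on a fault, vl and vstart are clipped to the index of
  // the faulting segment, so the writeback reports how far execution got before
  // the exception.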

  /**
   * flush sbuffer IO Assign
   */
  io.flush_sbuffer.valid           := !sbufferEmpty && (state === s_flush_sbuffer_req)


  /**
   * merge data for load
   */
  val cacheData = LookupTree(vaddr(3,0), List(
    "b0000".U -> io.rdcache.resp.bits.data_delayed(63,    0),
    "b0001".U -> io.rdcache.resp.bits.data_delayed(63,    8),
    "b0010".U -> io.rdcache.resp.bits.data_delayed(63,   16),
    "b0011".U -> io.rdcache.resp.bits.data_delayed(63,   24),
    "b0100".U -> io.rdcache.resp.bits.data_delayed(63,   32),
    "b0101".U -> io.rdcache.resp.bits.data_delayed(63,   40),
    "b0110".U -> io.rdcache.resp.bits.data_delayed(63,   48),
    "b0111".U -> io.rdcache.resp.bits.data_delayed(63,   56),
    "b1000".U -> io.rdcache.resp.bits.data_delayed(127,  64),
    "b1001".U -> io.rdcache.resp.bits.data_delayed(127,  72),
    "b1010".U -> io.rdcache.resp.bits.data_delayed(127,  80),
    "b1011".U -> io.rdcache.resp.bits.data_delayed(127,  88),
    "b1100".U -> io.rdcache.resp.bits.data_delayed(127,  96),
    "b1101".U -> io.rdcache.resp.bits.data_delayed(127, 104),
    "b1110".U -> io.rdcache.resp.bits.data_delayed(127, 112),
    "b1111".U -> io.rdcache.resp.bits.data_delayed(127, 120)
  ))
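  // cacheData right-shifts the 128-bit beat by the byte offset vaddr(3,0):
  // offsets 0-7 select from the low 64-bit half, offsets 8-15 from the high
  // half, so the accessed element always starts at bit 0 of cacheData.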
  val pickData  = rdataVecHelper(alignedType(1,0), cacheData)
  val mergedData = mergeDataWithElemIdx(
    oldData = data(splitPtr.value),
    newData = Seq(pickData),
    alignedType = alignedType(1,0),
    elemIdx = Seq(elemIdxInVd),
    valids = Seq(true.B)
  )
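  // mergedData (illustrative note): inserts the newly loaded element into slot
  // elemIdxInVd of the old vd data, leaving all other element slots unchanged.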
  when(state === s_latch_and_merge_data && segmentActive){
    data(splitPtr.value) := mergedData
  }
  /**
   * split data for store
   * */
  val splitData = genVSData(
    data = data(splitPtr.value),
    elemIdx = elemIdxInVd,
    alignedType = alignedType
  )
  val flowData  = genVWdata(splitData, alignedType) // TODO: connect vstd, pass vector data
  val wmask     = genVWmask(vaddr, alignedType(1, 0)) & mask(segmentIdx)
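  // wmask (illustrative note): a per-byte write mask for the element's bytes at
  // this address, gated by the element's bit in the flow mask.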

  /**
   * rdcache req
   */
  io.rdcache.req                    := DontCare
  io.rdcache.req.valid              := state === s_cache_req && FuType.isVLoad(fuType)
  io.rdcache.req.bits.cmd           := MemoryOpConstants.M_XRD
  io.rdcache.req.bits.vaddr         := vaddr
  io.rdcache.req.bits.mask          := mask
  io.rdcache.req.bits.data          := flowData
  io.rdcache.pf_source              := LOAD_SOURCE.U
  io.rdcache.req.bits.id            := DontCare
  io.rdcache.resp.ready             := true.B
  io.rdcache.s1_paddr_dup_lsu       := instMicroOp.paddr
  io.rdcache.s1_paddr_dup_dcache    := instMicroOp.paddr
  io.rdcache.s1_kill                := false.B
  io.rdcache.s2_kill                := false.B
  if (env.FPGAPlatform){
    io.rdcache.s0_pc                := DontCare
    io.rdcache.s1_pc                := DontCare
    io.rdcache.s2_pc                := DontCare
  }else{
    io.rdcache.s0_pc                := instMicroOp.uop.pc
    io.rdcache.s1_pc                := instMicroOp.uop.pc
    io.rdcache.s2_pc                := instMicroOp.uop.pc
  }
  io.rdcache.replacementUpdated     := false.B
  io.rdcache.is128Req               := false.B

  /**
  * wdcache req
  * */
  io.wdcache.req                    := DontCare
  io.wdcache.req.valid              := state === s_cache_req && FuType.isVStore(fuType)
  io.wdcache.req.bits.cmd           := MemoryOpConstants.M_PFW
  io.wdcache.req.bits.vaddr         := vaddr
  io.wdcache.resp.ready             := true.B
  io.wdcache.s1_paddr               := instMicroOp.paddr
  io.wdcache.s1_kill                := false.B
  io.wdcache.s2_kill                := false.B
  io.wdcache.s2_pc                  := instMicroOp.uop.pc


  /**
   * write data to sbuffer
   * */

  io.sbuffer.bits                  := DontCare
  io.sbuffer.valid                 := state === s_send_data
  io.sbuffer.bits.mask             := wmask
  io.sbuffer.bits.data             := flowData
  io.sbuffer.bits.vaddr            := vaddr
  io.sbuffer.bits.cmd              := MemoryOpConstants.M_XWR
  io.sbuffer.bits.id               := DontCare
  io.sbuffer.bits.addr             := instMicroOp.paddr

  /**
   * update ptr
   * */

  val splitPtrOffset = Mux(lmul.asSInt < 0.S, 1.U, (1.U << lmul).asUInt)
  splitPtrNext :=
    Mux(fieldIdx === maxNfields,
     (deqPtr + ((segmentIdx +& 1.U) >> issueVLMAXLog2).asUInt), // segment finished
     (splitPtr + splitPtrOffset)) // next field
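  // splitPtr walk (illustrative note): buffer entries are per uop (one vreg), and
  // consecutive fields' vregs sit max(1, 2^lmul) entries apart; when the last
  // field of a segment completes, splitPtr returns to the field-0 entry, advanced
  // by one for every full vreg of elements ((segmentIdx + 1) >> issueVLMAXLog2).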
  dontTouch(issueVLMAXLog2)
  dontTouch(splitPtrNext)
  dontTouch(stridePtr)

  // update splitPtr
  when(state === s_latch_and_merge_data || state === s_send_data){
    splitPtr := splitPtrNext
  }.elsewhen(io.in.fire && !instMicroOp.valid){
    splitPtr := deqPtr // initialize splitPtr
  }

  // update stridePtr; only used by indexed accesses
  val strideOffset = Mux(isIndexed(issueInstType), (segmentIdx +& 1.U) >> issueMaxIdxInIndexLog2, 0.U)
  stridePtr       := deqPtr + strideOffset
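  // stridePtr (illustrative note): for indexed accesses this selects the buffer
  // entry holding the index vreg that covers the current segment; one index
  // register holds issueMaxIdxInIndex element indices.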

  // update fieldIdx
  when(io.in.fire && !instMicroOp.valid){
    fieldIdx := 0.U
  }.elsewhen(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data)){
    fieldIdx := 0.U
  }.elsewhen((state === s_latch_and_merge_data || state === s_send_data)){
    fieldIdx := fieldIdx + 1.U
  }.elsewhen(!segmentActive){ // if the segment is inactive, skip it
    fieldIdx := maxNfields
  }
  // update segmentIdx
  when(io.in.fire && !instMicroOp.valid){
    segmentIdx := 0.U
  }.elsewhen(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data) && segmentIdx =/= maxSegIdx){
    segmentIdx := segmentIdx + 1.U
  }

  // update segmentOffset
  when(fieldIdx === maxNfields && (state === s_latch_and_merge_data || state === s_send_data)){
    segmentOffset := segmentOffset + Mux(isUnitStride(issueInstType), (maxNfields +& 1.U) << issueEew, stride(stridePtr.value))
  }

  // update deqPtr
  when(io.uopwriteback.fire){
    deqPtr := deqPtr + 1.U
  }

  /*************************************************************************
   *                            dequeue logic
   *************************************************************************/
  val uopIdxInField = GenUopIdxInField(instType, issueEmul, issueLmul, uopIdx(deqPtr.value))
  val vdIdxInField  = GenVdIdxInField(instType, issueEmul, issueLmul, uopIdxInField) // for merging with old vd
  /* select the mask of the vd; may be removed in the future */
  val realEw        = Mux(isIndexed(issueInstType), issueSew(1, 0), issueEew(1, 0))
  val maskDataVec: Vec[UInt] = VecDataToMaskDataVec(instMicroOp.mask, realEw)
  val maskUsed      = maskDataVec(vdIdxInField)
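  // maskUsed (illustrative note): the slice of the flow mask whose bits belong to
  // the vd currently being written back.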

  when(stateNext === s_idle){
    instMicroOp.valid := false.B
  }
  io.uopwriteback.valid               := (state === s_finish) && distanceBetween(enqPtr, deqPtr) =/= 0.U
  io.uopwriteback.bits.uop            := instMicroOp.uop
  io.uopwriteback.bits.mask.get       := instMicroOp.mask
  io.uopwriteback.bits.data           := data(deqPtr.value)
  io.uopwriteback.bits.vdIdx.get      := vdIdxInField
  io.uopwriteback.bits.uop.vpu.vl     := instMicroOp.vl
  io.uopwriteback.bits.uop.vpu.vstart := instMicroOp.vstart
  io.uopwriteback.bits.uop.vpu.vmask  := maskUsed
  io.uopwriteback.bits.uop.pdest      := pdest(deqPtr.value)
  io.uopwriteback.bits.debug          := DontCare
  io.uopwriteback.bits.vdIdxInField.get := DontCare

  // to RS
  io.feedback.valid                   := state === s_finish
  io.feedback.bits.hit                := true.B
  io.feedback.bits.robIdx             := instMicroOp.uop.robIdx
  io.feedback.bits.sourceType         := DontCare
  io.feedback.bits.flushState         := DontCare
  io.feedback.bits.dataInvalidSqIdx   := DontCare
  io.feedback.bits.uopIdx.get         := uopIdx(deqPtr.value)

  // exception
  io.exceptionAddr                    := DontCare // TODO: fix it when handling exceptions
}