xref: /XiangShan/src/main/scala/xiangshan/backend/fu/wrapper/VFALU.scala (revision bdda74fd3971d424f2297306eb935f30024d55b3)
1package xiangshan.backend.fu.wrapper
2
3import chipsalliance.rocketchip.config.Parameters
4import chisel3._
5import chisel3.util._
6import utils.XSError
7import xiangshan.backend.fu.FuConfig
8import xiangshan.backend.fu.vector.Bundles.VSew
9import xiangshan.backend.fu.vector.utils.VecDataSplitModule
10import xiangshan.backend.fu.vector.{Mgu, VecInfo, VecPipedFuncUnit}
11import yunsuan.{VfaluType, VfpuType}
12import yunsuan.vector.VectorFloatAdder
13
/**
  * Vector floating-point ALU wrapper (add/sub/min/max/compare/class/merge).
  *
  * Splits the `dataWidth`-bit vector operands into 64-bit lanes, drives one
  * [[VectorFloatAdder]] per lane, then routes the concatenated lane results
  * (or, for compare/mask-producing ops, the gathered per-element bits) through
  * [[Mgu]] to apply mask/tail policy. Also gates the per-element fflags so
  * that masked-off and tail elements do not contribute exception flags.
  *
  * @param cfg functional-unit configuration (supplies `dataBits`, op types, ...)
  */
class VFAlu(cfg: FuConfig)(implicit p: Parameters) extends VecPipedFuncUnit(cfg) {
  // Simulation-time guard: the placeholder opType must never reach this unit.
  XSError(io.in.valid && io.in.bits.ctrl.fuOpType === VfpuType.dummy, "Vfalu OpType not supported")

  // params alias
  private val dataWidth = cfg.dataBits                          // total vector datapath width in bits
  private val dataWidthOfDataModule = 64                        // each VectorFloatAdder lane processes 64 bits
  private val numVecModule = dataWidth / dataWidthOfDataModule  // number of 64-bit adder lanes

  // io alias
  private val opcode  = fuOpType(4,0)   // low 5 bits: operation selector passed straight to the adder
  private val resWiden  = fuOpType(5)   // set for result-widening forms (e.g. vfwadd.vv)
  private val opbWiden  = fuOpType(6)   // set for operand-widening ".wv" forms where vs2 is already wide

  // modules
  private val vfalus = Seq.fill(numVecModule)(Module(new VectorFloatAdder))
  private val vs2Split = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val vs1Split = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val oldVdSplit  = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val mgu = Module(new Mgu(dataWidth))

  /**
    * In connection of [[vs2Split]], [[vs1Split]] and [[oldVdSplit]]
    */
  vs2Split.io.inVecData := vs2
  vs1Split.io.inVecData := vs1
  oldVdSplit.io.inVecData := oldVd

  /**
    * [[vfalus]]'s in connection
    */
  // Regroup the 32-bit slices so each 64-bit lane gets the pair of 32-bit
  // sources it needs for widening, e.g. for dataWidth = 128:
  // Vec(vs2(31,0), vs2(63,32), vs2(95,64), vs2(127,96)) ==>
  // Vec(
  //   Cat(vs2(95,64),  vs2(31,0)),
  //   Cat(vs2(127,96), vs2(63,32)),
  // )
  // NOTE(review): these grouped vecs are not referenced below (widen_a/widen_b
  // build the same pairing directly from outVec32b) — possibly dead code.
  private val vs2GroupedVec: Vec[UInt] = VecInit(vs2Split.io.outVec32b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)
  private val vs1GroupedVec: Vec[UInt] = VecInit(vs1Split.io.outVec32b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)
  private val resultData = Wire(Vec(numVecModule,UInt(dataWidthOfDataModule.W)))
  private val fflagsData = Wire(Vec(numVecModule,UInt(20.W)))   // 4 elements x 5 fflag bits per 64-bit lane
  private val srcMaskRShift = Wire(UInt((4 * numVecModule).W))  // mask bits covering this uop, one bit per f16 slot

  /**
    * Extracts the mask bits belonging to 64-bit lane `i`, zero-padded to 4 bits
    * (the lane holds 1 f64, 2 f32 or 4 f16 elements depending on `sew`).
    *
    * @param inmask mask already right-shifted so bit 0 is this uop's first element
    * @param sew    element width selector (1 = 16b, 2 = 32b, 3 = 64b)
    * @param i      lane index
    * @return 4-bit per-element mask for lane `i`
    */
  def genMaskForMerge(inmask:UInt, sew:UInt, i:Int): UInt = {
    val f64MaskNum = dataWidth / 64
    val f32MaskNum = dataWidth / 32
    val f16MaskNum = dataWidth / 16
    val f64Mask = inmask(f64MaskNum-1,0)
    val f32Mask = inmask(f32MaskNum-1,0)
    val f16Mask = inmask(f16MaskNum-1,0)
    val f64MaskI = Cat(0.U(3.W),f64Mask(i))           // 1 element per lane
    val f32MaskI = Cat(0.U(2.W),f32Mask(2*i+1,2*i))   // 2 elements per lane
    val f16MaskI = f16Mask(4*i+3,4*i)                 // 4 elements per lane
    val outMask = Mux1H(
      Seq(
        (sew === 3.U) -> f64MaskI,
        (sew === 2.U) -> f32MaskI,
        (sew === 1.U) -> f16MaskI,
      )
    )
    outMask
  }
  // Align the source mask to this uop: each uop covers (16 >> vsew) elements
  // of a 128-bit register group slice, so shift by vuopIdx * elementsPerUop.
  srcMaskRShift := (srcMask >> (vecCtrl.vuopIdx * (16.U >> vecCtrl.vsew)))(4 * numVecModule - 1, 0)
  val fp_aIsFpCanonicalNAN = Wire(Vec(numVecModule,Bool()))
  val fp_bIsFpCanonicalNAN = Wire(Vec(numVecModule,Bool()))
  vfalus.zipWithIndex.foreach {
    case (mod, i) =>
      // For ".wv" operand-widening forms the already-wide operand must sit on
      // port a, so a/b are swapped when opbWiden is set.
      mod.io.fp_a         := Mux(opbWiden, vs1Split.io.outVec64b(i), vs2Split.io.outVec64b(i))  // very dirty TODO
      mod.io.fp_b         := Mux(opbWiden, vs2Split.io.outVec64b(i), vs1Split.io.outVec64b(i))  // very dirty TODO
      // Widening sources: pair the i-th and (i+numVecModule)-th 32-bit slices.
      mod.io.widen_a      := Cat(vs2Split.io.outVec32b(i+numVecModule), vs2Split.io.outVec32b(i))
      mod.io.widen_b      := Cat(vs1Split.io.outVec32b(i+numVecModule), vs1Split.io.outVec32b(i))
      mod.io.frs1         := 0.U     // already vf -> vv
      mod.io.is_frs1      := false.B // already vf -> vv
      mod.io.mask         := genMaskForMerge(inmask = srcMaskRShift, sew = vsew, i = i)
      mod.io.uop_idx      := vuopIdx(0)  // widening ops use the uop parity to pick low/high halves
      mod.io.is_vec       := true.B // Todo
      mod.io.round_mode   := frm
      // Widening results are produced at the destination (wider) element size.
      mod.io.fp_format    := Mux(resWiden, vsew + 1.U, vsew)
      mod.io.opb_widening := opbWiden
      mod.io.res_widening := resWiden
      mod.io.op_code      := opcode
      resultData(i)       := mod.io.fp_result
      fflagsData(i)       := mod.io.fflags
      // Scalar fp-to-vec moves NaN-box narrow operands: if the upper bits are
      // not all ones the value must be treated as a canonical NaN.
      fp_aIsFpCanonicalNAN(i) := vecCtrl.fpu.isFpToVecInst & (
          ((vsew === VSew.e32) & (!vs2Split.io.outVec64b(i).head(32).andR)) |
          ((vsew === VSew.e16) & (!vs2Split.io.outVec64b(i).head(48).andR))
        )
      fp_bIsFpCanonicalNAN(i) := vecCtrl.fpu.isFpToVecInst & (
          ((vsew === VSew.e32) & (!vs1Split.io.outVec64b(i).head(32).andR)) |
          ((vsew === VSew.e16) & (!vs1Split.io.outVec64b(i).head(48).andR))
        )
      mod.io.fp_aIsFpCanonicalNAN := fp_aIsFpCanonicalNAN(i)
      mod.io.fp_bIsFpCanonicalNAN := fp_bIsFpCanonicalNAN(i)
  }
  val resultDataUInt = resultData.asUInt
  // Compare ops yield one bit per element; gather the LSB of each element's
  // result slot into a dense bit vector (one slot per possible f16 element).
  val cmpResultWidth = dataWidth / 16
  val cmpResult = Wire(Vec(cmpResultWidth, Bool()))
  for (i <- 0 until cmpResultWidth) {
    if(i == 0) {
      // Element 0 sits at bit 0 for every sew.
      cmpResult(i) := resultDataUInt(0)
    }
    else if(i < dataWidth / 64) {
      // These slots exist for 16/32/64-bit elements.
      cmpResult(i) := Mux1H(
        Seq(
          (outVecCtrl.vsew === 1.U) -> resultDataUInt(i*16),
          (outVecCtrl.vsew === 2.U) -> resultDataUInt(i*32),
          (outVecCtrl.vsew === 3.U) -> resultDataUInt(i*64)
        )
      )
    }
    else if(i < dataWidth / 32) {
      // Beyond the f64 element count: only valid for 16/32-bit elements.
      cmpResult(i) := Mux1H(
        Seq(
          (outVecCtrl.vsew === 1.U) -> resultDataUInt(i * 16),
          (outVecCtrl.vsew === 2.U) -> resultDataUInt(i * 32),
          (outVecCtrl.vsew === 3.U) -> false.B
        )
      )
    }
    else if(i <  dataWidth / 16) {
      // Beyond the f32 element count: only valid for 16-bit elements.
      cmpResult(i) := Mux(outVecCtrl.vsew === 1.U, resultDataUInt(i*16), false.B)
    }
  }

  // Effective element width at the output stage; RegNext delays resWiden to
  // match the result timing.
  // NOTE(review): RegNext implies exactly one cycle between the input-stage
  // fuOpType and the output stage — confirm against VecPipedFuncUnit latency.
  val outEew = Mux(RegNext(resWiden), outVecCtrl.vsew + 1.U, outVecCtrl.vsew)
  val outVuopidx = outVecCtrl.vuopIdx(2, 0)
  val vlMax = ((VLEN/8).U >> outEew).asUInt              // elements per register at outEew
  // |vlmul| as a positive magnitude; vlmul(2) is the sign (fractional LMUL).
  val lmulAbs = Mux(outVecCtrl.vlmul(2), (~outVecCtrl.vlmul(1,0)).asUInt + 1.U, outVecCtrl.vlmul(1,0))
  // Scalar fp instructions moved onto the vector path behave as vl = 1.
  val outVlFix = Mux(outVecCtrl.fpu.isFpToVecInst, 1.U, outVl)
  val vlMaxAllUop = Wire(outVl.cloneType)
  vlMaxAllUop := Mux(outVecCtrl.vlmul(2), vlMax >> lmulAbs, vlMax << lmulAbs).asUInt
  val vlMaxThisUop = Mux(outVecCtrl.vlmul(2), vlMax >> lmulAbs, vlMax).asUInt
  // Elements of vl that fall into this uop (clamped to >= 0, then to the max).
  val vlSetThisUop = Mux(outVlFix > outVuopidx*vlMaxThisUop, outVlFix - outVuopidx*vlMaxThisUop, 0.U)
  val vlThisUop = Wire(UInt(3.W))
  vlThisUop := Mux(vlSetThisUop < vlMaxThisUop, vlSetThisUop, vlMaxThisUop)
  // One bit per element slot, set for elements below vl in this uop.
  val vlMaskRShift = Wire(UInt((4 * numVecModule).W))
  vlMaskRShift := Fill(4 * numVecModule, 1.U(1.W)) >> ((4 * numVecModule).U - vlThisUop)

  // vfmerge and scalar fp ops ignore the mask register for fflags/Mgu purposes.
  private val needNoMask = (outCtrl.fuOpType === VfaluType.vfmerge) || outVecCtrl.fpu.isFpToVecInst
  val maskToMgu = Mux(needNoMask, allMaskTrue, outSrcMask)
  // Per-element fflags enables: an element contributes flags only if it is
  // active under the mask AND below vl.
  val allFFlagsEn = Wire(Vec(4*numVecModule,Bool()))
  val outSrcMaskRShift = Wire(UInt((4*numVecModule).W))
  outSrcMaskRShift := (maskToMgu >> (outVecCtrl.vuopIdx(2,0) * vlMax))(4*numVecModule-1,0)
  val f16FFlagsEn = outSrcMaskRShift
  // For wider elements each 64-bit lane has fewer real elements; the unused
  // high slots are tied to 1 (their fflags are zeroed elsewhere by vlMaskRShift).
  val f32FFlagsEn = Wire(Vec(numVecModule,UInt(4.W)))
  for (i <- 0 until numVecModule){
    f32FFlagsEn(i) := Cat(Fill(2, 1.U),outSrcMaskRShift(2*i+1,2*i))
  }
  val f64FFlagsEn = Wire(Vec(numVecModule, UInt(4.W)))
  for (i <- 0 until numVecModule) {
    f64FFlagsEn(i) := Cat(Fill(3, 1.U), outSrcMaskRShift(i))
  }
  val fflagsEn= Mux1H(
    Seq(
      (outEew === 1.U) -> f16FFlagsEn.asUInt,
      (outEew === 2.U) -> f32FFlagsEn.asUInt,
      (outEew === 3.U) -> f64FFlagsEn.asUInt
    )
  )
  allFFlagsEn := (fflagsEn & vlMaskRShift).asTypeOf(allFFlagsEn)

  // OR together the 5-bit fflags of every enabled element.
  val allFFlags = fflagsData.asTypeOf(Vec(4*numVecModule,UInt(5.W)))
  val outFFlags = allFFlagsEn.zip(allFFlags).map{
    case(en,fflags) => Mux(en, fflags, 0.U(5.W))
  }.reduce(_ | _)
  io.out.bits.res.fflags.get := outFFlags


  // For mask-producing compares, inactive elements keep the old vd bit (mu)
  // or are set to 1 (ma), per the vector spec's mask-destination policy.
  val cmpResultOldVd = Wire(UInt(cmpResultWidth.W))
  cmpResultOldVd := (outOldVd >> (outVecCtrl.vuopIdx * (16.U >> outVecCtrl.vsew)))(4*numVecModule-1,0)
  val cmpResultForMgu = Wire(Vec(cmpResultWidth, Bool()))
  for (i <- 0 until cmpResultWidth) {
    cmpResultForMgu(i) := Mux(outSrcMaskRShift(i), cmpResult(i), Mux(outVecCtrl.vma, true.B, cmpResultOldVd(i)))
  }

  // Mgu applies mask/tail policy; mask-destination ops feed the gathered
  // compare bits instead of the raw lane results.
  mgu.io.in.vd := Mux(outVecCtrl.isDstMask, Cat(0.U((dataWidth / 16 * 15).W), cmpResultForMgu.asUInt), resultDataUInt)
  mgu.io.in.oldVd := outOldVd
  mgu.io.in.mask := maskToMgu
  mgu.io.in.info.ta := outVecCtrl.vta
  mgu.io.in.info.ma := outVecCtrl.vma
  mgu.io.in.info.vl := outVlFix
  mgu.io.in.info.vstart := outVecCtrl.vstart
  mgu.io.in.info.eew := outEew
  mgu.io.in.info.vdIdx := outVecCtrl.vuopIdx
  mgu.io.in.info.narrow := outVecCtrl.isNarrow
  mgu.io.in.info.dstMask := outVecCtrl.isDstMask
  // Scalar fclass/compare results are only 16 bits wide; zero the rest.
  val resultFpMask = Wire(UInt(VLEN.W))
  val isFclass = outVecCtrl.fpu.isFpToVecInst && (outCtrl.fuOpType === VfaluType.vfclass)
  val fpCmpFuOpType = Seq(VfaluType.vfeq, VfaluType.vflt, VfaluType.vfle)
  val isCmp = outVecCtrl.fpu.isFpToVecInst && (fpCmpFuOpType.map(_ === outCtrl.fuOpType).reduce(_|_))
  resultFpMask := Mux(isFclass || isCmp, Fill(16, 1.U(1.W)), Fill(VLEN, 1.U(1.W)))
  io.out.bits.res.data := mgu.io.out.vd & resultFpMask

}
206
/**
  * Mask/tail generation unit skeleton for the VFALU datapath.
  *
  * NOTE(review): this module appears unfinished — `io.out.vd` is never driven
  * (elaborating an instance would fail FIRRTL "not fully initialized" checks)
  * and all the locals below are unused. [[VFAlu]] uses the shared `Mgu`
  * module instead. Confirm whether this can be removed or must be completed.
  */
class VFMgu(vlen:Int)(implicit p: Parameters) extends Module{
  val io = IO(new VFMguIO(vlen))

  val vd = io.in.vd          // raw result data (currently unused)
  val oldvd = io.in.oldVd    // previous destination value (currently unused)
  val mask = io.in.mask      // element mask (currently unused)
  val vsew = io.in.info.eew  // effective element width (currently unused)
  val num16bits = vlen / 16  // number of 16-bit element slots (currently unused)

}
217
/**
  * IO bundle for [[VFMgu]].
  *
  * @param vlen vector register width in bits
  */
class VFMguIO(vlen: Int)(implicit p: Parameters) extends Bundle {
  val in = new Bundle {
    val vd = Input(UInt(vlen.W))     // raw result data from the datapath
    val oldVd = Input(UInt(vlen.W))  // previous destination register value
    val mask = Input(UInt(vlen.W))   // element mask (v0)
    val info = Input(new VecInfo)    // vtype/vl/vstart control information
  }
  val out = new Bundle {
    val vd = Output(UInt(vlen.W))    // final result (NOTE: not driven by VFMgu as written)
  }
}
228}