package xiangshan.backend.fu.wrapper

import org.chipsalliance.cde.config.Parameters
import chisel3.{VecInit, _}
import chisel3.util._
import chisel3.util.experimental.decode.{QMCMinimizer, TruthTable, decoder}
import utility.DelayN
import utils.XSError
import xiangshan.XSCoreParamsKey
import xiangshan.backend.fu.vector.Bundles.{VConfig, VSew, ma}
import xiangshan.backend.fu.vector.{Mgu, Mgtu, VecPipedFuncUnit}
import xiangshan.backend.fu.vector.Utils.VecDataToMaskDataVec
import xiangshan.backend.fu.vector.utils.VecDataSplitModule
import xiangshan.backend.fu.{FuConfig, FuType}
import yunsuan.{OpType, VialuFixType}
import yunsuan.vector.alu.{VIntFixpAlu64b, VIntFixpDecode, VIntFixpTable}
import yunsuan.encoding.{VdType, Vs1IntType, Vs2IntType}
import yunsuan.encoding.Opcode.VialuOpcode
import yunsuan.vector.SewOH

/** IO for [[VIAluSrcTypeModule]]: decodes a vector integer ALU uop's
  * operand/destination element types from its fuOpType and vsew.
  */
class VIAluSrcTypeIO extends Bundle {
  val in = Input(new Bundle {
    val fuOpType: UInt = OpType()
    val vsew: UInt = VSew()
    val isReverse: Bool = Bool() // vrsub, vrdiv
    val isExt: Bool = Bool()
    val isDstMask: Bool = Bool() // vvm, vvvm, mmm
    val isMove: Bool = Bool() // vmv.s.x, vmv.v.v, vmv.v.x, vmv.v.i
  })
  val out = Output(new Bundle {
    val vs1Type: UInt = Vs1IntType()
    val vs2Type: UInt = Vs2IntType()
    val vdType: UInt = VdType()
    val illegal: Bool = Bool()
    val isVextF2: Bool = Bool()
    val isVextF4: Bool = Bool()
    val isVextF8: Bool = Bool()
  })
}

/** Derives vs1/vs2/vd element types (sign + sew) for vialu/vialuFix uops.
  *
  * Handles four instruction shapes:
  *  - ordinary same-width ops (FMT.VVV) and widening/narrowing add/sub/shift
  *    variants (FMT.VVW / WVW / WVV),
  *  - integer extension (vzext/vsext, FMT.VF2/VF4/VF8),
  *  - mask-producing / mask-consuming ops (FMT.VVM / VVMM / MMM),
  * and flags sew combinations that would under/overflow the legal e8..e64
  * range as `illegal`.
  */
class VIAluSrcTypeModule extends Module {
  val io: VIAluSrcTypeIO = IO(new VIAluSrcTypeIO)

  private val vsew = io.in.vsew
  private val isExt = io.in.isExt
  private val isDstMask = io.in.isDstMask

  private val opcode = VialuFixType.getOpcode(io.in.fuOpType)
  private val isSign = VialuFixType.isSigned(io.in.fuOpType)
  private val format = VialuFixType.getFormat(io.in.fuOpType)

  // Derived sews; VSew is 2 bits, so these wrap on under/overflow — the wrap
  // is what the illegal checks below detect.
  private val vsewX2 = vsew + 1.U
  private val vsewF2 = vsew - 1.U
  private val vsewF4 = vsew - 2.U
  private val vsewF8 = vsew - 3.U

  private val isAddSub = opcode === VialuOpcode.vadd || opcode === VialuOpcode.vsub
  // Fixed: the original compared the opcode constants against `format`
  // (a type confusion — `format` encodes operand shape, not the operation),
  // which broke isNarrow and narrowIllegal. Compare against `opcode`,
  // consistent with isAddSub/isVext.
  private val isShiftRight = Seq(VialuOpcode.vsrl, VialuOpcode.vsra, VialuOpcode.vssrl, VialuOpcode.vssra).map(op => op === opcode).reduce(_ || _)
  private val isVext = opcode === VialuOpcode.vext

  private val isWiden = isAddSub && Seq(VialuFixType.FMT.VVW, VialuFixType.FMT.WVW).map(fmt => fmt === format).reduce(_ || _)
  private val isNarrow = isShiftRight && format === VialuFixType.FMT.WVV
  private val isVextF2 = isVext && format === VialuFixType.FMT.VF2
  private val isVextF4 = isVext && format === VialuFixType.FMT.VF4
  private val isVextF8 = isVext && format === VialuFixType.FMT.VF8

  // check illegal: a wrapped sew shows up as e8 (overflow of e64+1) or
  // e64 (underflow of e8-1 etc.).
  private val widenIllegal = isWiden && vsewX2 === VSew.e8
  private val narrowIllegal = isNarrow && vsewF2 === VSew.e64
  private val vextIllegal = (isVextF2 && (vsewF2 === VSew.e64)) ||
    (isVextF4 && (vsewF4 === VSew.e64)) ||
    (isVextF8 && (vsewF8 === VSew.e64))
  // Todo: use it
  private val illegal = widenIllegal || narrowIllegal || vextIllegal

  // {1'b0, isSign}: the sign half of the {intType, sew} operand-type encoding.
  private val intType = Cat(0.U(1.W), isSign)

  private class Vs2Vs1VdSew extends Bundle {
    val vs2 = VSew()
    val vs1 = VSew()
    val vd = VSew()
  }

  private class Vs2Vs1VdType extends Bundle {
    val vs2 = Vs2IntType()
    val vs1 = Vs1IntType()
    val vd = VdType()
  }

  // sews for add/sub-like ops, per operand-shape format (V = sew, W = 2*sew).
  private val addSubSews = Mux1H(Seq(
    (format === VialuFixType.FMT.VVV) -> Cat(vsew, vsew, vsew),
    (format === VialuFixType.FMT.VVW) -> Cat(vsew, vsew, vsewX2),
    (format === VialuFixType.FMT.WVW) -> Cat(vsewX2, vsew, vsewX2),
    (format === VialuFixType.FMT.WVV) -> Cat(vsewX2, vsew, vsew),
  )).asTypeOf(new Vs2Vs1VdSew)

  // sews for vzext/vsext: source is sew/2, sew/4 or sew/8; destination is sew.
  private val vextSews = Mux1H(Seq(
    (format === VialuFixType.FMT.VF2) -> Cat(vsewF2, vsewF2, vsew),
    (format === VialuFixType.FMT.VF4) -> Cat(vsewF4, vsewF4, vsew),
    (format === VialuFixType.FMT.VF8) -> Cat(vsewF8, vsewF8, vsew),
  )).asTypeOf(new Vs2Vs1VdType)

  // full operand types for mask-destination ops (vd is always a mask).
  private val maskTypes = Mux1H(Seq(
    (format === VialuFixType.FMT.VVM) -> Cat(Cat(intType, vsew), Cat(intType, vsew), VdType.mask),
    (format === VialuFixType.FMT.VVMM) -> Cat(Cat(intType, vsew), Cat(intType, vsew), VdType.mask),
    (format === VialuFixType.FMT.MMM) -> Cat(Vs2IntType.mask, Vs1IntType.mask, VdType.mask),
  )).asTypeOf(new Vs2Vs1VdType)

  // Select between the three families; exactly one of the conditions holds.
  private val vs2Type = Mux1H(Seq(
    isDstMask -> maskTypes.vs2,
    isExt -> Cat(intType, vextSews.vs2),
    (!isExt && !isDstMask) -> Cat(intType, addSubSews.vs2),
  ))
  private val vs1Type = Mux1H(Seq(
    isDstMask -> maskTypes.vs1,
    isExt -> Cat(intType, vextSews.vs1),
    (!isExt && !isDstMask) -> Cat(intType, addSubSews.vs1),
  ))
  private val vdType = Mux1H(Seq(
    isDstMask -> maskTypes.vd,
    isExt -> Cat(intType, vextSews.vd),
    (!isExt && !isDstMask) -> Cat(intType, addSubSews.vd),
  ))

  io.out.vs2Type := vs2Type
  io.out.vs1Type := vs1Type
  io.out.vdType := vdType
  io.out.illegal := illegal
  io.out.isVextF2 := isVextF2
  io.out.isVextF4 := isVextF4
  io.out.isVextF8 := isVextF8
}

/** Vector integer / fixed-point ALU functional unit.
  *
  * Splits the `dataWidth`-bit operands into 64-bit lanes, drives one
  * [[VIntFixpAlu64b]] per lane, then merges lane results through [[Mgu]]
  * (mask/tail handling) or [[Mgtu]] (vmask-family ops). Operand element
  * types come from [[VIAluSrcTypeModule]]; per-op control (sub/misc/fixp)
  * comes from a QMC-minimized decode of the opcode against
  * [[VIntFixpTable]].
  */
class VIAluFix(cfg: FuConfig)(implicit p: Parameters) extends VecPipedFuncUnit(cfg) {
  XSError(io.in.valid && io.in.bits.ctrl.fuOpType === VialuFixType.dummy, "VialuF OpType not supported")

  // config params
  private val dataWidth = cfg.dataBits
  private val dataWidthOfDataModule = 64
  private val numVecModule = dataWidth / dataWidthOfDataModule

  // modules
  private val typeMod = Module(new VIAluSrcTypeModule)
  private val vs2Split = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val vs1Split = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val oldVdSplit = Module(new VecDataSplitModule(dataWidth, dataWidthOfDataModule))
  private val vIntFixpAlus = Seq.fill(numVecModule)(Module(new VIntFixpAlu64b))
  private val mgu = Module(new Mgu(dataWidth))
  private val mgtu = Module(new Mgtu(dataWidth))

  /**
   * [[typeMod]]'s in connection
   */
  typeMod.io.in.fuOpType := fuOpType
  typeMod.io.in.vsew := vsew
  typeMod.io.in.isReverse := isReverse
  typeMod.io.in.isExt := isExt
  typeMod.io.in.isDstMask := vecCtrl.isDstMask
  typeMod.io.in.isMove := isMove

  // Interleave-regroup the split sub-words: elements with even index form one
  // 64-bit lane word, odd index the other (used by widening/narrowing/vext,
  // where each 64-bit lane consumes half-width source elements).
  private val vs2GroupedVec32b: Vec[UInt] = VecInit(vs2Split.io.outVec32b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)
  private val vs2GroupedVec16b: Vec[UInt] = VecInit(vs2Split.io.outVec16b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)
  private val vs2GroupedVec8b: Vec[UInt] = VecInit(vs2Split.io.outVec8b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)
  private val vs1GroupedVec: Vec[UInt] = VecInit(vs1Split.io.outVec32b.zipWithIndex.groupBy(_._2 % 2).map(x => x._1 -> x._2.map(_._1)).values.map(x => Cat(x.reverse)).toSeq)

  /**
   * In connection of [[vs2Split]], [[vs1Split]] and [[oldVdSplit]]
   */
  vs2Split.io.inVecData := vs2
  vs1Split.io.inVecData := vs1
  oldVdSplit.io.inVecData := oldVd

  /**
   * [[vIntFixpAlus]]'s in connection
   */
  private val opcode = VialuFixType.getOpcode(inCtrl.fuOpType).asTypeOf(vIntFixpAlus.head.io.opcode)
  private val vs1Type = typeMod.io.out.vs1Type
  private val vs2Type = typeMod.io.out.vs2Type
  private val vdType = typeMod.io.out.vdType
  private val isVextF2 = typeMod.io.out.isVextF2
  private val isVextF4 = typeMod.io.out.isVextF4
  private val isVextF8 = typeMod.io.out.isVextF8

  // Decode per-op control bits (sub/misc/...) from the opcode.
  private val truthTable = TruthTable(VIntFixpTable.table, VIntFixpTable.default)
  private val decoderOut = decoder(QMCMinimizer, Cat(opcode.op), truthTable)
  private val vIntFixpDecode = decoderOut.asTypeOf(new VIntFixpDecode)
  private val isFixp = Mux(vIntFixpDecode.misc, opcode.isScalingShift, opcode.isSatAdd || opcode.isAvgAdd)
  // widen: add/sub whose vd sew differs from vs1 sew; widen_vs2 additionally
  // means vs2 is still narrow (VVW, as opposed to WVW).
  private val widen = opcode.isAddSub && vs1Type(1, 0) =/= vdType(1, 0)
  private val widen_vs2 = widen && vs2Type(1, 0) =/= vdType(1, 0)
  private val eewVs1 = SewOH(vs1Type(1, 0))
  private val eewVd = SewOH(vdType(1, 0))

  // Extension instructions
  private val vf2 = isVextF2
  private val vf4 = isVextF4
  private val vf8 = isVextF8

  // Lane operand selection: widening/narrowing ops consume regrouped
  // half-width vs1 words; vext selects the regrouping matching its fraction.
  private val vs1VecUsed: Vec[UInt] = Mux(widen || isNarrow, vs1GroupedVec, vs1Split.io.outVec64b)
  private val vs2VecUsed = Wire(Vec(numVecModule, UInt(64.W)))
  when(vf2) {
    vs2VecUsed := vs2GroupedVec32b
  }.elsewhen(vf4) {
    vs2VecUsed := vs2GroupedVec16b
  }.elsewhen(vf8) {
    vs2VecUsed := vs2GroupedVec8b
  }.otherwise {
    vs2VecUsed := vs2Split.io.outVec64b
  }

  private val vs2Adder = Mux(widen, vs2GroupedVec32b, vs2Split.io.outVec64b)

  // mask
  private val maskDataVec: Vec[UInt] = VecDataToMaskDataVec(srcMask, vsew)
  // narrowing ops process two source uops per destination uop, so the mask
  // index advances at half rate.
  private val maskIdx = Mux(isNarrow, (vuopIdx >> 1.U).asUInt, vuopIdx)
  private val eewVd_is_1b = vdType === VdType.mask
  private val maskUsed = splitMask(maskDataVec(maskIdx), Mux(eewVd_is_1b, eewVs1, eewVd))

  private val oldVdUsed = splitMask(VecDataToMaskDataVec(oldVd, vs1Type(1, 0))(vuopIdx), eewVs1)

  vIntFixpAlus.zipWithIndex.foreach {
    case (mod, i) =>
      mod.io.opcode := opcode

      mod.io.info.vm := vm
      mod.io.info.ma := vma
      mod.io.info.ta := vta
      mod.io.info.vlmul := vlmul
      mod.io.info.vl := vl
      mod.io.info.vstart := vstart
      mod.io.info.uopIdx := vuopIdx
      mod.io.info.vxrm := vxrm

      mod.io.srcType(0) := vs2Type
      mod.io.srcType(1) := vs1Type
      mod.io.vdType := vdType
      mod.io.narrow := isNarrow
      mod.io.isSub := vIntFixpDecode.sub
      mod.io.isMisc := vIntFixpDecode.misc
      mod.io.isFixp := isFixp
      mod.io.widen := widen
      mod.io.widen_vs2 := widen_vs2
      mod.io.vs1 := vs1VecUsed(i)
      mod.io.vs2_adder := vs2Adder(i)
      mod.io.vs2_misc := vs2VecUsed(i)
      mod.io.vmask := maskUsed(i)
      mod.io.oldVd := oldVdUsed(i)
  }

  /**
   * [[mgu]]'s in connection
   */
  // eewVs1 is computed at the input stage; delay it to match the pipe latency.
  private val outEewVs1 = DelayN(eewVs1, latency)

  private val outVd = Cat(vIntFixpAlus.reverse.map(_.io.vd))
  // Compare results are 1 bit per element; pack k = elements-per-64b lane.
  private val outCmp = Mux1H(outEewVs1.oneHot, Seq(8, 4, 2, 1).map(
    k => Cat(vIntFixpAlus.reverse.map(_.io.cmpOut(k - 1, 0)))))
  private val outNarrow = Cat(vIntFixpAlus.reverse.map(_.io.narrowVd))

  /* insts whose mask is not used to generate 'agnosticEn' and 'keepEn' in mgu:
   * vadc, vmadc...
   * vmerge
   */
  private val needNoMask = VialuFixType.needNoMask(outCtrl.fuOpType)
  private val maskToMgu = Mux(needNoMask, allMaskTrue, outSrcMask)

  private val outFormat = VialuFixType.getFormat(outCtrl.fuOpType)
  private val outWiden = (outFormat === VialuFixType.FMT.VVW | outFormat === VialuFixType.FMT.WVW) & !outVecCtrl.isExt & !outVecCtrl.isDstMask
  private val narrow = outVecCtrl.isNarrow
  private val dstMask = outVecCtrl.isDstMask

  private val outEew = Mux(outWiden, outVecCtrl.vsew + 1.U, outVecCtrl.vsew)

  mgu.io.in.vd := MuxCase(outVd, Seq(
    narrow -> outNarrow,
    dstMask -> outCmp,
  ))
  mgu.io.in.oldVd := outOldVd
  mgu.io.in.mask := maskToMgu
  mgu.io.in.info.ta := outVecCtrl.vta
  mgu.io.in.info.ma := outVecCtrl.vma
  mgu.io.in.info.vl := outVl
  mgu.io.in.info.vlmul := outVecCtrl.vlmul
  mgu.io.in.info.valid := io.out.valid
  mgu.io.in.info.vstart := outVecCtrl.vstart
  mgu.io.in.info.eew := outEew
  mgu.io.in.info.vsew := outVecCtrl.vsew
  mgu.io.in.info.vdIdx := outVecCtrl.vuopIdx
  mgu.io.in.info.narrow := narrow
  mgu.io.in.info.dstMask := dstMask

  /**
   * [[mgtu]]'s in connection, for vmask instructions
   */
  mgtu.io.in.vd := outVd
  mgtu.io.in.vl := outVl

  io.out.bits.res.data := Mux(outVecCtrl.isOpMask, mgtu.io.out.vd, mgu.io.out.vd)
  // vxsat only counts saturation in active (non-masked-off, in-vl) elements.
  io.out.bits.res.vxsat.get := (Cat(vIntFixpAlus.map(_.io.vxsat)) & mgu.io.out.asUInt).orR

  /** Expand a mask slice into per-lane 8-bit mask bytes: each 64-bit lane
    * holds 8/4/2/1 elements for sew 8/16/32/64, so each lane's mask byte is
    * zero-extended from that many bits of `maskIn`.
    */
  def splitMask(maskIn: UInt, sew: SewOH): Vec[UInt] = {
    val maskWidth = maskIn.getWidth
    val result = Wire(Vec(maskWidth / 8, UInt(8.W)))
    for ((resultData, i) <- result.zipWithIndex) {
      resultData := Mux1H(Seq(
        sew.is8 -> maskIn(i * 8 + 7, i * 8),
        sew.is16 -> Cat(0.U((8 - 4).W), maskIn(i * 4 + 3, i * 4)),
        sew.is32 -> Cat(0.U((8 - 2).W), maskIn(i * 2 + 1, i * 2)),
        sew.is64 -> Cat(0.U((8 - 1).W), maskIn(i)),
      ))
    }
    result
  }

}