1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "assembler_riscv64.h"
18
19 #include "base/bit_utils.h"
20 #include "base/casts.h"
21 #include "base/logging.h"
22 #include "base/memory_region.h"
23
24 namespace art HIDDEN {
25 namespace riscv64 {
26
27 static_assert(static_cast<size_t>(kRiscv64PointerSize) == kRiscv64DoublewordSize,
28 "Unexpected Riscv64 pointer size.");
29 static_assert(kRiscv64PointerSize == PointerSize::k64, "Unexpected Riscv64 pointer size.");
30
31 // Split 32-bit offset into an `imm20` for LUI/AUIPC and
32 // a signed 12-bit short offset for ADDI/JALR/etc.
// Split 32-bit offset into an `imm20` for LUI/AUIPC and
// a signed 12-bit short offset for ADDI/JALR/etc.
ALWAYS_INLINE static inline std::pair<uint32_t, int32_t> SplitOffset(int32_t offset) {
  // The highest 0x800 values are out of range: rounding them up would
  // overflow the 20-bit upper immediate.
  DCHECK_LT(offset, 0x7ffff800);
  // Round `offset` to nearest 4KiB offset because short offset has range [-0x800, 0x800).
  int32_t near_offset = (offset + 0x800) & ~0xfff;
  // Calculate the short offset; by construction it is the signed distance
  // from the rounded 4KiB boundary, i.e. in [-0x800, 0x800).
  int32_t short_offset = offset - near_offset;
  DCHECK(IsInt<12>(short_offset));
  // Extract the `imm20` (upper 20 bits of the rounded offset).
  uint32_t imm20 = static_cast<uint32_t>(near_offset) >> 12;
  // Return the result as a pair: (imm20 << 12) + short_offset == offset.
  return std::make_pair(imm20, short_offset);
}
46
ToInt12(uint32_t uint12)47 ALWAYS_INLINE static inline int32_t ToInt12(uint32_t uint12) {
48 DCHECK(IsUint<12>(uint12));
49 return static_cast<int32_t>(uint12 - ((uint12 & 0x800) << 1));
50 }
51
// Finalizes the emitted code: resolves branches and literals and applies
// CFI patches. The steps below are order-dependent: sizes must be reserved
// before branches are promoted, and branches must be promoted (their final
// sizes fixed) before they are emitted.
void Riscv64Assembler::FinalizeCode() {
  CHECK(!finalized_);
  Assembler::FinalizeCode();
  ReserveJumpTableSpace();
  EmitLiterals();
  PromoteBranches();
  EmitBranches();
  EmitJumpTables();
  PatchCFI();
  finalized_ = true;  // Further emission is a programming error.
}
63
64 /////////////////////////////// RV64 VARIANTS extension ///////////////////////////////
65
66 //////////////////////////////// RV64 "I" Instructions ////////////////////////////////
67
68 // LUI/AUIPC (RV32I, with sign-extension on RV64I), opcode = 0x17, 0x37
69
// LUI: load `imm20` into the upper 20 bits of `rd` (sign-extended on RV64).
// Compresses to C.LUI when Zca is enabled and the operands qualify.
void Riscv64Assembler::Lui(XRegister rd, uint32_t imm20) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    // C.LUI cannot target x0 or x2(SP) and accepts only a subset of immediates.
    if (rd != Zero && rd != SP && IsImmCLuiEncodable(imm20)) {
      CLui(rd, imm20);
      return;
    }
  }

  EmitU(imm20, rd, 0x37);
}
80
// AUIPC: rd = pc + (imm20 << 12). No compressed form exists.
void Riscv64Assembler::Auipc(XRegister rd, uint32_t imm20) {
  EmitU(imm20, rd, 0x17);
}
84
85 // Jump instructions (RV32I), opcode = 0x67, 0x6f
86
// JAL: jump to pc+offset, writing the return address to `rd`.
// A plain jump (rd == Zero) with a small offset compresses to C.J.
void Riscv64Assembler::Jal(XRegister rd, int32_t offset) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == Zero && IsInt<12>(offset)) {
      CJ(offset);
      return;
    }
    // Note: `c.jal` is RV32-only.
  }

  EmitJ(offset, rd, 0x6F);
}
98
Jalr(XRegister rd,XRegister rs1,int32_t offset)99 void Riscv64Assembler::Jalr(XRegister rd, XRegister rs1, int32_t offset) {
100 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
101 if (rd == RA && rs1 != Zero && offset == 0) {
102 CJalr(rs1);
103 return;
104 } else if (rd == Zero && rs1 != Zero && offset == 0) {
105 CJr(rs1);
106 return;
107 }
108 }
109
110 EmitI(offset, rs1, 0x0, rd, 0x67);
111 }
112
113 // Branch instructions, opcode = 0x63 (subfunc from 0x0 ~ 0x7), 0x67, 0x6f
114
// BEQ: branch to pc+offset if rs1 == rs2. A comparison against Zero on a
// compressed ("short") register with a 9-bit offset compresses to C.BEQZ.
void Riscv64Assembler::Beq(XRegister rs1, XRegister rs2, int32_t offset) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rs2 == Zero && IsShortReg(rs1) && IsInt<9>(offset)) {
      CBeqz(rs1, offset);
      return;
    } else if (rs1 == Zero && IsShortReg(rs2) && IsInt<9>(offset)) {
      // Equality is symmetric, so the operands may be swapped.
      CBeqz(rs2, offset);
      return;
    }
  }

  EmitB(offset, rs2, rs1, 0x0, 0x63);
}
128
Bne(XRegister rs1,XRegister rs2,int32_t offset)129 void Riscv64Assembler::Bne(XRegister rs1, XRegister rs2, int32_t offset) {
130 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
131 if (rs2 == Zero && IsShortReg(rs1) && IsInt<9>(offset)) {
132 CBnez(rs1, offset);
133 return;
134 } else if (rs1 == Zero && IsShortReg(rs2) && IsInt<9>(offset)) {
135 CBnez(rs2, offset);
136 return;
137 }
138 }
139
140 EmitB(offset, rs2, rs1, 0x1, 0x63);
141 }
142
// BLT: branch if rs1 < rs2 (signed). No compressed form.
void Riscv64Assembler::Blt(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x4, 0x63);
}

// BGE: branch if rs1 >= rs2 (signed). No compressed form.
void Riscv64Assembler::Bge(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x5, 0x63);
}

// BLTU: branch if rs1 < rs2 (unsigned). No compressed form.
void Riscv64Assembler::Bltu(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x6, 0x63);
}

// BGEU: branch if rs1 >= rs2 (unsigned). No compressed form.
void Riscv64Assembler::Bgeu(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x7, 0x63);
}
158
159 // Load instructions (RV32I+RV64I): opcode = 0x03, funct3 from 0x0 ~ 0x6
160
// LB: load byte, sign-extended. No compressed form.
void Riscv64Assembler::Lb(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
  EmitI(offset, rs1, 0x0, rd, 0x03);
}

// LH: load halfword, sign-extended.
// Compresses to C.LH (Zcb) for short registers and a small aligned offset.
void Riscv64Assembler::Lh(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
      CLh(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x1, rd, 0x03);
}
178
// LW: load word, sign-extended. Compresses to C.LWSP (SP-relative) or
// C.LW (short registers) when Zca is enabled and the offset qualifies.
void Riscv64Assembler::Lw(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    // C.LWSP: rd != x0, unsigned 8-bit offset, 4-byte aligned.
    if (rd != Zero && rs1 == SP && IsUint<8>(offset) && IsAligned<4>(offset)) {
      CLwsp(rd, offset);
      return;
    } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<7>(offset) && IsAligned<4>(offset)) {
      CLw(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x2, rd, 0x03);
}

// LD: load doubleword. Compresses to C.LDSP (SP-relative) or C.LD
// (short registers) when Zca is enabled and the offset qualifies.
void Riscv64Assembler::Ld(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    // C.LDSP: rd != x0, unsigned 9-bit offset, 8-byte aligned.
    if (rd != Zero && rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
      CLdsp(rd, offset);
      return;
    } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
      CLd(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x3, rd, 0x03);
}
210
// LBU: load byte, zero-extended. Compresses to C.LBU (Zcb) for short
// registers and a 2-bit unsigned offset.
void Riscv64Assembler::Lbu(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset)) {
      CLbu(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x4, rd, 0x03);
}

// LHU: load halfword, zero-extended. Compresses to C.LHU (Zcb) for short
// registers and a small aligned offset.
void Riscv64Assembler::Lhu(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
      CLhu(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x5, rd, 0x03);
}
236
Lwu(XRegister rd,XRegister rs1,int32_t offset)237 void Riscv64Assembler::Lwu(XRegister rd, XRegister rs1, int32_t offset) {
238 AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
239 EmitI(offset, rs1, 0x6, rd, 0x3);
240 }
241
242 // Store instructions (RV32I+RV64I): opcode = 0x23, funct3 from 0x0 ~ 0x3
243
// SB: store byte. Compresses to C.SB (Zcb) for short registers and a
// 2-bit unsigned offset.
void Riscv64Assembler::Sb(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<2>(offset)) {
      CSb(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x0, 0x23);
}

// SH: store halfword. Compresses to C.SH (Zcb) for short registers and a
// small aligned offset.
void Riscv64Assembler::Sh(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
      CSh(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x1, 0x23);
}
269
// SW: store word. Compresses to C.SWSP (SP-relative) or C.SW (short
// registers) when Zca is enabled and the offset qualifies.
void Riscv64Assembler::Sw(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    // C.SWSP: unsigned 8-bit offset, 4-byte aligned (any rs2 allowed).
    if (rs1 == SP && IsUint<8>(offset) && IsAligned<4>(offset)) {
      CSwsp(rs2, offset);
      return;
    } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<7>(offset) && IsAligned<4>(offset)) {
      CSw(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x2, 0x23);
}

// SD: store doubleword. Compresses to C.SDSP (SP-relative) or C.SD (short
// registers) when Zca is enabled and the offset qualifies.
void Riscv64Assembler::Sd(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    // C.SDSP: unsigned 9-bit offset, 8-byte aligned (any rs2 allowed).
    if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
      CSdsp(rs2, offset);
      return;
    } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
      CSd(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x3, 0x23);
}
301
302 // IMM ALU instructions (RV32I): opcode = 0x13, funct3 from 0x0 ~ 0x7
303
// ADDI: rd = rs1 + imm12. Selects among several compressed encodings
// (C.LI, C.ADDI, C.ADDI16SP, C.ADDI4SPN, C.MV, C.NOP) depending on the
// operand pattern; the branch order below is deliberate and mirrors the
// choices clang's assembler makes.
void Riscv64Assembler::Addi(XRegister rd, XRegister rs1, int32_t imm12) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero) {
      if (rs1 == Zero && IsInt<6>(imm12)) {
        // Materializing a small constant: addi rd, x0, imm -> c.li.
        CLi(rd, imm12);
        return;
      } else if (imm12 != 0) {
        if (rd == rs1) {
          // We're testing against clang's assembler and therefore
          // if both c.addi and c.addi16sp are viable, we use the c.addi just like clang.
          if (IsInt<6>(imm12)) {
            CAddi(rd, imm12);
            return;
          } else if (rd == SP && IsInt<10>(imm12) && IsAligned<16>(imm12)) {
            // Stack-pointer adjustment: c.addi16sp (16-byte scaled imm).
            CAddi16Sp(imm12);
            return;
          }
        } else if (IsShortReg(rd) && rs1 == SP && IsUint<10>(imm12) && IsAligned<4>(imm12)) {
          // Address of a stack slot: c.addi4spn (4-byte scaled, non-zero imm).
          CAddi4Spn(rd, imm12);
          return;
        }
      } else if (rs1 != Zero) {
        // addi rd, rs1, 0 with distinct non-zero registers is a move.
        CMv(rd, rs1);
        return;
      }
    } else if (rd == rs1 && imm12 == 0) {
      // addi x0, x0, 0 is the canonical NOP.
      CNop();
      return;
    }
  }

  EmitI(imm12, rs1, 0x0, rd, 0x13);
}
337
// SLTI: rd = (rs1 < imm12) ? 1 : 0, signed compare. No compressed form.
void Riscv64Assembler::Slti(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x2, rd, 0x13);
}

// SLTIU: rd = (rs1 < imm12) ? 1 : 0, unsigned compare of the
// sign-extended immediate. No compressed form.
void Riscv64Assembler::Sltiu(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x3, rd, 0x13);
}
345
Xori(XRegister rd,XRegister rs1,int32_t imm12)346 void Riscv64Assembler::Xori(XRegister rd, XRegister rs1, int32_t imm12) {
347 if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
348 if (rd == rs1 && IsShortReg(rd) && imm12 == -1) {
349 CNot(rd);
350 return;
351 }
352 }
353
354 EmitI(imm12, rs1, 0x4, rd, 0x13);
355 }
356
// ORI: rd = rs1 | imm12. No compressed form.
void Riscv64Assembler::Ori(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x6, rd, 0x13);
}
360
Andi(XRegister rd,XRegister rs1,int32_t imm12)361 void Riscv64Assembler::Andi(XRegister rd, XRegister rs1, int32_t imm12) {
362 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
363 if (rd == rs1 && IsShortReg(rd) && IsInt<6>(imm12)) {
364 CAndi(rd, imm12);
365 return;
366 }
367 }
368
369 EmitI(imm12, rs1, 0x7, rd, 0x13);
370 }
371
// 0x1 Split: 0x0(6b) + imm12(6b)
// SLLI: rd = rs1 << shamt (64-bit shift amount, 0..63).
// A non-zero destructive shift of a non-zero register compresses to C.SLLI.
void Riscv64Assembler::Slli(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == rs1 && rd != Zero && shamt != 0) {
      CSlli(rd, shamt);
      return;
    }
  }

  EmitI6(0x0, shamt, rs1, 0x1, rd, 0x13);
}
385
386 // 0x5 Split: 0x0(6b) + imm12(6b)
Srli(XRegister rd,XRegister rs1,int32_t shamt)387 void Riscv64Assembler::Srli(XRegister rd, XRegister rs1, int32_t shamt) {
388 CHECK_LT(static_cast<uint32_t>(shamt), 64u);
389
390 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
391 if (rd == rs1 && IsShortReg(rd) && shamt != 0) {
392 CSrli(rd, shamt);
393 return;
394 }
395 }
396
397 EmitI6(0x0, shamt, rs1, 0x5, rd, 0x13);
398 }
399
// 0x5 Split: 0x10(6b) + imm12(6b)
// SRAI: rd = rs1 >> shamt, arithmetic (sign-filling) shift, 0..63.
// A non-zero destructive shift of a compressed register -> C.SRAI.
void Riscv64Assembler::Srai(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == rs1 && IsShortReg(rd) && shamt != 0) {
      CSrai(rd, shamt);
      return;
    }
  }

  EmitI6(0x10, shamt, rs1, 0x5, rd, 0x13);
}
413
414 // ALU instructions (RV32I): opcode = 0x33, funct3 from 0x0 ~ 0x7
415
// ADD: rd = rs1 + rs2. Compresses to C.MV when one source is Zero, or to
// the destructive C.ADD when rd aliases a source; the branch structure
// distinguishes those cases exhaustively.
void Riscv64Assembler::Add(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero) {
      if (rs1 != Zero || rs2 != Zero) {
        if (rs1 == Zero) {
          // add rd, x0, rs2 is a register move.
          DCHECK_NE(rs2, Zero);
          CMv(rd, rs2);
          return;
        } else if (rs2 == Zero) {
          DCHECK_NE(rs1, Zero);
          CMv(rd, rs1);
          return;
        } else if (rd == rs1) {
          // Destructive add; C.ADD requires the addend to be non-zero.
          DCHECK_NE(rs2, Zero);
          CAdd(rd, rs2);
          return;
        } else if (rd == rs2) {
          // ADD is commutative, so rd aliasing rs2 also compresses.
          DCHECK_NE(rs1, Zero);
          CAdd(rd, rs1);
          return;
        }
      } else {
        // Both sources are Zero: the result is the constant 0.
        // TODO: we use clang for testing assembler and unfortunately it (clang 18.0.1) does not
        // support conversion from 'add rd, Zero, Zero' into 'c.li. rd, 0' so once clang supports it
        // the lines below should be uncommented

        // CLi(rd, 0);
        // return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x0, rd, 0x33);
}
450
Sub(XRegister rd,XRegister rs1,XRegister rs2)451 void Riscv64Assembler::Sub(XRegister rd, XRegister rs1, XRegister rs2) {
452 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
453 if (rd == rs1 && IsShortReg(rd) && IsShortReg(rs2)) {
454 CSub(rd, rs2);
455 return;
456 }
457 }
458
459 EmitR(0x20, rs2, rs1, 0x0, rd, 0x33);
460 }
461
// SLT: rd = (rs1 < rs2) ? 1 : 0, signed compare. No compressed form.
void Riscv64Assembler::Slt(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x02, rd, 0x33);
}

// SLTU: rd = (rs1 < rs2) ? 1 : 0, unsigned compare. No compressed form.
void Riscv64Assembler::Sltu(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x03, rd, 0x33);
}
469
Xor(XRegister rd,XRegister rs1,XRegister rs2)470 void Riscv64Assembler::Xor(XRegister rd, XRegister rs1, XRegister rs2) {
471 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
472 if (IsShortReg(rd)) {
473 if (rd == rs1 && IsShortReg(rs2)) {
474 CXor(rd, rs2);
475 return;
476 } else if (rd == rs2 && IsShortReg(rs1)) {
477 CXor(rd, rs1);
478 return;
479 }
480 }
481 }
482
483 EmitR(0x0, rs2, rs1, 0x04, rd, 0x33);
484 }
485
Or(XRegister rd,XRegister rs1,XRegister rs2)486 void Riscv64Assembler::Or(XRegister rd, XRegister rs1, XRegister rs2) {
487 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
488 if (IsShortReg(rd)) {
489 if (rd == rs1 && IsShortReg(rs2)) {
490 COr(rd, rs2);
491 return;
492 } else if (rd == rs2 && IsShortReg(rs1)) {
493 COr(rd, rs1);
494 return;
495 }
496 }
497 }
498
499 EmitR(0x0, rs2, rs1, 0x06, rd, 0x33);
500 }
501
// AND: rd = rs1 & rs2. Commutative, so the destructive C.AND applies when
// rd aliases either source and all registers are in the compressed set.
void Riscv64Assembler::And(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (IsShortReg(rd)) {
      if (rd == rs1 && IsShortReg(rs2)) {
        CAnd(rd, rs2);
        return;
      } else if (rd == rs2 && IsShortReg(rs1)) {
        CAnd(rd, rs1);
        return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x07, rd, 0x33);
}
517
// SLL: rd = rs1 << rs2, logical left shift by register. No compressed form.
void Riscv64Assembler::Sll(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x01, rd, 0x33);
}

// SRL: rd = rs1 >> rs2, logical right shift by register. No compressed form.
void Riscv64Assembler::Srl(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x05, rd, 0x33);
}

// SRA: rd = rs1 >> rs2, arithmetic right shift by register (funct7 = 0x20).
void Riscv64Assembler::Sra(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x20, rs2, rs1, 0x05, rd, 0x33);
}
529
530 // 32bit Imm ALU instructions (RV64I): opcode = 0x1b, funct3 from 0x0, 0x1, 0x5
531
// ADDIW: 32-bit rd = rs1 + imm12, result sign-extended to 64 bits.
// The destructive form compresses to C.ADDIW; loading a small constant
// from Zero compresses to C.LI.
void Riscv64Assembler::Addiw(XRegister rd, XRegister rs1, int32_t imm12) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero && IsInt<6>(imm12)) {
      if (rd == rs1) {
        CAddiw(rd, imm12);
        return;
      } else if (rs1 == Zero) {
        CLi(rd, imm12);
        return;
      }
    }
  }

  EmitI(imm12, rs1, 0x0, rd, 0x1b);
}
547
// SLLIW: 32-bit logical left shift by immediate (0..31), sign-extended result.
void Riscv64Assembler::Slliw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x0, shamt, rs1, 0x1, rd, 0x1b);
}

// SRLIW: 32-bit logical right shift by immediate (0..31).
void Riscv64Assembler::Srliw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x0, shamt, rs1, 0x5, rd, 0x1b);
}

// SRAIW: 32-bit arithmetic right shift by immediate (0..31, funct7 = 0x20).
void Riscv64Assembler::Sraiw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x20, shamt, rs1, 0x5, rd, 0x1b);
}
562
563 // 32bit ALU instructions (RV64I): opcode = 0x3b, funct3 from 0x0 ~ 0x7
564
Addw(XRegister rd,XRegister rs1,XRegister rs2)565 void Riscv64Assembler::Addw(XRegister rd, XRegister rs1, XRegister rs2) {
566 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
567 if (IsShortReg(rd)) {
568 if (rd == rs1 && IsShortReg(rs2)) {
569 CAddw(rd, rs2);
570 return;
571 } else if (rd == rs2 && IsShortReg(rs1)) {
572 CAddw(rd, rs1);
573 return;
574 }
575 }
576 }
577
578 EmitR(0x0, rs2, rs1, 0x0, rd, 0x3b);
579 }
580
Subw(XRegister rd,XRegister rs1,XRegister rs2)581 void Riscv64Assembler::Subw(XRegister rd, XRegister rs1, XRegister rs2) {
582 if (IsExtensionEnabled(Riscv64Extension::kZca)) {
583 if (rd == rs1 && IsShortReg(rd) && IsShortReg(rs2)) {
584 CSubw(rd, rs2);
585 return;
586 }
587 }
588
589 EmitR(0x20, rs2, rs1, 0x0, rd, 0x3b);
590 }
591
// SLLW: 32-bit logical left shift by register. No compressed form.
void Riscv64Assembler::Sllw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x1, rd, 0x3b);
}

// SRLW: 32-bit logical right shift by register. No compressed form.
void Riscv64Assembler::Srlw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x5, rd, 0x3b);
}

// SRAW: 32-bit arithmetic right shift by register (funct7 = 0x20).
void Riscv64Assembler::Sraw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x20, rs2, rs1, 0x5, rd, 0x3b);
}
603
604 // Environment call and breakpoint (RV32I), opcode = 0x73
605
// ECALL: environment (system) call.
void Riscv64Assembler::Ecall() { EmitI(0x0, 0x0, 0x0, 0x0, 0x73); }

// EBREAK: breakpoint trap. Compresses to C.EBREAK whenever Zca is enabled.
void Riscv64Assembler::Ebreak() {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    CEbreak();
    return;
  }

  EmitI(0x1, 0x0, 0x0, 0x0, 0x73);
}
616
617 // Fence instruction (RV32I): opcode = 0xf, funct3 = 0
618
// FENCE: memory ordering barrier. `pred`/`succ` are 4-bit masks of the
// predecessor/successor access types (see kFenceRead/kFenceWrite etc.).
void Riscv64Assembler::Fence(uint32_t pred, uint32_t succ) {
  DCHECK(IsUint<4>(pred));
  DCHECK(IsUint<4>(succ));
  // fm field = 0x0 selects a normal fence; immediate fits in 12 bits.
  EmitI(/* normal fence */ 0x0 << 8 | pred << 4 | succ, 0x0, 0x0, 0x0, 0xf);
}

// FENCE.TSO: total-store-order fence (fm field = 0x8) with RW/RW masks.
void Riscv64Assembler::FenceTso() {
  static constexpr uint32_t kPred = kFenceWrite | kFenceRead;
  static constexpr uint32_t kSucc = kFenceWrite | kFenceRead;
  // ToInt12 sign-extends the 12-bit immediate (bit 11 is set here by fm=0x8).
  EmitI(ToInt12(/* TSO fence */ 0x8 << 8 | kPred << 4 | kSucc), 0x0, 0x0, 0x0, 0xf);
}
630
631 //////////////////////////////// RV64 "I" Instructions END ////////////////////////////////
632
633 /////////////////////////// RV64 "Zifencei" Instructions START ////////////////////////////
634
635 // "Zifencei" Standard Extension, opcode = 0xf, funct3 = 1
// FENCE.I: instruction-fetch fence (Zifencei).
void Riscv64Assembler::FenceI() {
  AssertExtensionsEnabled(Riscv64Extension::kZifencei);
  EmitI(0x0, 0x0, 0x1, 0x0, 0xf);
}
640
641 //////////////////////////// RV64 "Zifencei" Instructions END /////////////////////////////
642
643 /////////////////////////////// RV64 "M" Instructions START ///////////////////////////////
644
645 // RV32M Standard Extension: opcode = 0x33, funct3 from 0x0 ~ 0x7
646
Mul(XRegister rd,XRegister rs1,XRegister rs2)647 void Riscv64Assembler::Mul(XRegister rd, XRegister rs1, XRegister rs2) {
648 AssertExtensionsEnabled(Riscv64Extension::kM);
649
650 if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
651 if (IsShortReg(rd)) {
652 if (rd == rs1 && IsShortReg(rs2)) {
653 CMul(rd, rs2);
654 return;
655 } else if (rd == rs2 && IsShortReg(rs1)) {
656 CMul(rd, rs1);
657 return;
658 }
659 }
660 }
661
662 EmitR(0x1, rs2, rs1, 0x0, rd, 0x33);
663 }
664
// MULH: high 64 bits of the signed*signed 128-bit product.
void Riscv64Assembler::Mulh(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x1, rd, 0x33);
}

// MULHSU: high 64 bits of the signed(rs1)*unsigned(rs2) product.
void Riscv64Assembler::Mulhsu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x2, rd, 0x33);
}

// MULHU: high 64 bits of the unsigned*unsigned product.
void Riscv64Assembler::Mulhu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x3, rd, 0x33);
}

// DIV: signed 64-bit division.
void Riscv64Assembler::Div(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x4, rd, 0x33);
}

// DIVU: unsigned 64-bit division.
void Riscv64Assembler::Divu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x5, rd, 0x33);
}

// REM: signed 64-bit remainder.
void Riscv64Assembler::Rem(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x6, rd, 0x33);
}

// REMU: unsigned 64-bit remainder.
void Riscv64Assembler::Remu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x7, rd, 0x33);
}
699
700 // RV64M Standard Extension: opcode = 0x3b, funct3 0x0 and from 0x4 ~ 0x7
701
// MULW: 32-bit multiply, result sign-extended to 64 bits.
void Riscv64Assembler::Mulw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x0, rd, 0x3b);
}

// DIVW: signed 32-bit division, result sign-extended.
void Riscv64Assembler::Divw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x4, rd, 0x3b);
}

// DIVUW: unsigned 32-bit division, result sign-extended.
void Riscv64Assembler::Divuw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x5, rd, 0x3b);
}

// REMW: signed 32-bit remainder, result sign-extended.
void Riscv64Assembler::Remw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x6, rd, 0x3b);
}

// REMUW: unsigned 32-bit remainder, result sign-extended.
void Riscv64Assembler::Remuw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x7, rd, 0x3b);
}
726
727 //////////////////////////////// RV64 "M" Instructions END ////////////////////////////////
728
729 /////////////////////////////// RV64 "A" Instructions START ///////////////////////////////
730
// LR.W: load-reserved word from [rs1]. A pure "release" ordering makes no
// sense on a load, hence the CHECK.
void Riscv64Assembler::LrW(XRegister rd, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kRelease);
  EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x2, rd, 0x2f);
}

// LR.D: load-reserved doubleword from [rs1].
void Riscv64Assembler::LrD(XRegister rd, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kRelease);
  EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x3, rd, 0x2f);
}

// SC.W: store-conditional word to [rs1]; rd receives the success flag.
// A pure "acquire" ordering makes no sense on a store, hence the CHECK.
void Riscv64Assembler::ScW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kAcquire);
  EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// SC.D: store-conditional doubleword to [rs1].
void Riscv64Assembler::ScD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kAcquire);
  EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
754
// AMO* read-modify-write atomics ("A" extension). EmitR4 fields below:
// funct5 (operation), aq/rl ordering bits, rs2 (operand), rs1 (address),
// funct3 (0x2 = word, 0x3 = doubleword), rd (receives the old value),
// opcode 0x2f.

// AMOSWAP.W: atomically swap rs2 with the word at [rs1].
void Riscv64Assembler::AmoSwapW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOSWAP.D: atomically swap rs2 with the doubleword at [rs1].
void Riscv64Assembler::AmoSwapD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOADD.W: atomic add to the word at [rs1].
void Riscv64Assembler::AmoAddW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOADD.D: atomic add to the doubleword at [rs1].
void Riscv64Assembler::AmoAddD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOXOR.W: atomic xor with the word at [rs1].
void Riscv64Assembler::AmoXorW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOXOR.D: atomic xor with the doubleword at [rs1].
void Riscv64Assembler::AmoXorD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOAND.W: atomic and with the word at [rs1].
void Riscv64Assembler::AmoAndW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOAND.D: atomic and with the doubleword at [rs1].
void Riscv64Assembler::AmoAndD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOOR.W: atomic or with the word at [rs1].
void Riscv64Assembler::AmoOrW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOOR.D: atomic or with the doubleword at [rs1].
void Riscv64Assembler::AmoOrD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOMIN.W: atomic signed minimum with the word at [rs1].
void Riscv64Assembler::AmoMinW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOMIN.D: atomic signed minimum with the doubleword at [rs1].
void Riscv64Assembler::AmoMinD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOMAX.W: atomic signed maximum with the word at [rs1].
void Riscv64Assembler::AmoMaxW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOMAX.D: atomic signed maximum with the doubleword at [rs1].
void Riscv64Assembler::AmoMaxD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOMINU.W: atomic unsigned minimum with the word at [rs1].
void Riscv64Assembler::AmoMinuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOMINU.D: atomic unsigned minimum with the doubleword at [rs1].
void Riscv64Assembler::AmoMinuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}

// AMOMAXU.W: atomic unsigned maximum with the word at [rs1].
void Riscv64Assembler::AmoMaxuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}

// AMOMAXU.D: atomic unsigned maximum with the doubleword at [rs1].
void Riscv64Assembler::AmoMaxuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
844
845 /////////////////////////////// RV64 "A" Instructions END ///////////////////////////////
846
847 ///////////////////////////// RV64 "Zicsr" Instructions START /////////////////////////////
848
849 // "Zicsr" Standard Extension, opcode = 0x73, funct3 from 0x1 ~ 0x3 and 0x5 ~ 0x7
850
Csrrw(XRegister rd,uint32_t csr,XRegister rs1)851 void Riscv64Assembler::Csrrw(XRegister rd, uint32_t csr, XRegister rs1) {
852 AssertExtensionsEnabled(Riscv64Extension::kZicsr);
853 EmitI(ToInt12(csr), rs1, 0x1, rd, 0x73);
854 }
855
Csrrs(XRegister rd,uint32_t csr,XRegister rs1)856 void Riscv64Assembler::Csrrs(XRegister rd, uint32_t csr, XRegister rs1) {
857 AssertExtensionsEnabled(Riscv64Extension::kZicsr);
858 EmitI(ToInt12(csr), rs1, 0x2, rd, 0x73);
859 }
860
// CSRRC: read CSR into rd and clear the bits given by rs1; funct3=0x3.
void Riscv64Assembler::Csrrc(XRegister rd, uint32_t csr, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
  EmitI(ToInt12(csr), rs1, 0x3, rd, 0x73);
}
865
// CSRRWI: CSR swap with a 5-bit immediate; `uimm5` is encoded in the rs1 field.
void Riscv64Assembler::Csrrwi(XRegister rd, uint32_t csr, uint32_t uimm5) {
  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
  EmitI(ToInt12(csr), uimm5, 0x5, rd, 0x73);
}
870
// CSRRSI: CSR bit-set with a 5-bit immediate; `uimm5` is encoded in the rs1 field.
void Riscv64Assembler::Csrrsi(XRegister rd, uint32_t csr, uint32_t uimm5) {
  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
  EmitI(ToInt12(csr), uimm5, 0x6, rd, 0x73);
}
875
// CSRRCI: CSR bit-clear with a 5-bit immediate; `uimm5` is encoded in the rs1 field.
void Riscv64Assembler::Csrrci(XRegister rd, uint32_t csr, uint32_t uimm5) {
  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
  EmitI(ToInt12(csr), uimm5, 0x7, rd, 0x73);
}
880
881 ////////////////////////////// RV64 "Zicsr" Instructions END //////////////////////////////
882
883 /////////////////////////////// RV64 "FD" Instructions START ///////////////////////////////
884
885 // FP load/store instructions (RV32F+RV32D): opcode = 0x07, 0x27
886
// FLW: load a 32-bit FP value from memory at rs1+offset into rd (I-type, opcode 0x07).
void Riscv64Assembler::FLw(FRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
  EmitI(offset, rs1, 0x2, rd, 0x07);
}
891
FLd(FRegister rd,XRegister rs1,int32_t offset)892 void Riscv64Assembler::FLd(FRegister rd, XRegister rs1, int32_t offset) {
893 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
894
895 if (IsExtensionEnabled(Riscv64Extension::kZcd)) {
896 if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
897 CFLdsp(rd, offset);
898 return;
899 } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
900 CFLd(rd, rs1, offset);
901 return;
902 }
903 }
904
905 EmitI(offset, rs1, 0x3, rd, 0x07);
906 }
907
// FSW: store the 32-bit FP value in rs2 to memory at rs1+offset (S-type, opcode 0x27).
void Riscv64Assembler::FSw(FRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
  EmitS(offset, rs2, rs1, 0x2, 0x27);
}
912
FSd(FRegister rs2,XRegister rs1,int32_t offset)913 void Riscv64Assembler::FSd(FRegister rs2, XRegister rs1, int32_t offset) {
914 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
915
916 if (IsExtensionEnabled(Riscv64Extension::kZcd)) {
917 if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
918 CFSdsp(rs2, offset);
919 return;
920 } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
921 CFSd(rs2, rs1, offset);
922 return;
923 }
924 }
925
926 EmitS(offset, rs2, rs1, 0x3, 0x27);
927 }
928
929 // FP FMA instructions (RV32F+RV32D): opcode = 0x43, 0x47, 0x4b, 0x4f
930
// FMADD.S: fused multiply-add, single precision (rs1*rs2 + rs3); fmt=0x0, opcode 0x43.
void Riscv64Assembler::FMAddS(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
}
936
// FMADD.D: fused multiply-add, double precision; fmt=0x1, opcode 0x43.
void Riscv64Assembler::FMAddD(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
}
942
// FMSUB.S: fused multiply-subtract, single precision; fmt=0x0, opcode 0x47.
void Riscv64Assembler::FMSubS(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
}
948
// FMSUB.D: fused multiply-subtract, double precision; fmt=0x1, opcode 0x47.
void Riscv64Assembler::FMSubD(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
}
954
// FNMSUB.S: negated fused multiply-subtract, single precision; fmt=0x0, opcode 0x4b.
void Riscv64Assembler::FNMSubS(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
}
960
// FNMSUB.D: negated fused multiply-subtract, double precision; fmt=0x1, opcode 0x4b.
void Riscv64Assembler::FNMSubD(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
}
966
// FNMADD.S: negated fused multiply-add, single precision; fmt=0x0, opcode 0x4f.
void Riscv64Assembler::FNMAddS(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
}
972
// FNMADD.D: negated fused multiply-add, double precision; fmt=0x1, opcode 0x4f.
void Riscv64Assembler::FNMAddD(
    FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
}
978
979 // Simple FP instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b0XXXX0D
980
// FADD.S: single-precision FP add with explicit rounding mode; funct7=0x0.
void Riscv64Assembler::FAddS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
985
// FADD.D: double-precision FP add with explicit rounding mode; funct7=0x1.
void Riscv64Assembler::FAddD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
990
// FSUB.S: single-precision FP subtract; funct7=0x4.
void Riscv64Assembler::FSubS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x4, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
995
// FSUB.D: double-precision FP subtract; funct7=0x5.
void Riscv64Assembler::FSubD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x5, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1000
// FMUL.S: single-precision FP multiply; funct7=0x8.
void Riscv64Assembler::FMulS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x8, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1005
// FMUL.D: double-precision FP multiply; funct7=0x9.
void Riscv64Assembler::FMulD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x9, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1010
// FDIV.S: single-precision FP divide; funct7=0xc.
void Riscv64Assembler::FDivS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0xc, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1015
// FDIV.D: double-precision FP divide; funct7=0xd.
void Riscv64Assembler::FDivD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0xd, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1020
// FSQRT.S: single-precision square root; unary, so the rs2 field is fixed at 0.
void Riscv64Assembler::FSqrtS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x2c, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1025
// FSQRT.D: double-precision square root; unary, so the rs2 field is fixed at 0.
void Riscv64Assembler::FSqrtD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x2d, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1030
// FSGNJ.S: rd = magnitude of rs1 with the sign of rs2 (single); funct3=0x0.
void Riscv64Assembler::FSgnjS(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x10, rs2, rs1, 0x0, rd, 0x53);
}
1035
// FSGNJ.D: rd = magnitude of rs1 with the sign of rs2 (double); funct3=0x0.
void Riscv64Assembler::FSgnjD(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x11, rs2, rs1, 0x0, rd, 0x53);
}
1040
// FSGNJN.S: sign-injection with negated sign of rs2 (single); funct3=0x1.
void Riscv64Assembler::FSgnjnS(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x10, rs2, rs1, 0x1, rd, 0x53);
}
1045
// FSGNJN.D: sign-injection with negated sign of rs2 (double); funct3=0x1.
void Riscv64Assembler::FSgnjnD(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x11, rs2, rs1, 0x1, rd, 0x53);
}
1050
// FSGNJX.S: sign-injection with XOR of the two signs (single); funct3=0x2.
void Riscv64Assembler::FSgnjxS(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x10, rs2, rs1, 0x2, rd, 0x53);
}
1055
// FSGNJX.D: sign-injection with XOR of the two signs (double); funct3=0x2.
void Riscv64Assembler::FSgnjxD(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x11, rs2, rs1, 0x2, rd, 0x53);
}
1060
// FMIN.S: single-precision minimum; funct7=0x14, funct3=0x0.
void Riscv64Assembler::FMinS(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x14, rs2, rs1, 0x0, rd, 0x53);
}
1065
// FMIN.D: double-precision minimum; funct7=0x15, funct3=0x0.
void Riscv64Assembler::FMinD(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x15, rs2, rs1, 0x0, rd, 0x53);
}
1070
// FMAX.S: single-precision maximum; funct7=0x14, funct3=0x1.
void Riscv64Assembler::FMaxS(FRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x14, rs2, rs1, 0x1, rd, 0x53);
}
1075
FMaxD(FRegister rd,FRegister rs1,FRegister rs2)1076 void Riscv64Assembler::FMaxD(FRegister rd, FRegister rs1, FRegister rs2) {
1077 EmitR(0x15, rs2, rs1, 0x1, rd, 0x53);
1078 AssertExtensionsEnabled(Riscv64Extension::kD);
1079 }
1080
// FCVT.S.D: narrow double to single; needs both "F" and "D". May round, so `frm` matters.
void Riscv64Assembler::FCvtSD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
  EmitR(0x20, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1085
// FCVT.D.S: widen single to double; needs both "F" and "D".
void Riscv64Assembler::FCvtDS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
  // Note: The `frm` is useless, the result can represent every value of the source exactly.
  EmitR(0x21, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1091
1092 // FP compare instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b101000D
1093
// FEQ.S: rd = (rs1 == rs2) for single precision; funct7=0x50, funct3=0x2.
void Riscv64Assembler::FEqS(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x50, rs2, rs1, 0x2, rd, 0x53);
}
1098
// FEQ.D: rd = (rs1 == rs2) for double precision; funct7=0x51, funct3=0x2.
void Riscv64Assembler::FEqD(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x51, rs2, rs1, 0x2, rd, 0x53);
}
1103
// FLT.S: rd = (rs1 < rs2) for single precision; funct3=0x1.
void Riscv64Assembler::FLtS(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x50, rs2, rs1, 0x1, rd, 0x53);
}
1108
// FLT.D: rd = (rs1 < rs2) for double precision; funct3=0x1.
void Riscv64Assembler::FLtD(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x51, rs2, rs1, 0x1, rd, 0x53);
}
1113
// FLE.S: rd = (rs1 <= rs2) for single precision; funct3=0x0.
void Riscv64Assembler::FLeS(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x50, rs2, rs1, 0x0, rd, 0x53);
}
1118
// FLE.D: rd = (rs1 <= rs2) for double precision; funct3=0x0.
void Riscv64Assembler::FLeD(XRegister rd, FRegister rs1, FRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x51, rs2, rs1, 0x0, rd, 0x53);
}
1123
1124 // FP conversion instructions (RV32F+RV32D+RV64F+RV64D): opcode = 0x53, funct7 = 0b110X00D
1125
// FCVT.W.S: single to signed 32-bit integer; rs2 field selects the target (0x0 = W).
void Riscv64Assembler::FCvtWS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x60, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1130
// FCVT.W.D: double to signed 32-bit integer.
void Riscv64Assembler::FCvtWD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x61, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1135
// FCVT.WU.S: single to unsigned 32-bit integer; rs2 field = 0x1 (WU).
void Riscv64Assembler::FCvtWuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x60, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1140
// FCVT.WU.D: double to unsigned 32-bit integer.
void Riscv64Assembler::FCvtWuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x61, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1145
// FCVT.L.S: single to signed 64-bit integer (RV64); rs2 field = 0x2 (L).
void Riscv64Assembler::FCvtLS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x60, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1150
// FCVT.L.D: double to signed 64-bit integer (RV64).
void Riscv64Assembler::FCvtLD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x61, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1155
// FCVT.LU.S: single to unsigned 64-bit integer (RV64); rs2 field = 0x3 (LU).
void Riscv64Assembler::FCvtLuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x60, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1160
// FCVT.LU.D: double to unsigned 64-bit integer (RV64).
void Riscv64Assembler::FCvtLuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x61, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1165
// FCVT.S.W: signed 32-bit integer to single; may round, so `frm` matters.
void Riscv64Assembler::FCvtSW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x68, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1170
// FCVT.D.W: signed 32-bit integer to double.
void Riscv64Assembler::FCvtDW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  // Note: The `frm` is useless, the result can represent every value of the source exactly.
  EmitR(0x69, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1176
// FCVT.S.WU: unsigned 32-bit integer to single; may round, so `frm` matters.
void Riscv64Assembler::FCvtSWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x68, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1181
// FCVT.D.WU: unsigned 32-bit integer to double.
void Riscv64Assembler::FCvtDWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  // Note: The `frm` is useless, the result can represent every value of the source exactly.
  EmitR(0x69, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1187
// FCVT.S.L: signed 64-bit integer to single (RV64); may round, so `frm` matters.
void Riscv64Assembler::FCvtSL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x68, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1192
// FCVT.D.L: signed 64-bit integer to double (RV64); may round, so `frm` matters.
void Riscv64Assembler::FCvtDL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x69, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1197
// FCVT.S.LU: unsigned 64-bit integer to single (RV64); may round, so `frm` matters.
void Riscv64Assembler::FCvtSLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x68, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1202
// FCVT.D.LU: unsigned 64-bit integer to double (RV64); may round, so `frm` matters.
void Riscv64Assembler::FCvtDLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x69, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
}
1207
1208 // FP move instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x0, funct7 = 0b111X00D
1209
// FMV.X.W: bit-for-bit move of the low 32 FP bits into an integer register; funct7=0x70.
void Riscv64Assembler::FMvXW(XRegister rd, FRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x70, 0x0, rs1, 0x0, rd, 0x53);
}
1214
// FMV.X.D: bit-for-bit move of a 64-bit FP register into an integer register; funct7=0x71.
void Riscv64Assembler::FMvXD(XRegister rd, FRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x71, 0x0, rs1, 0x0, rd, 0x53);
}
1219
// FMV.W.X: bit-for-bit move of the low 32 integer bits into an FP register; funct7=0x78.
void Riscv64Assembler::FMvWX(FRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x78, 0x0, rs1, 0x0, rd, 0x53);
}
1224
// FMV.D.X: bit-for-bit move of a 64-bit integer register into an FP register; funct7=0x79.
void Riscv64Assembler::FMvDX(FRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x79, 0x0, rs1, 0x0, rd, 0x53);
}
1229
1230 // FP classify instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x1, funct7 = 0b111X00D
1231
// FCLASS.S: classify the single-precision value in rs1 into a mask in rd; funct3=0x1.
void Riscv64Assembler::FClassS(XRegister rd, FRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kF);
  EmitR(0x70, 0x0, rs1, 0x1, rd, 0x53);
}
1236
// FCLASS.D: classify the double-precision value in rs1 into a mask in rd; funct3=0x1.
void Riscv64Assembler::FClassD(XRegister rd, FRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x71, 0x0, rs1, 0x1, rd, 0x53);
}
1241
1242 /////////////////////////////// RV64 "FD" Instructions END ///////////////////////////////
1243
1244 /////////////////////////////// RV64 "C" Instructions START /////////////////////////////
1245
// C.LWSP: compressed SP-relative word load; rd must not be Zero per the spec.
void Riscv64Assembler::CLwsp(XRegister rd, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  EmitCI(0b010u, rd, ExtractOffset52_76(offset), 0b10u);
}
1251
// C.LDSP: compressed SP-relative doubleword load; rd must not be Zero per the spec.
void Riscv64Assembler::CLdsp(XRegister rd, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  EmitCI(0b011u, rd, ExtractOffset53_86(offset), 0b10u);
}
1257
// C.FLDSP: compressed SP-relative FP doubleword load (Zcd).
void Riscv64Assembler::CFLdsp(FRegister rd, int32_t offset) {
  AssertExtensionsEnabled(
      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
  EmitCI(0b001u, rd, ExtractOffset53_86(offset), 0b10u);
}
1263
// C.SWSP: compressed SP-relative word store.
void Riscv64Assembler::CSwsp(XRegister rs2, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCSS(0b110u, ExtractOffset52_76(offset), rs2, 0b10u);
}
1268
// C.SDSP: compressed SP-relative doubleword store.
void Riscv64Assembler::CSdsp(XRegister rs2, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCSS(0b111u, ExtractOffset53_86(offset), rs2, 0b10u);
}
1273
// C.FSDSP: compressed SP-relative FP doubleword store (Zcd).
void Riscv64Assembler::CFSdsp(FRegister rs2, int32_t offset) {
  AssertExtensionsEnabled(
      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
  EmitCSS(0b101u, ExtractOffset53_86(offset), rs2, 0b10u);
}
1279
// C.LW: compressed word load; `_s` suffix marks "short" (compressible) registers.
void Riscv64Assembler::CLw(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCM(0b010u, ExtractOffset52_6(offset), rs1_s, rd_s, 0b00u);
}
1284
// C.LD: compressed doubleword load between short registers.
void Riscv64Assembler::CLd(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCM(0b011u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
}
1289
// C.FLD: compressed FP doubleword load between short registers (Zcd).
void Riscv64Assembler::CFLd(FRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(
      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
  EmitCM(0b001u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
}
1295
// C.SW: compressed word store between short registers.
void Riscv64Assembler::CSw(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCM(0b110u, ExtractOffset52_6(offset), rs1_s, rs2_s, 0b00u);
}
1300
// C.SD: compressed doubleword store between short registers.
void Riscv64Assembler::CSd(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
  EmitCM(0b111u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
}
1305
// C.FSD: compressed FP doubleword store between short registers (Zcd).
void Riscv64Assembler::CFSd(FRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(
      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
  EmitCM(0b101u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
}
1311
// C.LI: load a sign-extended 6-bit immediate into rd (rd != Zero per the spec).
void Riscv64Assembler::CLi(XRegister rd, int32_t imm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  DCHECK(IsInt<6>(imm));
  EmitCI(0b010u, rd, EncodeInt6(imm), 0b01u);
}
1318
// C.LUI: load a non-zero 6-bit immediate into bits 17:12 of rd.
// rd may be neither Zero nor SP (the rd==SP encoding is C.ADDI16SP).
void Riscv64Assembler::CLui(XRegister rd, uint32_t nzimm6) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  DCHECK_NE(rd, SP);
  DCHECK(IsImmCLuiEncodable(nzimm6));
  EmitCI(0b011u, rd, nzimm6 & MaskLeastSignificant<uint32_t>(6), 0b01u);
}
1326
// C.ADDI: rd += non-zero 6-bit signed immediate (rd != Zero per the spec).
void Riscv64Assembler::CAddi(XRegister rd, int32_t nzimm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  DCHECK_NE(nzimm, 0);
  EmitCI(0b000u, rd, EncodeInt6(nzimm), 0b01u);
}
1333
// C.ADDIW: 32-bit add-immediate with sign extension of the result (rd != Zero).
void Riscv64Assembler::CAddiw(XRegister rd, int32_t imm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  EmitCI(0b001u, rd, EncodeInt6(imm), 0b01u);
}
1339
// C.ADDI16SP: SP += non-zero, 16-byte-aligned 10-bit signed immediate.
// Shares the C.LUI opcode; rd is hard-wired to SP.
void Riscv64Assembler::CAddi16Sp(int32_t nzimm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(nzimm, 0);
  DCHECK(IsAligned<16>(nzimm));
  DCHECK(IsInt<10>(nzimm));

  uint32_t unzimm = static_cast<uint32_t>(nzimm);

  // The immediate bits are scattered across the encoding:
  // nzimm[9]
  uint32_t imms1 = BitFieldExtract(unzimm, 9, 1);
  // nzimm[4|6|8:7|5]
  uint32_t imms0 = (BitFieldExtract(unzimm, 4, 1) << 4) |
                   (BitFieldExtract(unzimm, 6, 1) << 3) |
                   (BitFieldExtract(unzimm, 7, 2) << 1) |
                   BitFieldExtract(unzimm, 5, 1);

  EmitCI(0b011u, SP, BitFieldInsert(imms0, imms1, 5, 1), 0b01u);
}
1358
// C.ADDI4SPN: rd_s = SP + non-zero, 4-byte-aligned 10-bit unsigned immediate.
void Riscv64Assembler::CAddi4Spn(XRegister rd_s, uint32_t nzuimm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(nzuimm, 0u);
  DCHECK(IsAligned<4>(nzuimm));
  DCHECK(IsUint<10>(nzuimm));

  // Scatter the immediate into the CIW layout:
  // nzuimm[5:4|9:6|2|3]
  uint32_t uimm = (BitFieldExtract(nzuimm, 4, 2) << 6) |
                  (BitFieldExtract(nzuimm, 6, 4) << 2) |
                  (BitFieldExtract(nzuimm, 2, 1) << 1) |
                  BitFieldExtract(nzuimm, 3, 1);

  EmitCIW(0b000u, uimm, rd_s, 0b00u);
}
1373
CSlli(XRegister rd,int32_t shamt)1374 void Riscv64Assembler::CSlli(XRegister rd, int32_t shamt) {
1375 AssertExtensionsEnabled(Riscv64Extension::kZca);
1376 DCHECK_NE(shamt, 0);
1377 DCHECK_NE(rd, Zero);
1378 EmitCI(0b000u, rd, shamt, 0b10u);
1379 }
1380
// C.SRLI: logical right shift of a short register by a non-zero 6-bit amount.
void Riscv64Assembler::CSrli(XRegister rd_s, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(shamt, 0);
  DCHECK(IsUint<6>(shamt));
  EmitCBArithmetic(0b100u, 0b00u, shamt, rd_s, 0b01u);
}
1387
// C.SRAI: arithmetic right shift of a short register by a non-zero 6-bit amount.
void Riscv64Assembler::CSrai(XRegister rd_s, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(shamt, 0);
  DCHECK(IsUint<6>(shamt));
  EmitCBArithmetic(0b100u, 0b01u, shamt, rd_s, 0b01u);
}
1394
// C.ANDI: rd_s &= sign-extended 6-bit immediate.
void Riscv64Assembler::CAndi(XRegister rd_s, int32_t imm) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK(IsInt<6>(imm));
  EmitCBArithmetic(0b100u, 0b10u, imm, rd_s, 0b01u);
}
1400
// C.MV: rd = rs2; both operands must be non-Zero (rs2 == Zero encodes C.JR instead).
void Riscv64Assembler::CMv(XRegister rd, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  DCHECK_NE(rs2, Zero);
  EmitCR(0b1000u, rd, rs2, 0b10u);
}
1407
// C.ADD: rd += rs2; both operands must be non-Zero (rs2 == Zero encodes C.JALR instead).
void Riscv64Assembler::CAdd(XRegister rd, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rd, Zero);
  DCHECK_NE(rs2, Zero);
  EmitCR(0b1001u, rd, rs2, 0b10u);
}
1414
// C.AND: rd_s &= rs2_s (CA format, funct2=0b11).
void Riscv64Assembler::CAnd(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100011u, rd_s, 0b11u, rs2_s, 0b01u);
}
1419
// C.OR: rd_s |= rs2_s (CA format, funct2=0b10).
void Riscv64Assembler::COr(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100011u, rd_s, 0b10u, rs2_s, 0b01u);
}
1424
// C.XOR: rd_s ^= rs2_s (CA format, funct2=0b01).
void Riscv64Assembler::CXor(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100011u, rd_s, 0b01u, rs2_s, 0b01u);
}
1429
// C.SUB: rd_s -= rs2_s (CA format, funct2=0b00).
void Riscv64Assembler::CSub(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100011u, rd_s, 0b00u, rs2_s, 0b01u);
}
1434
// C.ADDW: 32-bit add with sign extension of the result (funct6=0b100111).
void Riscv64Assembler::CAddw(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100111u, rd_s, 0b01u, rs2_s, 0b01u);
}
1439
// C.SUBW: 32-bit subtract with sign extension of the result.
void Riscv64Assembler::CSubw(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCAReg(0b100111u, rd_s, 0b00u, rs2_s, 0b01u);
}
1444
1445 // "Zcb" Standard Extension, part of "C", opcode = 0b00, 0b01, funct3 = 0b100.
1446
// C.LBU (Zcb): compressed unsigned byte load; 2-bit offset packed by EncodeOffset0_1.
void Riscv64Assembler::CLbu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  EmitCAReg(0b100000u, rs1_s, EncodeOffset0_1(offset), rd_s, 0b00u);
}
1451
// C.LHU (Zcb): compressed unsigned halfword load; only offset bit 1 is encoded
// (bit 0 must be zero because the offset is 2-byte aligned).
void Riscv64Assembler::CLhu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  DCHECK(IsUint<2>(offset));
  DCHECK_ALIGNED(offset, 2);
  EmitCAReg(0b100001u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
}
1458
// C.LH (Zcb): compressed signed halfword load; the 0b10 bit distinguishes it from C.LHU.
void Riscv64Assembler::CLh(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  DCHECK(IsUint<2>(offset));
  DCHECK_ALIGNED(offset, 2);
  EmitCAReg(0b100001u, rs1_s, 0b10 | BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
}
1465
// C.SB (Zcb): compressed byte store; 2-bit offset packed by EncodeOffset0_1.
void Riscv64Assembler::CSb(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  EmitCAReg(0b100010u, rs1_s, EncodeOffset0_1(offset), rs2_s, 0b00u);
}
1470
// C.SH (Zcb): compressed halfword store; only offset bit 1 is encoded (2-byte aligned).
void Riscv64Assembler::CSh(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  DCHECK(IsUint<2>(offset));
  DCHECK_ALIGNED(offset, 2);
  EmitCAReg(0b100011u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rs2_s, 0b00u);
}
1477
// C.ZEXT.B (Zcb): zero-extend the low byte of rd_rs1_s in place.
void Riscv64Assembler::CZextB(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b000u, 0b01u);
}
1482
// C.SEXT.B (Zcb, needs Zbb): sign-extend the low byte of rd_rs1_s in place.
void Riscv64Assembler::CSextB(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b001u, 0b01u);
}
1487
// C.ZEXT.H (Zcb, needs Zbb): zero-extend the low halfword of rd_rs1_s in place.
void Riscv64Assembler::CZextH(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b010u, 0b01u);
}
1492
// C.SEXT.H (Zcb, needs Zbb): sign-extend the low halfword of rd_rs1_s in place.
void Riscv64Assembler::CSextH(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b011u, 0b01u);
}
1497
// C.ZEXT.W (Zcb, needs Zba): zero-extend the low word of rd_rs1_s in place.
void Riscv64Assembler::CZextW(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZba, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b100u, 0b01u);
}
1502
// C.NOT (Zcb): bitwise complement of rd_rs1_s in place.
void Riscv64Assembler::CNot(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b101u, 0b01u);
}
1507
// C.MUL (Zcb, needs M): rd_s *= rs2_s between short registers.
void Riscv64Assembler::CMul(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kM, Riscv64Extension::kZcb);
  EmitCAReg(0b100111u, rd_s, 0b10u, rs2_s, 0b01u);
}
1512
// C.J: compressed unconditional PC-relative jump.
void Riscv64Assembler::CJ(int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCJ(0b101u, offset, 0b01u);
}
1517
// C.JR: jump to the address in rs1 (rs1 != Zero; the CR encoding with rs2 = Zero).
void Riscv64Assembler::CJr(XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rs1, Zero);
  EmitCR(0b1000u, rs1, Zero, 0b10u);
}
1523
// C.JALR: jump-and-link to the address in rs1 (rs1 != Zero; CR encoding, rs2 = Zero).
void Riscv64Assembler::CJalr(XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rs1, Zero);
  EmitCR(0b1001u, rs1, Zero, 0b10u);
}
1529
// c.beqz: compressed branch if rs1_s == zero, PC-relative `offset`. CB-format, funct3 = 0b110.
void Riscv64Assembler::CBeqz(XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCBBranch(0b110u, offset, rs1_s, 0b01u);
}
1534
// c.bnez: compressed branch if rs1_s != zero, PC-relative `offset`. CB-format, funct3 = 0b111.
void Riscv64Assembler::CBnez(XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCBBranch(0b111u, offset, rs1_s, 0b01u);
}
1539
// c.ebreak: compressed breakpoint. Shares funct4 = 0b1001 with c.jalr (see CJalr above);
// both register fields zero select the ebreak encoding.
void Riscv64Assembler::CEbreak() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCR(0b1001u, Zero, Zero, 0b10u);
}
1544
// c.nop: compressed no-op, encoded as the CI-format instruction with all-zero operands.
void Riscv64Assembler::CNop() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCI(0b000u, Zero, 0u, 0b01u);
}
1549
// c.unimp: the all-zero 16-bit word, architecturally guaranteed to be an illegal instruction.
void Riscv64Assembler::CUnimp() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  Emit16(0x0u);
}
1554
1555 /////////////////////////////// RV64 "C" Instructions END ///////////////////////////////
1556
1557 ////////////////////////////// RV64 "Zba" Instructions START /////////////////////////////
1558
// add.uw: rd = rs2 + zext32(rs1). R-type, funct7 = 0x4, funct3 = 0x0, opcode 0x3b (OP-32).
void Riscv64Assembler::AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x4, rs2, rs1, 0x0, rd, 0x3b);
}
1563
// sh1add: rd = rs2 + (rs1 << 1). R-type, funct7 = 0x10, funct3 = 0x2, opcode 0x33.
void Riscv64Assembler::Sh1Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x2, rd, 0x33);
}
1568
// sh1add.uw: rd = rs2 + (zext32(rs1) << 1). Same encoding as sh1add but opcode 0x3b (OP-32).
void Riscv64Assembler::Sh1AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x2, rd, 0x3b);
}
1573
// sh2add: rd = rs2 + (rs1 << 2). R-type, funct7 = 0x10, funct3 = 0x4, opcode 0x33.
void Riscv64Assembler::Sh2Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x4, rd, 0x33);
}
1578
// sh2add.uw: rd = rs2 + (zext32(rs1) << 2). Same encoding as sh2add but opcode 0x3b (OP-32).
void Riscv64Assembler::Sh2AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x4, rd, 0x3b);
}
1583
// sh3add: rd = rs2 + (rs1 << 3). R-type, funct7 = 0x10, funct3 = 0x6, opcode 0x33.
void Riscv64Assembler::Sh3Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x6, rd, 0x33);
}
1588
// sh3add.uw: rd = rs2 + (zext32(rs1) << 3). Same encoding as sh3add but opcode 0x3b (OP-32).
void Riscv64Assembler::Sh3AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x6, rd, 0x3b);
}
1593
SlliUw(XRegister rd,XRegister rs1,int32_t shamt)1594 void Riscv64Assembler::SlliUw(XRegister rd, XRegister rs1, int32_t shamt) {
1595 AssertExtensionsEnabled(Riscv64Extension::kZba);
1596 EmitI6(0x2, shamt, rs1, 0x1, rd, 0x1b);
1597 }
1598
1599 /////////////////////////////// RV64 "Zba" Instructions END //////////////////////////////
1600
1601 ////////////////////////////// RV64 "Zbb" Instructions START /////////////////////////////
1602
// andn: rd = rs1 & ~rs2. R-type, funct7 = 0x20, funct3 = 0x7, opcode 0x33.
void Riscv64Assembler::Andn(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x7, rd, 0x33);
}
1607
// orn: rd = rs1 | ~rs2. R-type, funct7 = 0x20, funct3 = 0x6, opcode 0x33.
void Riscv64Assembler::Orn(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x6, rd, 0x33);
}
1612
// xnor: rd = ~(rs1 ^ rs2). R-type, funct7 = 0x20, funct3 = 0x4, opcode 0x33.
void Riscv64Assembler::Xnor(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x4, rd, 0x33);
}
1617
// clz: count leading zero bits of rs1 (64-bit). Unary Zbb op encoded as R-type
// with rs2 field = 0x0, funct7 = 0x30, opcode 0x13 (OP-IMM).
void Riscv64Assembler::Clz(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x0, rs1, 0x1, rd, 0x13);
}
1622
// clzw: count leading zeros of the low 32 bits of rs1. Same encoding as clz
// but opcode 0x1b (OP-IMM-32).
void Riscv64Assembler::Clzw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x0, rs1, 0x1, rd, 0x1b);
}
1627
// ctz: count trailing zero bits of rs1 (64-bit). rs2 field = 0x1 selects ctz.
void Riscv64Assembler::Ctz(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x1, rs1, 0x1, rd, 0x13);
}
1632
// ctzw: count trailing zeros of the low 32 bits of rs1. 32-bit variant of ctz (opcode 0x1b).
void Riscv64Assembler::Ctzw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x1, rs1, 0x1, rd, 0x1b);
}
1637
// cpop: population count (number of set bits) of rs1. rs2 field = 0x2 selects cpop.
void Riscv64Assembler::Cpop(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x2, rs1, 0x1, rd, 0x13);
}
1642
// cpopw: population count of the low 32 bits of rs1. 32-bit variant of cpop (opcode 0x1b).
void Riscv64Assembler::Cpopw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x2, rs1, 0x1, rd, 0x1b);
}
1647
// min: rd = signed minimum of rs1, rs2. R-type, funct7 = 0x5, funct3 = 0x4, opcode 0x33.
void Riscv64Assembler::Min(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x4, rd, 0x33);
}
1652
// minu: rd = unsigned minimum of rs1, rs2. funct3 = 0x5.
void Riscv64Assembler::Minu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x5, rd, 0x33);
}
1657
// max: rd = signed maximum of rs1, rs2. funct3 = 0x6.
void Riscv64Assembler::Max(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x6, rd, 0x33);
}
1662
// maxu: rd = unsigned maximum of rs1, rs2. funct3 = 0x7.
void Riscv64Assembler::Maxu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x7, rd, 0x33);
}
1667
// rol: rotate rs1 left by rs2 (64-bit). R-type, funct7 = 0x30, funct3 = 0x1, opcode 0x33.
void Riscv64Assembler::Rol(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x1, rd, 0x33);
}
1672
// rolw: rotate the low 32 bits of rs1 left by rs2, sign-extend result. Opcode 0x3b (OP-32).
void Riscv64Assembler::Rolw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x1, rd, 0x3b);
}
1677
// ror: rotate rs1 right by rs2 (64-bit). funct3 = 0x5 distinguishes it from rol.
void Riscv64Assembler::Ror(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x5, rd, 0x33);
}
1682
// rorw: rotate the low 32 bits of rs1 right by rs2, sign-extend result. Opcode 0x3b (OP-32).
void Riscv64Assembler::Rorw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x5, rd, 0x3b);
}
1687
// rori: rotate rs1 right by an immediate shamt in [0, 64). I-type with a 6-bit
// shift field, funct6 = 0x18, funct3 = 0x5, opcode 0x13.
void Riscv64Assembler::Rori(XRegister rd, XRegister rs1, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);
  EmitI6(0x18, shamt, rs1, 0x5, rd, 0x13);
}
1693
// roriw: 32-bit rotate-right-immediate; shamt restricted to [0, 32). Opcode 0x1b (OP-IMM-32).
void Riscv64Assembler::Roriw(XRegister rd, XRegister rs1, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitI6(0x18, shamt, rs1, 0x5, rd, 0x1b);
}
1699
// orc.b: bitwise OR-combine per byte — each byte of rd becomes 0xff if the
// corresponding byte of rs1 is nonzero, else 0. Fixed unary encoding (rs2 field = 0x7).
void Riscv64Assembler::OrcB(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x14, 0x7, rs1, 0x5, rd, 0x13);
}
1704
// rev8: byte-reverse the 64-bit register (endianness swap). Fixed unary encoding
// (funct7 = 0x35, rs2 field = 0x18).
void Riscv64Assembler::Rev8(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x35, 0x18, rs1, 0x5, rd, 0x13);
}
1709
// sext.b (Zbb): rd = sext8(rs1). "Zbb" prefix distinguishes it from the
// compressed CSextB emitter. Unary encoding with rs2 field = 0x4.
void Riscv64Assembler::ZbbSextB(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x4, rs1, 0x1, rd, 0x13);
}
1714
// sext.h (Zbb): rd = sext16(rs1). Unary encoding with rs2 field = 0x5.
void Riscv64Assembler::ZbbSextH(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x5, rs1, 0x1, rd, 0x13);
}
1719
// zext.h (Zbb): rd = zext16(rs1). RV64 encoding uses opcode 0x3b (OP-32) with rs2 field = 0.
void Riscv64Assembler::ZbbZextH(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x4, 0x0, rs1, 0x4, rd, 0x3b);
}
1724
1725 /////////////////////////////// RV64 "Zbb" Instructions END //////////////////////////////
1726
1727 ////////////////////////////// RV64 "Zbs" Instructions START /////////////////////////////
1728
// bclr: clear bit rs2 (mod 64) of rs1. R-type, funct7 = 0x24, funct3 = 0x1, opcode 0x33.
void Riscv64Assembler::Bclr(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitR(0x24, rs2, rs1, 0x1, rd, 0x33);
}
1733
// bclri: clear bit `shamt` of rs1; shamt must be in [0, 64). I-type with 6-bit
// shift field, funct6 = 0x12.
void Riscv64Assembler::Bclri(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitI6(0x12, shamt, rs1, 0x1, rd, 0x13);
}
1739
// bext: rd = bit rs2 (mod 64) of rs1, zero-extended. funct3 = 0x5 distinguishes it from bclr.
void Riscv64Assembler::Bext(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitR(0x24, rs2, rs1, 0x5, rd, 0x33);
}
1744
// bexti: extract bit `shamt` of rs1; shamt must be in [0, 64).
void Riscv64Assembler::Bexti(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitI6(0x12, shamt, rs1, 0x5, rd, 0x13);
}
1750
// binv: invert bit rs2 (mod 64) of rs1. funct7 = 0x34.
void Riscv64Assembler::Binv(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitR(0x34, rs2, rs1, 0x1, rd, 0x33);
}
1755
// binvi: invert bit `shamt` of rs1; shamt must be in [0, 64). funct6 = 0x1A.
void Riscv64Assembler::Binvi(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitI6(0x1A, shamt, rs1, 0x1, rd, 0x13);
}
1761
// bset: set bit rs2 (mod 64) of rs1. funct7 = 0x14.
void Riscv64Assembler::Bset(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitR(0x14, rs2, rs1, 0x1, rd, 0x33);
}
1766
// bseti: set bit `shamt` of rs1; shamt must be in [0, 64). funct6 = 0xA.
void Riscv64Assembler::Bseti(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);
  AssertExtensionsEnabled(Riscv64Extension::kZbs);
  EmitI6(0xA, shamt, rs1, 0x1, rd, 0x13);
}
1772
1773 /////////////////////////////// RV64 "Zbs" Instructions END //////////////////////////////
1774
1775 /////////////////////////////// RVV "VSet" Instructions START ////////////////////////////
1776
// vsetvli: set vector length/configuration from rs1 (AVL) and the 11-bit
// immediate `vtypei`; rd receives the new vl. Encoded as an I-type OP-V (0x57)
// instruction with the OPCFG funct3.
void Riscv64Assembler::VSetvli(XRegister rd, XRegister rs1, uint32_t vtypei) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(IsUint<11>(vtypei));
  EmitI(vtypei, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1782
// vsetivli: like vsetvli but with a 5-bit immediate AVL (`uimm`) in the rs1 slot
// and a 10-bit `vtypei`. The top two immediate bits must be 1 to select the
// vsetivli encoding, hence OR-ing in `~0U << 10` (EmitI keeps only the low 12 bits).
void Riscv64Assembler::VSetivli(XRegister rd, uint32_t uimm, uint32_t vtypei) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(IsUint<10>(vtypei));
  DCHECK(IsUint<5>(uimm));
  EmitI((~0U << 10 | vtypei), uimm, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1789
// vsetvl: register-register form — AVL from rs1, vtype from rs2. funct7 = 0x40.
void Riscv64Assembler::VSetvl(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  EmitR(0x40, rs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1794
1795 /////////////////////////////// RVV "VSet" Instructions END //////////////////////////////
1796
1797 /////////////////////////////// RVV Load/Store Instructions START ////////////////////////////
1798
// vle8.v: unit-stride load of 8-bit elements from (rs1) into vd, optionally masked.
// A masked op may not write V0 (the mask register), hence the DCHECK.
void Riscv64Assembler::VLe8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1805
// vle16.v: unit-stride load of 16-bit elements from (rs1) into vd, optionally masked.
void Riscv64Assembler::VLe16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1812
// vle32.v: unit-stride load of 32-bit elements from (rs1) into vd, optionally masked.
void Riscv64Assembler::VLe32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1819
// vle64.v: unit-stride load of 64-bit elements from (rs1) into vd, optionally masked.
void Riscv64Assembler::VLe64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1826
// vse8.v: unit-stride store of 8-bit elements from vs3 to (rs1). Stores may be
// masked by V0 without restriction (stores do not write registers), so no V0 check.
void Riscv64Assembler::VSe8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1832
// vse16.v: unit-stride store of 16-bit elements from vs3 to (rs1).
void Riscv64Assembler::VSe16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1838
// vse32.v: unit-stride store of 32-bit elements from vs3 to (rs1).
void Riscv64Assembler::VSe32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1844
// vse64.v: unit-stride store of 64-bit elements from vs3 to (rs1).
void Riscv64Assembler::VSe64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1850
// vlm.v: load a mask register from (rs1). Always unmasked; the 0b01011 lumop
// field selects the mask-load variant of the unit-stride encoding.
void Riscv64Assembler::VLm(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vd, 0x7);
}
1856
// vsm.v: store a mask register to (rs1). Always unmasked; sumop field 0b01011.
void Riscv64Assembler::VSm(VRegister vs3, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vs3, 0x27);
}
1862
// vle8ff.v: unit-stride fault-only-first load of 8-bit elements (lumop field 0b10000).
// Emitted unmasked.
void Riscv64Assembler::VLe8ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1868
// vle16ff.v: unit-stride fault-only-first load of 16-bit elements. Emitted unmasked.
void Riscv64Assembler::VLe16ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1874
// vle32ff.v: unit-stride fault-only-first load of 32-bit elements. Emitted unmasked.
void Riscv64Assembler::VLe32ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1880
// vle64ff.v: unit-stride fault-only-first load of 64-bit elements. Emitted unmasked.
void Riscv64Assembler::VLe64ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1886
// vlse8.v: strided load of 8-bit elements, base (rs1), byte stride rs2.
// Masked destination may not be V0.
void Riscv64Assembler::VLse8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1893
// vlse16.v: strided load of 16-bit elements, base (rs1), byte stride rs2.
void Riscv64Assembler::VLse16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1900
// vlse32.v: strided load of 32-bit elements, base (rs1), byte stride rs2.
void Riscv64Assembler::VLse32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1907
// vlse64.v: strided load of 64-bit elements, base (rs1), byte stride rs2.
void Riscv64Assembler::VLse64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1914
// vsse8.v: strided store of 8-bit elements from vs3, base (rs1), byte stride rs2.
void Riscv64Assembler::VSse8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1920
// vsse16.v: strided store of 16-bit elements from vs3, base (rs1), byte stride rs2.
void Riscv64Assembler::VSse16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1926
// vsse32.v: strided store of 32-bit elements from vs3, base (rs1), byte stride rs2.
void Riscv64Assembler::VSse32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1932
// vsse64.v: strided store of 64-bit elements from vs3, base (rs1), byte stride rs2.
void Riscv64Assembler::VSse64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1938
// vloxei8.v: indexed-ordered load; 8-bit byte offsets come from vs2, base (rs1).
// Masked destination may not be V0.
void Riscv64Assembler::VLoxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1945
// vloxei16.v: indexed-ordered load with 16-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLoxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1952
// vloxei32.v: indexed-ordered load with 32-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLoxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1959
// vloxei64.v: indexed-ordered load with 64-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLoxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1966
// vluxei8.v: indexed-unordered load with 8-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLuxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1973
// vluxei16.v: indexed-unordered load with 16-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLuxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1980
// vluxei32.v: indexed-unordered load with 32-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLuxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1987
// vluxei64.v: indexed-unordered load with 64-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VLuxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1994
// vsoxei8.v: indexed-ordered store of vs3 with 8-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSoxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
2000
// vsoxei16.v: indexed-ordered store of vs3 with 16-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSoxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
2006
// vsoxei32.v: indexed-ordered store of vs3 with 32-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSoxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
2012
// vsoxei64.v: indexed-ordered store of vs3 with 64-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSoxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2018
// vsuxei8.v: indexed-unordered store of vs3 with 8-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSuxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
2024
// vsuxei16.v: indexed-unordered store of vs3 with 16-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSuxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
2030
// vsuxei32.v: indexed-unordered store of vs3 with 32-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSuxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
2036
// vsuxei64.v: indexed-unordered store of vs3 with 64-bit offsets from vs2, base (rs1).
void Riscv64Assembler::VSuxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2042
// vlseg2e8.v: unit-stride segment load, 2 fields (Nf::k2) of 8-bit elements per segment,
// de-interleaved into vd and the next register. Masked destination may not be V0.
void Riscv64Assembler::VLseg2e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
2049
// vlseg2e16.v: unit-stride segment load, 2 fields of 16-bit elements.
void Riscv64Assembler::VLseg2e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
2056
// vlseg2e32.v: unit-stride segment load, 2 fields of 32-bit elements.
void Riscv64Assembler::VLseg2e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
2063
// vlseg2e64.v: unit-stride segment load, 2 fields of 64-bit elements.
void Riscv64Assembler::VLseg2e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2070
// vlseg3e8.v: unit-stride segment load, 3 fields (Nf::k3) of 8-bit elements.
void Riscv64Assembler::VLseg3e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
2077
// vlseg3e16.v: unit-stride segment load, 3 fields of 16-bit elements.
void Riscv64Assembler::VLseg3e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
2084
// vlseg3e32.v: unit-stride segment load, 3 fields of 32-bit elements.
void Riscv64Assembler::VLseg3e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
2091
// vlseg3e64.v: unit-stride segment load, 3 fields of 64-bit elements.
void Riscv64Assembler::VLseg3e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2098
// vlseg4e8.v: unit-stride segment load, 4 fields (Nf::k4) of 8-bit elements.
void Riscv64Assembler::VLseg4e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
2105
VLseg4e16(VRegister vd,XRegister rs1,VM vm)2106 void Riscv64Assembler::VLseg4e16(VRegister vd, XRegister rs1, VM vm) {
2107 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2108 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2109 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2110 EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2111 }
2112
VLseg4e32(VRegister vd,XRegister rs1,VM vm)2113 void Riscv64Assembler::VLseg4e32(VRegister vd, XRegister rs1, VM vm) {
2114 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2115 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2116 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2117 EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2118 }
2119
VLseg4e64(VRegister vd,XRegister rs1,VM vm)2120 void Riscv64Assembler::VLseg4e64(VRegister vd, XRegister rs1, VM vm) {
2121 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2122 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2123 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2124 EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2125 }
2126
// Vector unit-stride segment loads with NF=5 ("vlseg5e<eew>.v vd, (rs1), vm").
void Riscv64Assembler::VLseg5e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries lumop 0b00000 (plain unit-stride); opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg5e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg5e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg5e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2154
// Vector unit-stride segment loads with NF=6 ("vlseg6e<eew>.v vd, (rs1), vm").
void Riscv64Assembler::VLseg6e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries lumop 0b00000 (plain unit-stride); opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg6e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg6e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg6e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2182
// Vector unit-stride segment loads with NF=7 ("vlseg7e<eew>.v vd, (rs1), vm").
void Riscv64Assembler::VLseg7e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries lumop 0b00000 (plain unit-stride); opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg7e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg7e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg7e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2210
// Vector unit-stride segment loads with NF=8 ("vlseg8e<eew>.v vd, (rs1), vm").
void Riscv64Assembler::VLseg8e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries lumop 0b00000 (plain unit-stride); opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg8e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg8e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg8e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2238
// Vector unit-stride segment stores with NF=2 ("vsseg2e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers, never write them.
void Riscv64Assembler::VSseg2e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg2e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg2e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg2e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2262
// Vector unit-stride segment stores with NF=3 ("vsseg3e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg3e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg3e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg3e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg3e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2286
// Vector unit-stride segment stores with NF=4 ("vsseg4e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg4e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg4e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg4e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg4e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2310
// Vector unit-stride segment stores with NF=5 ("vsseg5e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg5e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg5e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg5e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg5e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2334
// Vector unit-stride segment stores with NF=6 ("vsseg6e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg6e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg6e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg6e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg6e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2358
// Vector unit-stride segment stores with NF=7 ("vsseg7e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg7e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg7e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg7e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg7e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2382
// Vector unit-stride segment stores with NF=8 ("vsseg8e<eew>.v vs3, (rs1), vm").
// No V0-overlap check: stores only read vector registers.
void Riscv64Assembler::VSseg8e8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  // rs2 slot carries sumop 0b00000 (plain unit-stride); opcode 0x27 is STORE-FP.
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}

void Riscv64Assembler::VSseg8e16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}

void Riscv64Assembler::VSseg8e32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}

void Riscv64Assembler::VSseg8e64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
2406
// Fault-only-first unit-stride segment loads, NF=2 ("vlseg2e<eew>ff.v").
// A trap on any element after the first instead truncates vl; used for
// vectorized loops that may read past the end of valid memory.
void Riscv64Assembler::VLseg2e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg2e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg2e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg2e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2434
// Fault-only-first unit-stride segment loads, NF=3 ("vlseg3e<eew>ff.v").
void Riscv64Assembler::VLseg3e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg3e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg3e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg3e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2462
// Fault-only-first unit-stride segment loads, NF=4 ("vlseg4e<eew>ff.v").
void Riscv64Assembler::VLseg4e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg4e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg4e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg4e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2490
// Fault-only-first unit-stride segment loads, NF=5 ("vlseg5e<eew>ff.v").
void Riscv64Assembler::VLseg5e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg5e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg5e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg5e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2518
// Fault-only-first unit-stride segment loads, NF=6 ("vlseg6e<eew>ff.v").
void Riscv64Assembler::VLseg6e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg6e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg6e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg6e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2546
// Fault-only-first unit-stride segment loads, NF=7 ("vlseg7e<eew>ff.v").
void Riscv64Assembler::VLseg7e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg7e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg7e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg7e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2574
// Fault-only-first unit-stride segment loads, NF=8 ("vlseg8e<eew>ff.v").
void Riscv64Assembler::VLseg8e8ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  // lumop 0b10000 selects the fault-only-first form; opcode 0x7 is LOAD-FP.
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLseg8e16ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLseg8e32ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLseg8e64ff(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2602
// Strided segment loads, NF=2 ("vlsseg2e<eew>.v vd, (rs1), rs2, vm").
// Unlike the unit-stride forms, rs2 here is a real register operand holding
// the byte stride between consecutive segments.
void Riscv64Assembler::VLsseg2e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not write the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
  // opcode 0x7 is LOAD-FP.
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}

void Riscv64Assembler::VLsseg2e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}

void Riscv64Assembler::VLsseg2e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}

void Riscv64Assembler::VLsseg2e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2630
VLsseg3e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2631 void Riscv64Assembler::VLsseg3e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2632 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2633 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2634 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2635 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2636 }
2637
VLsseg3e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2638 void Riscv64Assembler::VLsseg3e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2639 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2640 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2641 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2642 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2643 }
2644
VLsseg3e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2645 void Riscv64Assembler::VLsseg3e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2646 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2647 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2648 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2649 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2650 }
2651
VLsseg3e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2652 void Riscv64Assembler::VLsseg3e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2653 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2654 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2655 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2656 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2657 }
2658
VLsseg4e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2659 void Riscv64Assembler::VLsseg4e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2660 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2661 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2662 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2663 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2664 }
2665
VLsseg4e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2666 void Riscv64Assembler::VLsseg4e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2667 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2668 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2669 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2670 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2671 }
2672
VLsseg4e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2673 void Riscv64Assembler::VLsseg4e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2674 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2675 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2676 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2677 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2678 }
2679
VLsseg4e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2680 void Riscv64Assembler::VLsseg4e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2681 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2682 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2683 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2684 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2685 }
2686
VLsseg5e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2687 void Riscv64Assembler::VLsseg5e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2688 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2689 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2690 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2691 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2692 }
2693
VLsseg5e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2694 void Riscv64Assembler::VLsseg5e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2695 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2696 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2697 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2698 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2699 }
2700
VLsseg5e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2701 void Riscv64Assembler::VLsseg5e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2702 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2703 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2704 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2705 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2706 }
2707
VLsseg5e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2708 void Riscv64Assembler::VLsseg5e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2709 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2710 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2711 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2712 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2713 }
2714
VLsseg6e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2715 void Riscv64Assembler::VLsseg6e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2716 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2717 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2718 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2719 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2720 }
2721
VLsseg6e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2722 void Riscv64Assembler::VLsseg6e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2723 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2724 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2725 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2726 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2727 }
2728
VLsseg6e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2729 void Riscv64Assembler::VLsseg6e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2730 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2731 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2732 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2733 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2734 }
2735
VLsseg6e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2736 void Riscv64Assembler::VLsseg6e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2737 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2738 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2739 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2740 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2741 }
2742
VLsseg7e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2743 void Riscv64Assembler::VLsseg7e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2744 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2745 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2746 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2747 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2748 }
2749
VLsseg7e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2750 void Riscv64Assembler::VLsseg7e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2751 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2752 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2753 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2754 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2755 }
2756
VLsseg7e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2757 void Riscv64Assembler::VLsseg7e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2758 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2759 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2760 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2761 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2762 }
2763
VLsseg7e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2764 void Riscv64Assembler::VLsseg7e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2765 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2766 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2767 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2768 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2769 }
2770
VLsseg8e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2771 void Riscv64Assembler::VLsseg8e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2772 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2773 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2774 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2775 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2776 }
2777
VLsseg8e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2778 void Riscv64Assembler::VLsseg8e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2779 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2780 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2781 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2782 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2783 }
2784
VLsseg8e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2785 void Riscv64Assembler::VLsseg8e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2786 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2787 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2788 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2789 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2790 }
2791
VLsseg8e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2792 void Riscv64Assembler::VLsseg8e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2793 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2794 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2795 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2796 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2797 }
2798
VSsseg2e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2799 void Riscv64Assembler::VSsseg2e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2800 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2801 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2802 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2803 }
2804
VSsseg2e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2805 void Riscv64Assembler::VSsseg2e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2806 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2807 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2808 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2809 }
2810
VSsseg2e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2811 void Riscv64Assembler::VSsseg2e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2812 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2813 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2814 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2815 }
2816
VSsseg2e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2817 void Riscv64Assembler::VSsseg2e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2818 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2819 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2820 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2821 }
2822
VSsseg3e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2823 void Riscv64Assembler::VSsseg3e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2824 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2825 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2826 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2827 }
2828
VSsseg3e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2829 void Riscv64Assembler::VSsseg3e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2830 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2831 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2832 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2833 }
2834
VSsseg3e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2835 void Riscv64Assembler::VSsseg3e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2836 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2837 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2838 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2839 }
2840
VSsseg3e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2841 void Riscv64Assembler::VSsseg3e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2842 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2843 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2844 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2845 }
2846
VSsseg4e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2847 void Riscv64Assembler::VSsseg4e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2848 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2849 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2850 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2851 }
2852
VSsseg4e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2853 void Riscv64Assembler::VSsseg4e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2854 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2855 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2856 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2857 }
2858
VSsseg4e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2859 void Riscv64Assembler::VSsseg4e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2860 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2861 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2862 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2863 }
2864
VSsseg4e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2865 void Riscv64Assembler::VSsseg4e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2866 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2867 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2868 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2869 }
2870
VSsseg5e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2871 void Riscv64Assembler::VSsseg5e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2872 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2873 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2874 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2875 }
2876
VSsseg5e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2877 void Riscv64Assembler::VSsseg5e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2878 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2879 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2880 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2881 }
2882
VSsseg5e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2883 void Riscv64Assembler::VSsseg5e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2884 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2885 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2886 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2887 }
2888
VSsseg5e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2889 void Riscv64Assembler::VSsseg5e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2890 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2891 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2892 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2893 }
2894
VSsseg6e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2895 void Riscv64Assembler::VSsseg6e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2896 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2897 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2898 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2899 }
2900
VSsseg6e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2901 void Riscv64Assembler::VSsseg6e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2902 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2903 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2904 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2905 }
2906
VSsseg6e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2907 void Riscv64Assembler::VSsseg6e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2908 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2909 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2910 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2911 }
2912
VSsseg6e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2913 void Riscv64Assembler::VSsseg6e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2914 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2915 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2916 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2917 }
2918
VSsseg7e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2919 void Riscv64Assembler::VSsseg7e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2920 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2921 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2922 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2923 }
2924
VSsseg7e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2925 void Riscv64Assembler::VSsseg7e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2926 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2927 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2928 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2929 }
2930
VSsseg7e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2931 void Riscv64Assembler::VSsseg7e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2932 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2933 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2934 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2935 }
2936
VSsseg7e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2937 void Riscv64Assembler::VSsseg7e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2938 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2939 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2940 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2941 }
2942
VSsseg8e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2943 void Riscv64Assembler::VSsseg8e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2944 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2945 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2946 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2947 }
2948
VSsseg8e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2949 void Riscv64Assembler::VSsseg8e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2950 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2951 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2952 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2953 }
2954
VSsseg8e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2955 void Riscv64Assembler::VSsseg8e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2956 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2957 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2958 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2959 }
2960
VSsseg8e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2961 void Riscv64Assembler::VSsseg8e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2962 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2963 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2964 EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2965 }
2966
VLuxseg2ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2967 void Riscv64Assembler::VLuxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2968 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2969 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2970 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2971 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2972 }
2973
VLuxseg2ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2974 void Riscv64Assembler::VLuxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2975 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2976 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2977 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2978 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2979 }
2980
VLuxseg2ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2981 void Riscv64Assembler::VLuxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2982 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2983 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2984 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2985 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2986 }
2987
VLuxseg2ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2988 void Riscv64Assembler::VLuxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2989 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2990 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2991 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2992 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2993 }
2994
VLuxseg3ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2995 void Riscv64Assembler::VLuxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2996 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2997 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2998 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
2999 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3000 }
3001
VLuxseg3ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3002 void Riscv64Assembler::VLuxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3003 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3004 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3005 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3006 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3007 }
3008
VLuxseg3ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3009 void Riscv64Assembler::VLuxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3010 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3011 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3012 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3013 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3014 }
3015
VLuxseg3ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3016 void Riscv64Assembler::VLuxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3017 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3018 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3019 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3020 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3021 }
3022
VLuxseg4ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3023 void Riscv64Assembler::VLuxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3024 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3025 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3026 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3027 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3028 }
3029
VLuxseg4ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3030 void Riscv64Assembler::VLuxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3031 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3032 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3033 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3034 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3035 }
3036
VLuxseg4ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3037 void Riscv64Assembler::VLuxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3038 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3039 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3040 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3041 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3042 }
3043
VLuxseg4ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3044 void Riscv64Assembler::VLuxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3045 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3046 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3047 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3048 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3049 }
3050
VLuxseg5ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3051 void Riscv64Assembler::VLuxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3052 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3053 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3054 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3055 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3056 }
3057
VLuxseg5ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3058 void Riscv64Assembler::VLuxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3059 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3060 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3061 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3062 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3063 }
3064
VLuxseg5ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3065 void Riscv64Assembler::VLuxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3066 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3067 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3068 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3069 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3070 }
3071
VLuxseg5ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3072 void Riscv64Assembler::VLuxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3073 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3074 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3075 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3076 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3077 }
3078
VLuxseg6ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3079 void Riscv64Assembler::VLuxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3080 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3081 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3082 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3083 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3084 }
3085
VLuxseg6ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3086 void Riscv64Assembler::VLuxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3087 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3088 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3089 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3090 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3091 }
3092
VLuxseg6ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3093 void Riscv64Assembler::VLuxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3094 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3095 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3096 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3097 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3098 }
3099
VLuxseg6ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3100 void Riscv64Assembler::VLuxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3101 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3102 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3103 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3104 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3105 }
3106
VLuxseg7ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3107 void Riscv64Assembler::VLuxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3108 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3109 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3110 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3111 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3112 }
3113
VLuxseg7ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3114 void Riscv64Assembler::VLuxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3115 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3116 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3117 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3118 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3119 }
3120
VLuxseg7ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3121 void Riscv64Assembler::VLuxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3122 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3123 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3124 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3125 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3126 }
3127
VLuxseg7ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3128 void Riscv64Assembler::VLuxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3129 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3130 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3131 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3132 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3133 }
3134
VLuxseg8ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3135 void Riscv64Assembler::VLuxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3136 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3137 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3138 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3139 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3140 }
3141
VLuxseg8ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3142 void Riscv64Assembler::VLuxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3143 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3144 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3145 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3146 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3147 }
3148
VLuxseg8ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3149 void Riscv64Assembler::VLuxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3150 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3151 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3152 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3153 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3154 }
3155
VLuxseg8ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3156 void Riscv64Assembler::VLuxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3157 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3158 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3159 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3160 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3161 }
3162
VSuxseg2ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3163 void Riscv64Assembler::VSuxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3164 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3165 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3166 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3167 }
3168
VSuxseg2ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3169 void Riscv64Assembler::VSuxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3170 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3171 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3172 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3173 }
3174
VSuxseg2ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3175 void Riscv64Assembler::VSuxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3176 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3177 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3178 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3179 }
3180
VSuxseg2ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3181 void Riscv64Assembler::VSuxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3182 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3183 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3184 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3185 }
3186
VSuxseg3ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3187 void Riscv64Assembler::VSuxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3188 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3189 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3190 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3191 }
3192
VSuxseg3ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3193 void Riscv64Assembler::VSuxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3194 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3195 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3196 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3197 }
3198
VSuxseg3ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3199 void Riscv64Assembler::VSuxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3200 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3201 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3202 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3203 }
3204
VSuxseg3ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3205 void Riscv64Assembler::VSuxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3206 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3207 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3208 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3209 }
3210
VSuxseg4ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3211 void Riscv64Assembler::VSuxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3212 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3213 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3214 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3215 }
3216
VSuxseg4ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3217 void Riscv64Assembler::VSuxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3218 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3219 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3220 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3221 }
3222
VSuxseg4ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3223 void Riscv64Assembler::VSuxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3224 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3225 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3226 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3227 }
3228
VSuxseg4ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3229 void Riscv64Assembler::VSuxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3230 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3231 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3232 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3233 }
3234
VSuxseg5ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3235 void Riscv64Assembler::VSuxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3236 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3237 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3238 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3239 }
3240
VSuxseg5ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3241 void Riscv64Assembler::VSuxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3242 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3243 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3244 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3245 }
3246
VSuxseg5ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3247 void Riscv64Assembler::VSuxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3248 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3249 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3250 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3251 }
3252
VSuxseg5ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3253 void Riscv64Assembler::VSuxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3254 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3255 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3256 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3257 }
3258
VSuxseg6ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3259 void Riscv64Assembler::VSuxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3260 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3261 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3262 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3263 }
3264
VSuxseg6ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3265 void Riscv64Assembler::VSuxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3266 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3267 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3268 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3269 }
3270
VSuxseg6ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3271 void Riscv64Assembler::VSuxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3272 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3273 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3274 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3275 }
3276
VSuxseg6ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3277 void Riscv64Assembler::VSuxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3278 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3279 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3280 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3281 }
3282
VSuxseg7ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3283 void Riscv64Assembler::VSuxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3284 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3285 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3286 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3287 }
3288
VSuxseg7ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3289 void Riscv64Assembler::VSuxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3290 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3291 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3292 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3293 }
3294
VSuxseg7ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3295 void Riscv64Assembler::VSuxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3296 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3297 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3298 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3299 }
3300
VSuxseg7ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3301 void Riscv64Assembler::VSuxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3302 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3303 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3304 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3305 }
3306
VSuxseg8ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3307 void Riscv64Assembler::VSuxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3308 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3309 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3310 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3311 }
3312
VSuxseg8ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3313 void Riscv64Assembler::VSuxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3314 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3315 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3316 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3317 }
3318
VSuxseg8ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3319 void Riscv64Assembler::VSuxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3320 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3321 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3322 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3323 }
3324
VSuxseg8ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3325 void Riscv64Assembler::VSuxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3326 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3327 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3328 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3329 }
3330
VLoxseg2ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3331 void Riscv64Assembler::VLoxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3332 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3333 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3334 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3335 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3336 }
3337
VLoxseg2ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3338 void Riscv64Assembler::VLoxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3339 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3340 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3341 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3342 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3343 }
3344
VLoxseg2ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3345 void Riscv64Assembler::VLoxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3346 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3347 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3348 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3349 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3350 }
3351
VLoxseg2ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3352 void Riscv64Assembler::VLoxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3353 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3354 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3355 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3356 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3357 }
3358
VLoxseg3ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3359 void Riscv64Assembler::VLoxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3360 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3361 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3362 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3363 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3364 }
3365
VLoxseg3ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3366 void Riscv64Assembler::VLoxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3367 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3368 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3369 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3370 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3371 }
3372
VLoxseg3ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3373 void Riscv64Assembler::VLoxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3374 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3375 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3376 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3377 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3378 }
3379
VLoxseg3ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3380 void Riscv64Assembler::VLoxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3381 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3382 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3383 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3384 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3385 }
3386
VLoxseg4ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3387 void Riscv64Assembler::VLoxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3388 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3389 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3390 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3391 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3392 }
3393
VLoxseg4ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3394 void Riscv64Assembler::VLoxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3395 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3396 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3397 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3398 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3399 }
3400
VLoxseg4ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3401 void Riscv64Assembler::VLoxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3402 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3403 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3404 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3405 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3406 }
3407
VLoxseg4ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3408 void Riscv64Assembler::VLoxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3409 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3410 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3411 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3412 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3413 }
3414
VLoxseg5ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3415 void Riscv64Assembler::VLoxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3416 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3417 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3418 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3419 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3420 }
3421
VLoxseg5ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3422 void Riscv64Assembler::VLoxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3423 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3424 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3425 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3426 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3427 }
3428
VLoxseg5ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3429 void Riscv64Assembler::VLoxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3430 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3431 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3432 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3433 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3434 }
3435
VLoxseg5ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3436 void Riscv64Assembler::VLoxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3437 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3438 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3439 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3440 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3441 }
3442
VLoxseg6ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3443 void Riscv64Assembler::VLoxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3444 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3445 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3446 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3447 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3448 }
3449
VLoxseg6ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3450 void Riscv64Assembler::VLoxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3451 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3452 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3453 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3454 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3455 }
3456
VLoxseg6ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3457 void Riscv64Assembler::VLoxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3458 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3459 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3460 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3461 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3462 }
3463
VLoxseg6ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3464 void Riscv64Assembler::VLoxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3465 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3466 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3467 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3468 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3469 }
3470
VLoxseg7ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3471 void Riscv64Assembler::VLoxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3472 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3473 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3474 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3475 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3476 }
3477
VLoxseg7ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3478 void Riscv64Assembler::VLoxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3479 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3480 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3481 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3482 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3483 }
3484
VLoxseg7ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3485 void Riscv64Assembler::VLoxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3486 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3487 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3488 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3489 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3490 }
3491
VLoxseg7ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3492 void Riscv64Assembler::VLoxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3493 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3494 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3495 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3496 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3497 }
3498
VLoxseg8ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3499 void Riscv64Assembler::VLoxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3500 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3501 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3502 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3503 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3504 }
3505
VLoxseg8ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3506 void Riscv64Assembler::VLoxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3507 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3508 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3509 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3510 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3511 }
3512
VLoxseg8ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3513 void Riscv64Assembler::VLoxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3514 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3515 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3516 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3517 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3518 }
3519
VLoxseg8ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3520 void Riscv64Assembler::VLoxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3521 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3522 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3523 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3524 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3525 }
3526
VSoxseg2ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3527 void Riscv64Assembler::VSoxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3528 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3529 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3530 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3531 }
3532
VSoxseg2ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3533 void Riscv64Assembler::VSoxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3534 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3535 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3536 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3537 }
3538
VSoxseg2ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3539 void Riscv64Assembler::VSoxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3540 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3541 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3542 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3543 }
3544
VSoxseg2ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3545 void Riscv64Assembler::VSoxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3546 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3547 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3548 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3549 }
3550
VSoxseg3ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3551 void Riscv64Assembler::VSoxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3552 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3553 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3554 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3555 }
3556
VSoxseg3ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3557 void Riscv64Assembler::VSoxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3558 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3559 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3560 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3561 }
3562
VSoxseg3ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3563 void Riscv64Assembler::VSoxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3564 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3565 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3566 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3567 }
3568
VSoxseg3ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3569 void Riscv64Assembler::VSoxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3570 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3571 const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3572 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3573 }
3574
VSoxseg4ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3575 void Riscv64Assembler::VSoxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3576 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3577 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3578 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3579 }
3580
VSoxseg4ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3581 void Riscv64Assembler::VSoxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3582 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3583 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3584 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3585 }
3586
VSoxseg4ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3587 void Riscv64Assembler::VSoxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3588 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3589 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3590 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3591 }
3592
VSoxseg4ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3593 void Riscv64Assembler::VSoxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3594 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3595 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3596 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3597 }
3598
VSoxseg5ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3599 void Riscv64Assembler::VSoxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3600 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3601 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3602 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3603 }
3604
VSoxseg5ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3605 void Riscv64Assembler::VSoxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3606 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3607 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3608 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3609 }
3610
VSoxseg5ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3611 void Riscv64Assembler::VSoxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3612 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3613 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3614 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3615 }
3616
VSoxseg5ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3617 void Riscv64Assembler::VSoxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3618 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3619 const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3620 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3621 }
3622
VSoxseg6ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3623 void Riscv64Assembler::VSoxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3624 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3625 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3626 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3627 }
3628
VSoxseg6ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3629 void Riscv64Assembler::VSoxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3630 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3631 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3632 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3633 }
3634
VSoxseg6ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3635 void Riscv64Assembler::VSoxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3636 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3637 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3638 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3639 }
3640
VSoxseg6ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3641 void Riscv64Assembler::VSoxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3642 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3643 const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3644 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3645 }
3646
VSoxseg7ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3647 void Riscv64Assembler::VSoxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3648 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3649 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3650 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3651 }
3652
VSoxseg7ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3653 void Riscv64Assembler::VSoxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3654 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3655 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3656 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3657 }
3658
VSoxseg7ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3659 void Riscv64Assembler::VSoxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3660 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3661 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3662 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3663 }
3664
VSoxseg7ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3665 void Riscv64Assembler::VSoxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3666 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3667 const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3668 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3669 }
3670
VSoxseg8ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3671 void Riscv64Assembler::VSoxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3672 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3673 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3674 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3675 }
3676
VSoxseg8ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3677 void Riscv64Assembler::VSoxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3678 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3679 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3680 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3681 }
3682
VSoxseg8ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3683 void Riscv64Assembler::VSoxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3684 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3685 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3686 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3687 }
3688
VSoxseg8ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3689 void Riscv64Assembler::VSoxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3690 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3691 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3692 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3693 }
3694
VL1re8(VRegister vd,XRegister rs1)3695 void Riscv64Assembler::VL1re8(VRegister vd, XRegister rs1) {
3696 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3697 const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3698 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3699 }
3700
VL1re16(VRegister vd,XRegister rs1)3701 void Riscv64Assembler::VL1re16(VRegister vd, XRegister rs1) {
3702 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3703 const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3704 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3705 }
3706
VL1re32(VRegister vd,XRegister rs1)3707 void Riscv64Assembler::VL1re32(VRegister vd, XRegister rs1) {
3708 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3709 const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3710 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3711 }
3712
VL1re64(VRegister vd,XRegister rs1)3713 void Riscv64Assembler::VL1re64(VRegister vd, XRegister rs1) {
3714 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3715 const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3716 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3717 }
3718
VL2re8(VRegister vd,XRegister rs1)3719 void Riscv64Assembler::VL2re8(VRegister vd, XRegister rs1) {
3720 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3721 DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
3722 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3723 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3724 }
3725
VL2re16(VRegister vd,XRegister rs1)3726 void Riscv64Assembler::VL2re16(VRegister vd, XRegister rs1) {
3727 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3728 DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
3729 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3730 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3731 }
3732
VL2re32(VRegister vd,XRegister rs1)3733 void Riscv64Assembler::VL2re32(VRegister vd, XRegister rs1) {
3734 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3735 DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
3736 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3737 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3738 }
3739
VL2re64(VRegister vd,XRegister rs1)3740 void Riscv64Assembler::VL2re64(VRegister vd, XRegister rs1) {
3741 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3742 DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
3743 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3744 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3745 }
3746
VL4re8(VRegister vd,XRegister rs1)3747 void Riscv64Assembler::VL4re8(VRegister vd, XRegister rs1) {
3748 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3749 DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
3750 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3751 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3752 }
3753
VL4re16(VRegister vd,XRegister rs1)3754 void Riscv64Assembler::VL4re16(VRegister vd, XRegister rs1) {
3755 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3756 DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
3757 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3758 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3759 }
3760
VL4re32(VRegister vd,XRegister rs1)3761 void Riscv64Assembler::VL4re32(VRegister vd, XRegister rs1) {
3762 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3763 DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
3764 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3765 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3766 }
3767
VL4re64(VRegister vd,XRegister rs1)3768 void Riscv64Assembler::VL4re64(VRegister vd, XRegister rs1) {
3769 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3770 DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
3771 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3772 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3773 }
3774
VL8re8(VRegister vd,XRegister rs1)3775 void Riscv64Assembler::VL8re8(VRegister vd, XRegister rs1) {
3776 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3777 DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
3778 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3779 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3780 }
3781
VL8re16(VRegister vd,XRegister rs1)3782 void Riscv64Assembler::VL8re16(VRegister vd, XRegister rs1) {
3783 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3784 DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
3785 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3786 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3787 }
3788
VL8re32(VRegister vd,XRegister rs1)3789 void Riscv64Assembler::VL8re32(VRegister vd, XRegister rs1) {
3790 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3791 DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
3792 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3793 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3794 }
3795
VL8re64(VRegister vd,XRegister rs1)3796 void Riscv64Assembler::VL8re64(VRegister vd, XRegister rs1) {
3797 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3798 DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
3799 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3800 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3801 }
3802
VL1r(VRegister vd,XRegister rs1)3803 void Riscv64Assembler::VL1r(VRegister vd, XRegister rs1) { VL1re8(vd, rs1); }
3804
VL2r(VRegister vd,XRegister rs1)3805 void Riscv64Assembler::VL2r(VRegister vd, XRegister rs1) { VL2re8(vd, rs1); }
3806
VL4r(VRegister vd,XRegister rs1)3807 void Riscv64Assembler::VL4r(VRegister vd, XRegister rs1) { VL4re8(vd, rs1); }
3808
VL8r(VRegister vd,XRegister rs1)3809 void Riscv64Assembler::VL8r(VRegister vd, XRegister rs1) { VL8re8(vd, rs1); }
3810
VS1r(VRegister vs3,XRegister rs1)3811 void Riscv64Assembler::VS1r(VRegister vs3, XRegister rs1) {
3812 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3813 const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3814 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3815 }
3816
VS2r(VRegister vs3,XRegister rs1)3817 void Riscv64Assembler::VS2r(VRegister vs3, XRegister rs1) {
3818 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3819 const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3820 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3821 }
3822
VS4r(VRegister vs3,XRegister rs1)3823 void Riscv64Assembler::VS4r(VRegister vs3, XRegister rs1) {
3824 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3825 const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3826 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3827 }
3828
VS8r(VRegister vs3,XRegister rs1)3829 void Riscv64Assembler::VS8r(VRegister vs3, XRegister rs1) {
3830 AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3831 const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3832 EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3833 }
3834
3835 /////////////////////////////// RVV Load/Store Instructions END //////////////////////////////
3836
3837 /////////////////////////////// RVV Arithmetic Instructions START ////////////////////////////
3838
VAdd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3839 void Riscv64Assembler::VAdd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3840 AssertExtensionsEnabled(Riscv64Extension::kV);
3841 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3842 const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
3843 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3844 }
3845
VAdd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3846 void Riscv64Assembler::VAdd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3847 AssertExtensionsEnabled(Riscv64Extension::kV);
3848 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3849 const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
3850 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3851 }
3852
VAdd_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)3853 void Riscv64Assembler::VAdd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
3854 AssertExtensionsEnabled(Riscv64Extension::kV);
3855 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3856 const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
3857 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
3858 }
3859
VSub_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3860 void Riscv64Assembler::VSub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3861 AssertExtensionsEnabled(Riscv64Extension::kV);
3862 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3863 const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
3864 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3865 }
3866
VSub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3867 void Riscv64Assembler::VSub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3868 AssertExtensionsEnabled(Riscv64Extension::kV);
3869 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3870 const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
3871 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3872 }
3873
VRsub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3874 void Riscv64Assembler::VRsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3875 AssertExtensionsEnabled(Riscv64Extension::kV);
3876 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3877 const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
3878 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3879 }
3880
VRsub_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)3881 void Riscv64Assembler::VRsub_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
3882 AssertExtensionsEnabled(Riscv64Extension::kV);
3883 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3884 const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
3885 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
3886 }
3887
VNeg_v(VRegister vd,VRegister vs2)3888 void Riscv64Assembler::VNeg_v(VRegister vd, VRegister vs2) { VRsub_vx(vd, vs2, Zero); }
3889
VMinu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3890 void Riscv64Assembler::VMinu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3891 AssertExtensionsEnabled(Riscv64Extension::kV);
3892 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3893 const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
3894 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3895 }
3896
VMinu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3897 void Riscv64Assembler::VMinu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3898 AssertExtensionsEnabled(Riscv64Extension::kV);
3899 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3900 const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
3901 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3902 }
3903
VMin_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3904 void Riscv64Assembler::VMin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3905 AssertExtensionsEnabled(Riscv64Extension::kV);
3906 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3907 const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
3908 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3909 }
3910
VMin_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3911 void Riscv64Assembler::VMin_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3912 AssertExtensionsEnabled(Riscv64Extension::kV);
3913 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3914 const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
3915 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3916 }
3917
VMaxu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3918 void Riscv64Assembler::VMaxu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3919 AssertExtensionsEnabled(Riscv64Extension::kV);
3920 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3921 const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
3922 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3923 }
3924
VMaxu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3925 void Riscv64Assembler::VMaxu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3926 AssertExtensionsEnabled(Riscv64Extension::kV);
3927 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3928 const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
3929 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3930 }
3931
VMax_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3932 void Riscv64Assembler::VMax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3933 AssertExtensionsEnabled(Riscv64Extension::kV);
3934 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3935 const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
3936 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3937 }
3938
VMax_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3939 void Riscv64Assembler::VMax_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3940 AssertExtensionsEnabled(Riscv64Extension::kV);
3941 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3942 const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
3943 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3944 }
3945
VAnd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3946 void Riscv64Assembler::VAnd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3947 AssertExtensionsEnabled(Riscv64Extension::kV);
3948 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3949 const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
3950 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3951 }
3952
VAnd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3953 void Riscv64Assembler::VAnd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3954 AssertExtensionsEnabled(Riscv64Extension::kV);
3955 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3956 const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
3957 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3958 }
3959
VAnd_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)3960 void Riscv64Assembler::VAnd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
3961 AssertExtensionsEnabled(Riscv64Extension::kV);
3962 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3963 const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
3964 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
3965 }
3966
VOr_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3967 void Riscv64Assembler::VOr_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3968 AssertExtensionsEnabled(Riscv64Extension::kV);
3969 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3970 const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
3971 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3972 }
3973
VOr_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3974 void Riscv64Assembler::VOr_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3975 AssertExtensionsEnabled(Riscv64Extension::kV);
3976 const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
3977 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3978 }
3979
VOr_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)3980 void Riscv64Assembler::VOr_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
3981 AssertExtensionsEnabled(Riscv64Extension::kV);
3982 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3983 const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
3984 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
3985 }
3986
VXor_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3987 void Riscv64Assembler::VXor_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3988 AssertExtensionsEnabled(Riscv64Extension::kV);
3989 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3990 const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
3991 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3992 }
3993
VXor_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3994 void Riscv64Assembler::VXor_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3995 AssertExtensionsEnabled(Riscv64Extension::kV);
3996 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3997 const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
3998 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3999 }
4000
VXor_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4001 void Riscv64Assembler::VXor_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4002 AssertExtensionsEnabled(Riscv64Extension::kV);
4003 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4004 const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
4005 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4006 }
4007
VNot_v(VRegister vd,VRegister vs2,VM vm)4008 void Riscv64Assembler::VNot_v(VRegister vd, VRegister vs2, VM vm) { VXor_vi(vd, vs2, -1, vm); }
4009
VRgather_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4010 void Riscv64Assembler::VRgather_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4011 AssertExtensionsEnabled(Riscv64Extension::kV);
4012 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4013 DCHECK(vd != vs1);
4014 DCHECK(vd != vs2);
4015 const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
4016 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4017 }
4018
VRgather_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4019 void Riscv64Assembler::VRgather_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4020 AssertExtensionsEnabled(Riscv64Extension::kV);
4021 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4022 DCHECK(vd != vs2);
4023 const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
4024 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4025 }
4026
VRgather_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4027 void Riscv64Assembler::VRgather_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4028 AssertExtensionsEnabled(Riscv64Extension::kV);
4029 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4030 DCHECK(vd != vs2);
4031 const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
4032 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4033 }
4034
VSlideup_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4035 void Riscv64Assembler::VSlideup_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4036 AssertExtensionsEnabled(Riscv64Extension::kV);
4037 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4038 DCHECK(vd != vs2);
4039 const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4040 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4041 }
4042
VSlideup_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4043 void Riscv64Assembler::VSlideup_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4044 AssertExtensionsEnabled(Riscv64Extension::kV);
4045 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4046 DCHECK(vd != vs2);
4047 const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4048 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4049 }
4050
VRgatherei16_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4051 void Riscv64Assembler::VRgatherei16_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4052 AssertExtensionsEnabled(Riscv64Extension::kV);
4053 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4054 DCHECK(vd != vs1);
4055 DCHECK(vd != vs2);
4056 const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4057 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4058 }
4059
VSlidedown_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4060 void Riscv64Assembler::VSlidedown_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4061 AssertExtensionsEnabled(Riscv64Extension::kV);
4062 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4063 DCHECK(vd != vs2);
4064 const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
4065 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4066 }
4067
VSlidedown_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4068 void Riscv64Assembler::VSlidedown_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4069 AssertExtensionsEnabled(Riscv64Extension::kV);
4070 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4071 const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
4072 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4073 }
4074
VAdc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4075 void Riscv64Assembler::VAdc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4076 AssertExtensionsEnabled(Riscv64Extension::kV);
4077 DCHECK(vd != V0);
4078 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4079 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4080 }
4081
VAdc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4082 void Riscv64Assembler::VAdc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4083 AssertExtensionsEnabled(Riscv64Extension::kV);
4084 DCHECK(vd != V0);
4085 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4086 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4087 }
4088
VAdc_vim(VRegister vd,VRegister vs2,int32_t imm5)4089 void Riscv64Assembler::VAdc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4090 AssertExtensionsEnabled(Riscv64Extension::kV);
4091 DCHECK(vd != V0);
4092 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4093 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4094 }
4095
VMadc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4096 void Riscv64Assembler::VMadc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4097 AssertExtensionsEnabled(Riscv64Extension::kV);
4098 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4099 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4100 }
4101
VMadc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4102 void Riscv64Assembler::VMadc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4103 AssertExtensionsEnabled(Riscv64Extension::kV);
4104 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4105 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4106 }
4107
VMadc_vim(VRegister vd,VRegister vs2,int32_t imm5)4108 void Riscv64Assembler::VMadc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4109 AssertExtensionsEnabled(Riscv64Extension::kV);
4110 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4111 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4112 }
4113
VMadc_vv(VRegister vd,VRegister vs2,VRegister vs1)4114 void Riscv64Assembler::VMadc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
4115 AssertExtensionsEnabled(Riscv64Extension::kV);
4116 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4117 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4118 }
4119
VMadc_vx(VRegister vd,VRegister vs2,XRegister rs1)4120 void Riscv64Assembler::VMadc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
4121 AssertExtensionsEnabled(Riscv64Extension::kV);
4122 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4123 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4124 }
4125
VMadc_vi(VRegister vd,VRegister vs2,int32_t imm5)4126 void Riscv64Assembler::VMadc_vi(VRegister vd, VRegister vs2, int32_t imm5) {
4127 AssertExtensionsEnabled(Riscv64Extension::kV);
4128 const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4129 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4130 }
4131
VSbc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4132 void Riscv64Assembler::VSbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4133 AssertExtensionsEnabled(Riscv64Extension::kV);
4134 DCHECK(vd != V0);
4135 const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
4136 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4137 }
4138
VSbc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4139 void Riscv64Assembler::VSbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4140 AssertExtensionsEnabled(Riscv64Extension::kV);
4141 DCHECK(vd != V0);
4142 const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
4143 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4144 }
4145
VMsbc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4146 void Riscv64Assembler::VMsbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4147 AssertExtensionsEnabled(Riscv64Extension::kV);
4148 const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
4149 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4150 }
4151
VMsbc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4152 void Riscv64Assembler::VMsbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4153 AssertExtensionsEnabled(Riscv64Extension::kV);
4154 const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
4155 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4156 }
4157
VMsbc_vv(VRegister vd,VRegister vs2,VRegister vs1)4158 void Riscv64Assembler::VMsbc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
4159 AssertExtensionsEnabled(Riscv64Extension::kV);
4160 const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
4161 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4162 }
4163
VMsbc_vx(VRegister vd,VRegister vs2,XRegister rs1)4164 void Riscv64Assembler::VMsbc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
4165 AssertExtensionsEnabled(Riscv64Extension::kV);
4166 const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
4167 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4168 }
4169
VMerge_vvm(VRegister vd,VRegister vs2,VRegister vs1)4170 void Riscv64Assembler::VMerge_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4171 AssertExtensionsEnabled(Riscv64Extension::kV);
4172 DCHECK(vd != V0);
4173 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4174 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4175 }
4176
VMerge_vxm(VRegister vd,VRegister vs2,XRegister rs1)4177 void Riscv64Assembler::VMerge_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4178 AssertExtensionsEnabled(Riscv64Extension::kV);
4179 DCHECK(vd != V0);
4180 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4181 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4182 }
4183
VMerge_vim(VRegister vd,VRegister vs2,int32_t imm5)4184 void Riscv64Assembler::VMerge_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4185 AssertExtensionsEnabled(Riscv64Extension::kV);
4186 DCHECK(vd != V0);
4187 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4188 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4189 }
4190
VMv_vv(VRegister vd,VRegister vs1)4191 void Riscv64Assembler::VMv_vv(VRegister vd, VRegister vs1) {
4192 AssertExtensionsEnabled(Riscv64Extension::kV);
4193 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4194 EmitR(funct7, V0, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4195 }
4196
VMv_vx(VRegister vd,XRegister rs1)4197 void Riscv64Assembler::VMv_vx(VRegister vd, XRegister rs1) {
4198 AssertExtensionsEnabled(Riscv64Extension::kV);
4199 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4200 EmitR(funct7, V0, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4201 }
4202
VMv_vi(VRegister vd,int32_t imm5)4203 void Riscv64Assembler::VMv_vi(VRegister vd, int32_t imm5) {
4204 AssertExtensionsEnabled(Riscv64Extension::kV);
4205 const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4206 EmitR(funct7, V0, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4207 }
4208
VMseq_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4209 void Riscv64Assembler::VMseq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4210 AssertExtensionsEnabled(Riscv64Extension::kV);
4211 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4212 const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4213 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4214 }
4215
VMseq_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4216 void Riscv64Assembler::VMseq_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4217 AssertExtensionsEnabled(Riscv64Extension::kV);
4218 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4219 const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4220 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4221 }
4222
VMseq_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4223 void Riscv64Assembler::VMseq_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4224 AssertExtensionsEnabled(Riscv64Extension::kV);
4225 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4226 const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4227 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4228 }
4229
VMsne_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4230 void Riscv64Assembler::VMsne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4231 AssertExtensionsEnabled(Riscv64Extension::kV);
4232 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4233 const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4234 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4235 }
4236
VMsne_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4237 void Riscv64Assembler::VMsne_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4238 AssertExtensionsEnabled(Riscv64Extension::kV);
4239 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4240 const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4241 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4242 }
4243
VMsne_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4244 void Riscv64Assembler::VMsne_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4245 AssertExtensionsEnabled(Riscv64Extension::kV);
4246 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4247 const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4248 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4249 }
4250
VMsltu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4251 void Riscv64Assembler::VMsltu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4252 AssertExtensionsEnabled(Riscv64Extension::kV);
4253 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4254 const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
4255 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4256 }
4257
VMsltu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4258 void Riscv64Assembler::VMsltu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4259 AssertExtensionsEnabled(Riscv64Extension::kV);
4260 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4261 const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
4262 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4263 }
4264
// Pseudo-instruction: vmsgtu.vv has no dedicated encoding; it is emitted as
// vmsltu.vv with the source operands swapped.
void Riscv64Assembler::VMsgtu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  VMsltu_vv(vd, vs1, vs2, vm);
}
4269
VMslt_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4270 void Riscv64Assembler::VMslt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4271 AssertExtensionsEnabled(Riscv64Extension::kV);
4272 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4273 const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
4274 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4275 }
4276
VMslt_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4277 void Riscv64Assembler::VMslt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4278 AssertExtensionsEnabled(Riscv64Extension::kV);
4279 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4280 const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
4281 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4282 }
4283
VMsgt_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4284 void Riscv64Assembler::VMsgt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4285 VMslt_vv(vd, vs1, vs2, vm);
4286 }
4287
VMsleu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4288 void Riscv64Assembler::VMsleu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4289 AssertExtensionsEnabled(Riscv64Extension::kV);
4290 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4291 const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4292 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4293 }
4294
VMsleu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4295 void Riscv64Assembler::VMsleu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4296 AssertExtensionsEnabled(Riscv64Extension::kV);
4297 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4298 const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4299 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4300 }
4301
VMsleu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4302 void Riscv64Assembler::VMsleu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4303 AssertExtensionsEnabled(Riscv64Extension::kV);
4304 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4305 const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4306 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4307 }
4308
VMsgeu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4309 void Riscv64Assembler::VMsgeu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4310 VMsleu_vv(vd, vs1, vs2, vm);
4311 }
4312
VMsltu_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4313 void Riscv64Assembler::VMsltu_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4314 CHECK(IsUint<4>(aimm5 - 1)) << "Should be between [1, 16]" << aimm5;
4315 VMsleu_vi(vd, vs2, aimm5 - 1, vm);
4316 }
4317
VMsle_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4318 void Riscv64Assembler::VMsle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4319 AssertExtensionsEnabled(Riscv64Extension::kV);
4320 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4321 const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4322 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4323 }
4324
VMsle_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4325 void Riscv64Assembler::VMsle_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4326 AssertExtensionsEnabled(Riscv64Extension::kV);
4327 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4328 const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4329 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4330 }
4331
VMsle_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4332 void Riscv64Assembler::VMsle_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4333 AssertExtensionsEnabled(Riscv64Extension::kV);
4334 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4335 const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4336 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4337 }
4338
VMsge_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4339 void Riscv64Assembler::VMsge_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4340 VMsle_vv(vd, vs1, vs2, vm);
4341 }
4342
VMslt_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4343 void Riscv64Assembler::VMslt_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4344 VMsle_vi(vd, vs2, aimm5 - 1, vm);
4345 }
4346
VMsgtu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4347 void Riscv64Assembler::VMsgtu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4348 AssertExtensionsEnabled(Riscv64Extension::kV);
4349 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4350 const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
4351 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4352 }
4353
VMsgtu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4354 void Riscv64Assembler::VMsgtu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4355 AssertExtensionsEnabled(Riscv64Extension::kV);
4356 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4357 const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
4358 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4359 }
4360
// Pseudo-instruction: vmsgeu.vi vd, vs2, aimm5 is emitted as vmsgtu.vi with
// the immediate decremented; only aimm5 in [1, 16] is representable.
void Riscv64Assembler::VMsgeu_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  CHECK(IsUint<4>(aimm5 - 1)) << "Should be between [1, 16]" << aimm5;
  VMsgtu_vi(vd, vs2, aimm5 - 1, vm);
}
4366
VMsgt_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4367 void Riscv64Assembler::VMsgt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4368 AssertExtensionsEnabled(Riscv64Extension::kV);
4369 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4370 const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
4371 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4372 }
4373
VMsgt_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4374 void Riscv64Assembler::VMsgt_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4375 AssertExtensionsEnabled(Riscv64Extension::kV);
4376 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4377 const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
4378 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4379 }
4380
VMsge_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4381 void Riscv64Assembler::VMsge_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4382 VMsgt_vi(vd, vs2, aimm5 - 1, vm);
4383 }
4384
VSaddu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4385 void Riscv64Assembler::VSaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4386 AssertExtensionsEnabled(Riscv64Extension::kV);
4387 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4388 const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4389 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4390 }
4391
VSaddu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4392 void Riscv64Assembler::VSaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4393 AssertExtensionsEnabled(Riscv64Extension::kV);
4394 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4395 const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4396 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4397 }
4398
VSaddu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4399 void Riscv64Assembler::VSaddu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4400 AssertExtensionsEnabled(Riscv64Extension::kV);
4401 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4402 const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4403 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4404 }
4405
VSadd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4406 void Riscv64Assembler::VSadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4407 AssertExtensionsEnabled(Riscv64Extension::kV);
4408 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4409 const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4410 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4411 }
4412
VSadd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4413 void Riscv64Assembler::VSadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4414 AssertExtensionsEnabled(Riscv64Extension::kV);
4415 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4416 const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4417 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4418 }
4419
VSadd_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4420 void Riscv64Assembler::VSadd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4421 AssertExtensionsEnabled(Riscv64Extension::kV);
4422 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4423 const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4424 EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4425 }
4426
VSsubu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4427 void Riscv64Assembler::VSsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4428 AssertExtensionsEnabled(Riscv64Extension::kV);
4429 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4430 const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4431 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4432 }
4433
VSsubu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4434 void Riscv64Assembler::VSsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4435 AssertExtensionsEnabled(Riscv64Extension::kV);
4436 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4437 const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4438 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4439 }
4440
VSsub_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4441 void Riscv64Assembler::VSsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4442 AssertExtensionsEnabled(Riscv64Extension::kV);
4443 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4444 const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4445 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4446 }
4447
VSsub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4448 void Riscv64Assembler::VSsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4449 AssertExtensionsEnabled(Riscv64Extension::kV);
4450 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4451 const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4452 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4453 }
4454
VSll_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4455 void Riscv64Assembler::VSll_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4456 AssertExtensionsEnabled(Riscv64Extension::kV);
4457 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4458 const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4459 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4460 }
4461
VSll_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4462 void Riscv64Assembler::VSll_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4463 AssertExtensionsEnabled(Riscv64Extension::kV);
4464 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4465 const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4466 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4467 }
4468
VSll_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4469 void Riscv64Assembler::VSll_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4470 AssertExtensionsEnabled(Riscv64Extension::kV);
4471 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4472 const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4473 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4474 }
4475
VSmul_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4476 void Riscv64Assembler::VSmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4477 AssertExtensionsEnabled(Riscv64Extension::kV);
4478 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4479 const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4480 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4481 }
4482
VSmul_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4483 void Riscv64Assembler::VSmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4484 AssertExtensionsEnabled(Riscv64Extension::kV);
4485 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4486 const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4487 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4488 }
4489
Vmv1r_v(VRegister vd,VRegister vs2)4490 void Riscv64Assembler::Vmv1r_v(VRegister vd, VRegister vs2) {
4491 AssertExtensionsEnabled(Riscv64Extension::kV);
4492 const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4493 EmitR(
4494 funct7, vs2, enum_cast<uint32_t>(Nf::k1), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4495 }
4496
Vmv2r_v(VRegister vd,VRegister vs2)4497 void Riscv64Assembler::Vmv2r_v(VRegister vd, VRegister vs2) {
4498 AssertExtensionsEnabled(Riscv64Extension::kV);
4499 DCHECK_EQ(enum_cast<uint32_t>(vd) % 2, 0u);
4500 DCHECK_EQ(enum_cast<uint32_t>(vs2) % 2, 0u);
4501 const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4502 EmitR(
4503 funct7, vs2, enum_cast<uint32_t>(Nf::k2), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4504 }
4505
Vmv4r_v(VRegister vd,VRegister vs2)4506 void Riscv64Assembler::Vmv4r_v(VRegister vd, VRegister vs2) {
4507 AssertExtensionsEnabled(Riscv64Extension::kV);
4508 DCHECK_EQ(enum_cast<uint32_t>(vd) % 4, 0u);
4509 DCHECK_EQ(enum_cast<uint32_t>(vs2) % 4, 0u);
4510 const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4511 EmitR(
4512 funct7, vs2, enum_cast<uint32_t>(Nf::k4), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4513 }
4514
Vmv8r_v(VRegister vd,VRegister vs2)4515 void Riscv64Assembler::Vmv8r_v(VRegister vd, VRegister vs2) {
4516 AssertExtensionsEnabled(Riscv64Extension::kV);
4517 DCHECK_EQ(enum_cast<uint32_t>(vd) % 8, 0u);
4518 DCHECK_EQ(enum_cast<uint32_t>(vs2) % 8, 0u);
4519 const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4520 EmitR(
4521 funct7, vs2, enum_cast<uint32_t>(Nf::k8), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4522 }
4523
VSrl_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4524 void Riscv64Assembler::VSrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4525 AssertExtensionsEnabled(Riscv64Extension::kV);
4526 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4527 const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4528 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4529 }
4530
VSrl_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4531 void Riscv64Assembler::VSrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4532 AssertExtensionsEnabled(Riscv64Extension::kV);
4533 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4534 const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4535 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4536 }
4537
VSrl_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4538 void Riscv64Assembler::VSrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4539 AssertExtensionsEnabled(Riscv64Extension::kV);
4540 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4541 const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4542 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4543 }
4544
VSra_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4545 void Riscv64Assembler::VSra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4546 AssertExtensionsEnabled(Riscv64Extension::kV);
4547 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4548 const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4549 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4550 }
4551
VSra_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4552 void Riscv64Assembler::VSra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4553 AssertExtensionsEnabled(Riscv64Extension::kV);
4554 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4555 const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4556 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4557 }
4558
VSra_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4559 void Riscv64Assembler::VSra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4560 AssertExtensionsEnabled(Riscv64Extension::kV);
4561 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4562 const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4563 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4564 }
4565
VSsrl_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4566 void Riscv64Assembler::VSsrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4567 AssertExtensionsEnabled(Riscv64Extension::kV);
4568 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4569 const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4570 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4571 }
4572
VSsrl_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4573 void Riscv64Assembler::VSsrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4574 AssertExtensionsEnabled(Riscv64Extension::kV);
4575 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4576 const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4577 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4578 }
4579
VSsrl_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4580 void Riscv64Assembler::VSsrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4581 AssertExtensionsEnabled(Riscv64Extension::kV);
4582 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4583 const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4584 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4585 }
4586
VSsra_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4587 void Riscv64Assembler::VSsra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4588 AssertExtensionsEnabled(Riscv64Extension::kV);
4589 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4590 const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4591 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4592 }
4593
VSsra_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4594 void Riscv64Assembler::VSsra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4595 AssertExtensionsEnabled(Riscv64Extension::kV);
4596 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4597 const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4598 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4599 }
4600
VSsra_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4601 void Riscv64Assembler::VSsra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4602 AssertExtensionsEnabled(Riscv64Extension::kV);
4603 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4604 const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4605 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4606 }
4607
VNsrl_wv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4608 void Riscv64Assembler::VNsrl_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4609 AssertExtensionsEnabled(Riscv64Extension::kV);
4610 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4611 const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4612 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4613 }
4614
VNsrl_wx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4615 void Riscv64Assembler::VNsrl_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4616 AssertExtensionsEnabled(Riscv64Extension::kV);
4617 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4618 const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4619 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4620 }
4621
VNsrl_wi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4622 void Riscv64Assembler::VNsrl_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4623 AssertExtensionsEnabled(Riscv64Extension::kV);
4624 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4625 const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4626 EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4627 }
4628
// Pseudo-instruction: vncvt.x.x.w vd, vs2, vm (narrowing integer convert) is
// emitted as vnsrl.wx with a zero shift amount.
void Riscv64Assembler::VNcvt_x_x_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  VNsrl_wx(vd, vs2, Zero, vm);
}
4633
// vnsra.wv: narrowing arithmetic right shift, vector shift amounts in `vs1`
// (funct6 = 0b101101, OPIVV).
void Riscv64Assembler::VNsra_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}

// vnsra.wx: narrowing arithmetic right shift by scalar `rs1`.
void Riscv64Assembler::VNsra_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}

// vnsra.wi: narrowing arithmetic right shift by a 5-bit immediate.
void Riscv64Assembler::VNsra_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
4654
// vnclipu.wv: narrowing fixed-point clip, unsigned saturation; shift amounts
// from `vs1` (funct6 = 0b101110, OPIVV).
void Riscv64Assembler::VNclipu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}

// vnclipu.wx: unsigned narrowing clip, scalar shift amount in `rs1`.
void Riscv64Assembler::VNclipu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}

// vnclipu.wi: unsigned narrowing clip by a 5-bit immediate shift amount.
void Riscv64Assembler::VNclipu_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
4675
// vnclip.wv: narrowing fixed-point clip, signed saturation; shift amounts
// from `vs1` (funct6 = 0b101111, OPIVV).
void Riscv64Assembler::VNclip_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}

// vnclip.wx: signed narrowing clip, scalar shift amount in `rs1`.
void Riscv64Assembler::VNclip_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}

// vnclip.wi: signed narrowing clip by a 5-bit immediate shift amount.
void Riscv64Assembler::VNclip_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
4696
// vwredsumu.vs: unsigned widening sum reduction, 2*SEW accumulator
// (funct6 = 0b110000, OPIVV). Reductions produce a single element result,
// so no vd/V0 overlap assert is made here.
void Riscv64Assembler::VWredsumu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}

// vwredsum.vs: signed widening sum reduction (funct6 = 0b110001).
void Riscv64Assembler::VWredsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
4708
// Single-width integer reductions (OPMVV, funct6 = 0b000000..0b000111):
// vd[0] = op(vs1[0], vs2[*]); producing a single element, so no vd/V0
// overlap assert is made for the masked forms.

// vredsum.vs: sum reduction.
void Riscv64Assembler::VRedsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredand.vs: bitwise-AND reduction.
void Riscv64Assembler::VRedand_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredor.vs: bitwise-OR reduction.
void Riscv64Assembler::VRedor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredxor.vs: bitwise-XOR reduction.
void Riscv64Assembler::VRedxor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredminu.vs: unsigned minimum reduction.
void Riscv64Assembler::VRedminu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredmin.vs: signed minimum reduction.
void Riscv64Assembler::VRedmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredmaxu.vs: unsigned maximum reduction.
void Riscv64Assembler::VRedmaxu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vredmax.vs: signed maximum reduction.
void Riscv64Assembler::VRedmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
4756
// Averaging add/subtract (OPMVV/OPMVX, funct6 = 0b001000..0b001011):
// fixed-point ops that halve the result with rounding per vxrm.

// vaaddu.vv: averaging unsigned add, vector-vector.
void Riscv64Assembler::VAaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vaaddu.vx: averaging unsigned add, vector-scalar.
void Riscv64Assembler::VAaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vaadd.vv: averaging signed add, vector-vector.
void Riscv64Assembler::VAadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vaadd.vx: averaging signed add, vector-scalar.
void Riscv64Assembler::VAadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vasubu.vv: averaging unsigned subtract, vector-vector.
void Riscv64Assembler::VAsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vasubu.vx: averaging unsigned subtract, vector-scalar.
void Riscv64Assembler::VAsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vasub.vv: averaging signed subtract, vector-vector.
void Riscv64Assembler::VAsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vasub.vx: averaging signed subtract, vector-scalar.
void Riscv64Assembler::VAsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
4812
// vslide1up.vx: shift elements up by one, inserting scalar `rs1` at element 0
// (funct6 = 0b001110, OPMVX). The destination may not overlap the source
// group for slide-up, hence the vd != vs2 assert.
void Riscv64Assembler::VSlide1up_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vslide1down.vx: shift elements down by one, inserting scalar `rs1` at the
// top element (funct6 = 0b001111). Source/destination overlap is legal for
// slide-down, so no vd != vs2 assert here.
void Riscv64Assembler::VSlide1down_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
4827
// vcompress.vm: pack the elements of `vs2` selected by mask `vs1` into the low
// elements of `vd` (funct6 = 0b010111, always unmasked). The destination may
// overlap neither source, hence both asserts.
void Riscv64Assembler::VCompress_vm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
4835
// Mask-register logical ops (OPMVV, funct6 = 0b011000..0b011111). These
// operate on mask bits, are always encoded unmasked, and several standard
// pseudo-instructions below are expressed in terms of them.

// vmandn.mm: vd = vs2 & ~vs1.
void Riscv64Assembler::VMandn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011000, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmand.mm: vd = vs2 & vs1.
void Riscv64Assembler::VMand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011001, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmmv.m pseudo-instruction: copy mask register, encoded as vmand with both
// sources equal.
void Riscv64Assembler::VMmv_m(VRegister vd, VRegister vs2) { VMand_mm(vd, vs2, vs2); }

// vmor.mm: vd = vs2 | vs1.
void Riscv64Assembler::VMor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011010, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmxor.mm: vd = vs2 ^ vs1.
void Riscv64Assembler::VMxor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011011, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmclr.m pseudo-instruction: clear mask register (x ^ x == 0).
void Riscv64Assembler::VMclr_m(VRegister vd) { VMxor_mm(vd, vd, vd); }

// vmorn.mm: vd = vs2 | ~vs1.
void Riscv64Assembler::VMorn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011100, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmnand.mm: vd = ~(vs2 & vs1).
void Riscv64Assembler::VMnand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011101, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmnot.m pseudo-instruction: invert mask register (~(x & x) == ~x).
void Riscv64Assembler::VMnot_m(VRegister vd, VRegister vs2) { VMnand_mm(vd, vs2, vs2); }

// vmnor.mm: vd = ~(vs2 | vs1).
void Riscv64Assembler::VMnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011110, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmxnor.mm: vd = ~(vs2 ^ vs1).
void Riscv64Assembler::VMxnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b011111, VM::kUnmasked);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmset.m pseudo-instruction: set all mask bits (~(x ^ x) == all-ones).
void Riscv64Assembler::VMset_m(VRegister vd) { VMxnor_mm(vd, vd, vd); }
4891
// Integer divide/remainder (OPMVV/OPMVX, funct6 = 0b100000..0b100011).

// vdivu.vv: unsigned divide, vector-vector.
void Riscv64Assembler::VDivu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vdivu.vx: unsigned divide, vector-scalar.
void Riscv64Assembler::VDivu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vdiv.vv: signed divide, vector-vector.
void Riscv64Assembler::VDiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vdiv.vx: signed divide, vector-scalar.
void Riscv64Assembler::VDiv_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vremu.vv: unsigned remainder, vector-vector.
void Riscv64Assembler::VRemu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vremu.vx: unsigned remainder, vector-scalar.
void Riscv64Assembler::VRemu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vrem.vv: signed remainder, vector-vector.
void Riscv64Assembler::VRem_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vrem.vx: signed remainder, vector-scalar.
void Riscv64Assembler::VRem_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
4947
// Integer multiply / multiply-high (OPMVV/OPMVX, funct6 = 0b100100..0b100111).

// vmulhu.vv: unsigned multiply returning the high SEW bits, vector-vector.
void Riscv64Assembler::VMulhu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmulhu.vx: unsigned multiply-high, vector-scalar.
void Riscv64Assembler::VMulhu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vmul.vv: multiply returning the low SEW bits, vector-vector.
void Riscv64Assembler::VMul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmul.vx: low-bits multiply, vector-scalar.
void Riscv64Assembler::VMul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vmulhsu.vv: signed(vs2) * unsigned(vs1) multiply-high, vector-vector.
void Riscv64Assembler::VMulhsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmulhsu.vx: signed(vs2) * unsigned(rs1) multiply-high, vector-scalar.
void Riscv64Assembler::VMulhsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vmulh.vv: signed multiply-high, vector-vector.
void Riscv64Assembler::VMulh_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmulh.vx: signed multiply-high, vector-scalar.
void Riscv64Assembler::VMulh_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5003
// Integer multiply-add, overwriting the multiplicand. Note the parameter
// order (vd, vs1/rs1, vs2, vm) mirrors the assembly operand order for
// these forms.

// vmadd.vv: vd[i] = (vs1[i] * vd[i]) + vs2[i] (funct6 = 0b101001).
void Riscv64Assembler::VMadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmadd.vx: vd[i] = (rs1 * vd[i]) + vs2[i].
void Riscv64Assembler::VMadd_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vnmsub.vv: vd[i] = -(vs1[i] * vd[i]) + vs2[i] (funct6 = 0b101011).
void Riscv64Assembler::VNmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vnmsub.vx: vd[i] = -(rs1 * vd[i]) + vs2[i].
void Riscv64Assembler::VNmsub_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5031
// vmacc.vv: vd[i] = (vs1[i] * vs2[i]) + vd[i] (funct6 = 0b101101, OPMVV).
void Riscv64Assembler::VMacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vmacc.vx: vd[i] = (rs1 * vs2[i]) + vd[i].
void Riscv64Assembler::VMacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5045
VNmsac_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)5046 void Riscv64Assembler::VNmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
5047 AssertExtensionsEnabled(Riscv64Extension::kV);
5048 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5049 DCHECK(vd != vs1);
5050 DCHECK(vd != vs2);
5051 const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
5052 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5053 }
5054
// vnmsac.vx: vd[i] = -(rs1 * vs2[i]) + vd[i] (funct6 = 0b101111, OPMVX).
void Riscv64Assembler::VNmsac_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5061
// vwaddu.vv: widening unsigned add, 2*SEW result (funct6 = 0b110000, OPMVV).
// A widening destination group may not overlap a single-width source group,
// hence the vd != vs1/vs2 asserts.
void Riscv64Assembler::VWaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwaddu.vx: widening unsigned add of scalar `rs1`.
void Riscv64Assembler::VWaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwcvtu.x.x.v pseudo-instruction: zero-extend SEW -> 2*SEW, encoded as a
// widening unsigned add of x0.
void Riscv64Assembler::VWcvtu_x_x_v(VRegister vd, VRegister vs, VM vm) {
  VWaddu_vx(vd, vs, Zero, vm);
}
5082
// vwadd.vv: widening signed add, 2*SEW result (funct6 = 0b110001, OPMVV).
void Riscv64Assembler::VWadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0, and a widening
  // destination group may not overlap a single-width source group.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwadd.vx: widening signed add of scalar `rs1`.
void Riscv64Assembler::VWadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwcvt.x.x.v pseudo-instruction: sign-extend SEW -> 2*SEW, encoded as a
// widening signed add of x0.
void Riscv64Assembler::VWcvt_x_x_v(VRegister vd, VRegister vs, VM vm) {
  VWadd_vx(vd, vs, Zero, vm);
}
5103
// vwsubu.vv: widening unsigned subtract, 2*SEW result (funct6 = 0b110010).
void Riscv64Assembler::VWsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0, and a widening
  // destination group may not overlap a single-width source group.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwsubu.vx: widening unsigned subtract of scalar `rs1`.
void Riscv64Assembler::VWsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwsub.vv: widening signed subtract, 2*SEW result (funct6 = 0b110011).
void Riscv64Assembler::VWsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwsub.vx: widening signed subtract of scalar `rs1`.
void Riscv64Assembler::VWsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5137
// Widening add/subtract, wide first operand (".w" forms, funct6 =
// 0b110100..0b110111): `vs2` and `vd` are already 2*SEW wide, so only the
// single-width `vs1` source carries an overlap restriction.

// vwaddu.wv: vd = vs2(2*SEW) + zero-extended vs1.
void Riscv64Assembler::VWaddu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwaddu.wx: vd = vs2(2*SEW) + zero-extended scalar rs1.
void Riscv64Assembler::VWaddu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwadd.wv: vd = vs2(2*SEW) + sign-extended vs1.
void Riscv64Assembler::VWadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwadd.wx: vd = vs2(2*SEW) + sign-extended scalar rs1.
void Riscv64Assembler::VWadd_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwsubu.wv: vd = vs2(2*SEW) - zero-extended vs1.
void Riscv64Assembler::VWsubu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwsubu.wx: vd = vs2(2*SEW) - zero-extended scalar rs1.
void Riscv64Assembler::VWsubu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}

// vwsub.wv: vd = vs2(2*SEW) - sign-extended vs1.
void Riscv64Assembler::VWsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}

// vwsub.wx: vd = vs2(2*SEW) - sign-extended scalar rs1.
void Riscv64Assembler::VWsub_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5197
VWmulu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5198 void Riscv64Assembler::VWmulu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5199 AssertExtensionsEnabled(Riscv64Extension::kV);
5200 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5201 DCHECK(vd != vs1);
5202 DCHECK(vd != vs2);
5203 const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
5204 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5205 }
5206
VWmulu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)5207 void Riscv64Assembler::VWmulu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
5208 AssertExtensionsEnabled(Riscv64Extension::kV);
5209 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5210 DCHECK(vd != vs2);
5211 const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
5212 EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5213 }
5214
VWmulsu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5215 void Riscv64Assembler::VWmulsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5216 AssertExtensionsEnabled(Riscv64Extension::kV);
5217 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5218 DCHECK(vd != vs1);
5219 DCHECK(vd != vs2);
5220 const uint32_t funct7 = EncodeRVVF7(0b111010, vm);
5221 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5222 }
5223
// Emits vwmulsu.vx (OPMVX, funct6=0b111010): widening multiply,
// signed(vs2) * unsigned scalar x[rs1].
void Riscv64Assembler::VWmulsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5231
// Emits vwmul.vv (OPMVV, funct6=0b111011): widening signed multiply; wide
// destination may not overlap the narrow sources.
void Riscv64Assembler::VWmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5240
// Emits vwmul.vx (OPMVX, funct6=0b111011): widening signed multiply by the
// scalar x[rs1].
void Riscv64Assembler::VWmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5248
// Emits vwmaccu.vv (OPMVV, funct6=0b111100): widening unsigned
// multiply-accumulate, vd(wide) += vs1 * vs2. Note the multiplicand vs1 is
// placed in the rs1 field of the encoding.
void Riscv64Assembler::VWmaccu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5257
// Emits vwmaccu.vx (OPMVX, funct6=0b111100): widening unsigned
// multiply-accumulate with scalar, vd(wide) += x[rs1] * vs2.
void Riscv64Assembler::VWmaccu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5265
// Emits vwmacc.vv (OPMVV, funct6=0b111101): widening signed
// multiply-accumulate, vd(wide) += vs1 * vs2.
void Riscv64Assembler::VWmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5274
// Emits vwmacc.vx (OPMVX, funct6=0b111101): widening signed
// multiply-accumulate with scalar, vd(wide) += x[rs1] * vs2.
void Riscv64Assembler::VWmacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5282
// Emits vwmaccus.vx (OPMVX, funct6=0b111110): widening multiply-accumulate,
// unsigned scalar x[rs1] times signed vs2 (this form exists only as .vx).
void Riscv64Assembler::VWmaccus_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5290
// Emits vwmaccsu.vv (OPMVV, funct6=0b111111): widening multiply-accumulate,
// signed vs1 times unsigned vs2.
void Riscv64Assembler::VWmaccsu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5299
// Emits vwmaccsu.vx (OPMVX, funct6=0b111111): widening multiply-accumulate,
// signed scalar x[rs1] times unsigned vs2.
void Riscv64Assembler::VWmaccsu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5307
// Emits vfadd.vv (OPFVV, funct6=0b000000): element-wise floating-point add.
void Riscv64Assembler::VFadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked instruction may not use the mask register V0 as its destination.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5314
// Emits vfadd.vf (OPFVF, funct6=0b000000): floating-point add of scalar f[fs1]
// to each element of vs2.
void Riscv64Assembler::VFadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5321
// Emits vfredusum.vs (OPFVV, funct6=0b000001): unordered floating-point sum
// reduction into element 0 of vd. No V0-overlap check: a reduction writes a
// scalar result, which the RVV spec allows to overlap the mask register.
void Riscv64Assembler::VFredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5327
// Emits vfsub.vv (OPFVV, funct6=0b000010): element-wise floating-point
// subtract, vd = vs2 - vs1.
void Riscv64Assembler::VFsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5334
// Emits vfsub.vf (OPFVF, funct6=0b000010): floating-point subtract,
// vd = vs2 - f[fs1].
void Riscv64Assembler::VFsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5341
// Emits vfredosum.vs (OPFVV, funct6=0b000011): ordered floating-point sum
// reduction. No V0-overlap check: reduction destinations are scalar.
void Riscv64Assembler::VFredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5347
// Emits vfmin.vv (OPFVV, funct6=0b000100): element-wise floating-point minimum.
void Riscv64Assembler::VFmin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5354
// Emits vfmin.vf (OPFVF, funct6=0b000100): floating-point minimum of each
// element of vs2 and scalar f[fs1].
void Riscv64Assembler::VFmin_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5361
// Emits vfredmin.vs (OPFVV, funct6=0b000101): floating-point minimum
// reduction. No V0-overlap check: reduction destinations are scalar.
void Riscv64Assembler::VFredmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5367
// Emits vfmax.vv (OPFVV, funct6=0b000110): element-wise floating-point maximum.
void Riscv64Assembler::VFmax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5374
// Emits vfmax.vf (OPFVF, funct6=0b000110): floating-point maximum of each
// element of vs2 and scalar f[fs1].
void Riscv64Assembler::VFmax_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5381
// Emits vfredmax.vs (OPFVV, funct6=0b000111): floating-point maximum
// reduction. No V0-overlap check: reduction destinations are scalar.
void Riscv64Assembler::VFredmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5387
// Emits vfsgnj.vv (OPFVV, funct6=0b001000): sign-injection, magnitude of vs2
// with the sign of vs1.
void Riscv64Assembler::VFsgnj_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5394
// Emits vfsgnj.vf (OPFVF, funct6=0b001000): sign-injection, magnitude of vs2
// with the sign of scalar f[fs1].
void Riscv64Assembler::VFsgnj_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5401
// Emits vfsgnjn.vv (OPFVV, funct6=0b001001): sign-injection with negated sign
// of vs1 (also the basis of the vfneg.v pseudo-instruction).
void Riscv64Assembler::VFsgnjn_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5408
// Emits vfsgnjn.vf (OPFVF, funct6=0b001001): sign-injection with negated sign
// of scalar f[fs1].
void Riscv64Assembler::VFsgnjn_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5415
// vfneg.v pseudo-instruction: vd = -vs, implemented as vfsgnjn.vv vd, vs, vs.
void Riscv64Assembler::VFneg_v(VRegister vd, VRegister vs) { VFsgnjn_vv(vd, vs, vs); }
5417
// Emits vfsgnjx.vv (OPFVV, funct6=0b001010): sign-injection XOR (basis of the
// vfabs.v pseudo-instruction).
void Riscv64Assembler::VFsgnjx_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5424
// Emits vfsgnjx.vf (OPFVF, funct6=0b001010): sign-injection XOR with the sign
// of scalar f[fs1].
void Riscv64Assembler::VFsgnjx_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5431
// vfabs.v pseudo-instruction: vd = |vs|, implemented as vfsgnjx.vv vd, vs, vs.
void Riscv64Assembler::VFabs_v(VRegister vd, VRegister vs) { VFsgnjx_vv(vd, vs, vs); }
5433
// Emits vfslide1up.vf (OPFVF, funct6=0b001110): slide elements of vs2 up by
// one and insert f[fs1] at element 0. Slide-up destination may not overlap
// the source.
void Riscv64Assembler::VFslide1up_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5441
// Emits vfslide1down.vf (OPFVF, funct6=0b001111): slide elements of vs2 down
// by one and insert f[fs1] at the top element. Unlike slide-up, the
// destination may overlap the source.
void Riscv64Assembler::VFslide1down_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5448
// Emits vfmerge.vfm (OPFVF, funct6=0b010111, vm=0): vd[i] = v0.mask[i] ?
// f[fs1] : vs2[i]. Always uses V0 as the mask, so vd may not be V0.
void Riscv64Assembler::VFmerge_vfm(VRegister vd, VRegister vs2, FRegister fs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5455
// Emits vfmv.v.f (OPFVF, funct6=0b010111, unmasked, vs2=v0): splat the scalar
// f[fs1] to all body elements of vd.
void Riscv64Assembler::VFmv_v_f(VRegister vd, FRegister fs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
  EmitR(funct7, V0, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5461
// Emits vmfeq.vv (OPFVV, funct6=0b011000): floating-point compare-equal,
// writing a mask into vd.
void Riscv64Assembler::VMfeq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5468
// Emits vmfeq.vf (OPFVF, funct6=0b011000): compare-equal against scalar
// f[fs1], writing a mask into vd.
void Riscv64Assembler::VMfeq_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5475
// Emits vmfle.vv (OPFVV, funct6=0b011001): mask[i] = (vs2[i] <= vs1[i]).
void Riscv64Assembler::VMfle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5482
// Emits vmfle.vf (OPFVF, funct6=0b011001): mask[i] = (vs2[i] <= f[fs1]).
void Riscv64Assembler::VMfle_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5489
// vmfge.vv pseudo-instruction: a >= b encoded as vmfle.vv with swapped sources.
void Riscv64Assembler::VMfge_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  VMfle_vv(vd, vs1, vs2, vm);
}
5493
// Emits vmflt.vv (OPFVV, funct6=0b011011): mask[i] = (vs2[i] < vs1[i]).
void Riscv64Assembler::VMflt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5500
// Emits vmflt.vf (OPFVF, funct6=0b011011): mask[i] = (vs2[i] < f[fs1]).
void Riscv64Assembler::VMflt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5507
// vmfgt.vv pseudo-instruction: a > b encoded as vmflt.vv with swapped sources.
void Riscv64Assembler::VMfgt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  VMflt_vv(vd, vs1, vs2, vm);
}
5511
// Emits vmfne.vv (OPFVV, funct6=0b011100): mask[i] = (vs2[i] != vs1[i]).
void Riscv64Assembler::VMfne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5518
// Emits vmfne.vf (OPFVF, funct6=0b011100): mask[i] = (vs2[i] != f[fs1]).
void Riscv64Assembler::VMfne_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5525
// Emits vmfgt.vf (OPFVF, funct6=0b011101): mask[i] = (vs2[i] > f[fs1]).
// The .vf form has a dedicated encoding (unlike vmfgt.vv, which is a swap).
void Riscv64Assembler::VMfgt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5532
// Emits vmfge.vf (OPFVF, funct6=0b011111): mask[i] = (vs2[i] >= f[fs1]).
void Riscv64Assembler::VMfge_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5539
VFdiv_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5540 void Riscv64Assembler::VFdiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5541 AssertExtensionsEnabled(Riscv64Extension::kV);
5542 const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
5543 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5544 }
5545
// Emits vfdiv.vf (OPFVF, funct6=0b100000): vd[i] = vs2[i] / f[fs1].
void Riscv64Assembler::VFdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5552
// Emits vfrdiv.vf (OPFVF, funct6=0b100001): reverse divide,
// vd[i] = f[fs1] / vs2[i].
void Riscv64Assembler::VFrdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5559
// Emits vfmul.vv (OPFVV, funct6=0b100100): element-wise floating-point
// multiply.
void Riscv64Assembler::VFmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5566
// Emits vfmul.vf (OPFVF, funct6=0b100100): vd[i] = vs2[i] * f[fs1].
void Riscv64Assembler::VFmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5573
// Emits vfrsub.vf (OPFVF, funct6=0b100111): reverse subtract,
// vd[i] = f[fs1] - vs2[i].
void Riscv64Assembler::VFrsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5580
// Emits vfmadd.vv (OPFVV, funct6=0b101000): fused multiply-add,
// vd[i] = (vd[i] * vs1[i]) + vs2[i]. The multiplicand vs1 goes in the rs1
// encoding field.
void Riscv64Assembler::VFmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5587
// Emits vfmadd.vf (OPFVF, funct6=0b101000): vd[i] = (vd[i] * f[fs1]) + vs2[i].
void Riscv64Assembler::VFmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5594
// Emits vfnmadd.vv (OPFVV, funct6=0b101001): vd[i] = -(vd[i] * vs1[i]) - vs2[i].
void Riscv64Assembler::VFnmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5601
// Emits vfnmadd.vf (OPFVF, funct6=0b101001): vd[i] = -(vd[i] * f[fs1]) - vs2[i].
void Riscv64Assembler::VFnmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5608
// Emits vfmsub.vv (OPFVV, funct6=0b101010): vd[i] = (vd[i] * vs1[i]) - vs2[i].
void Riscv64Assembler::VFmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5615
// Emits vfmsub.vf (OPFVF, funct6=0b101010): vd[i] = (vd[i] * f[fs1]) - vs2[i].
void Riscv64Assembler::VFmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5622
// Emits vfnmsub.vv (OPFVV, funct6=0b101011): vd[i] = -(vd[i] * vs1[i]) + vs2[i].
void Riscv64Assembler::VFnmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5629
// Emits vfnmsub.vf (OPFVF, funct6=0b101011): vd[i] = -(vd[i] * f[fs1]) + vs2[i].
void Riscv64Assembler::VFnmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5636
// Emits vfmacc.vv (OPFVV, funct6=0b101100): vd[i] = (vs1[i] * vs2[i]) + vd[i].
void Riscv64Assembler::VFmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5643
// Emits vfmacc.vf (OPFVF, funct6=0b101100): vd[i] = (f[fs1] * vs2[i]) + vd[i].
void Riscv64Assembler::VFmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5650
// Emits vfnmacc.vv (OPFVV, funct6=0b101101): vd[i] = -(vs1[i] * vs2[i]) - vd[i].
void Riscv64Assembler::VFnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5657
// Emits vfnmacc.vf (OPFVF, funct6=0b101101): vd[i] = -(f[fs1] * vs2[i]) - vd[i].
void Riscv64Assembler::VFnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5664
// Emits vfmsac.vv (OPFVV, funct6=0b101110): vd[i] = (vs1[i] * vs2[i]) - vd[i].
void Riscv64Assembler::VFmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5671
// Emits vfmsac.vf (OPFVF, funct6=0b101110): vd[i] = (f[fs1] * vs2[i]) - vd[i].
void Riscv64Assembler::VFmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5678
// Emits vfnmsac.vv (OPFVV, funct6=0b101111): vd[i] = -(vs1[i] * vs2[i]) + vd[i].
void Riscv64Assembler::VFnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5685
// Emits vfnmsac.vf (OPFVF, funct6=0b101111): vd[i] = -(f[fs1] * vs2[i]) + vd[i].
void Riscv64Assembler::VFnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5692
// Emits vfwadd.vv (OPFVV, funct6=0b110000): widening floating-point add; the
// wide (2*SEW) destination may not overlap the narrow sources.
void Riscv64Assembler::VFwadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5701
// Emits vfwadd.vf (OPFVF, funct6=0b110000): widening floating-point add of
// scalar f[fs1]; wide destination may not overlap the narrow vs2.
void Riscv64Assembler::VFwadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5709
// Emits vfwredusum.vs (OPFVV, funct6=0b110001): widening unordered
// floating-point sum reduction.
// NOTE(review): this carries the masked-vd-vs-V0 check although the sibling
// reductions (VFredusum_vs, VFredosum_vs, VFwredosum_vs) do not, and the RVV
// spec permits a scalar-result destination to overlap v0 — confirm whether
// this DCHECK is intended.
void Riscv64Assembler::VFwredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5716
// Emits vfwsub.vv (OPFVV, funct6=0b110010): widening floating-point subtract;
// wide destination may not overlap the narrow sources.
void Riscv64Assembler::VFwsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5725
// Emits vfwsub.vf (OPFVF, funct6=0b110010): widening floating-point subtract
// of scalar f[fs1]; wide destination may not overlap the narrow vs2.
void Riscv64Assembler::VFwsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5733
// Emits vfwredosum.vs (OPFVV, funct6=0b110011): widening ordered
// floating-point sum reduction. No V0-overlap check: reduction destinations
// are scalar.
void Riscv64Assembler::VFwredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5739
// Emits vfwadd.wv (OPFVV, funct6=0b110100): wide vs2 plus narrow vs1, wide
// result. Only the narrow source vs1 must not overlap vd (vs2 is already wide).
void Riscv64Assembler::VFwadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5747
// Emits vfwadd.wf (OPFVF, funct6=0b110100): wide vs2 plus scalar f[fs1], wide
// result. No overlap check needed: vs2 is already wide.
void Riscv64Assembler::VFwadd_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5754
// Emits vfwsub.wv (OPFVV, funct6=0b110110): wide vs2 minus narrow vs1, wide
// result. Only the narrow source vs1 must not overlap vd.
void Riscv64Assembler::VFwsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5762
// Emits vfwsub.wf (OPFVF, funct6=0b110110): wide vs2 minus scalar f[fs1], wide
// result.
void Riscv64Assembler::VFwsub_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5769
// Emits vfwmul.vv (OPFVV, funct6=0b111000): widening floating-point multiply;
// wide destination may not overlap the narrow sources.
void Riscv64Assembler::VFwmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5778
// Emits vfwmul.vf (OPFVF, funct6=0b111000): widening floating-point multiply
// by scalar f[fs1]; wide destination may not overlap the narrow vs2.
void Riscv64Assembler::VFwmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5786
// Emits vfwmacc.vv (OPFVV, funct6=0b111100): widening fused multiply-add,
// vd(wide) += vs1 * vs2; wide destination may not overlap the narrow sources.
void Riscv64Assembler::VFwmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5795
// Emits vfwmacc.vf (OPFVF, funct6=0b111100): widening fused multiply-add,
// vd(wide) += f[fs1] * vs2.
void Riscv64Assembler::VFwmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5803
// Emits vfwnmacc.vv (OPFVV, funct6=0b111101): widening negated fused
// multiply-add, vd(wide) = -(vs1 * vs2) - vd.
void Riscv64Assembler::VFwnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5812
VFwnmacc_vf(VRegister vd,FRegister fs1,VRegister vs2,VM vm)5813 void Riscv64Assembler::VFwnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
5814 AssertExtensionsEnabled(Riscv64Extension::kV);
5815 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5816 DCHECK(vd != vs2);
5817 const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
5818 EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
5819 }
5820
VFwmsac_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)5821 void Riscv64Assembler::VFwmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
5822 AssertExtensionsEnabled(Riscv64Extension::kV);
5823 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5824 DCHECK(vd != vs1);
5825 DCHECK(vd != vs2);
5826 const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
5827 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5828 }
5829
VFwmsac_vf(VRegister vd,FRegister fs1,VRegister vs2,VM vm)5830 void Riscv64Assembler::VFwmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
5831 AssertExtensionsEnabled(Riscv64Extension::kV);
5832 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5833 DCHECK(vd != vs2);
5834 const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
5835 EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
5836 }
5837
VFwnmsac_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)5838 void Riscv64Assembler::VFwnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
5839 AssertExtensionsEnabled(Riscv64Extension::kV);
5840 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5841 DCHECK(vd != vs1);
5842 DCHECK(vd != vs2);
5843 const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
5844 EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5845 }
5846
VFwnmsac_vf(VRegister vd,FRegister fs1,VRegister vs2,VM vm)5847 void Riscv64Assembler::VFwnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
5848 AssertExtensionsEnabled(Riscv64Extension::kV);
5849 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5850 DCHECK(vd != vs2);
5851 const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
5852 EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
5853 }
5854
VMv_s_x(VRegister vd,XRegister rs1)5855 void Riscv64Assembler::VMv_s_x(VRegister vd, XRegister rs1) {
5856 AssertExtensionsEnabled(Riscv64Extension::kV);
5857 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
5858 EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5859 }
5860
VMv_x_s(XRegister rd,VRegister vs2)5861 void Riscv64Assembler::VMv_x_s(XRegister rd, VRegister vs2) {
5862 AssertExtensionsEnabled(Riscv64Extension::kV);
5863 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
5864 EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
5865 }
5866
VCpop_m(XRegister rd,VRegister vs2,VM vm)5867 void Riscv64Assembler::VCpop_m(XRegister rd, VRegister vs2, VM vm) {
5868 AssertExtensionsEnabled(Riscv64Extension::kV);
5869 const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
5870 EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
5871 }
5872
VFirst_m(XRegister rd,VRegister vs2,VM vm)5873 void Riscv64Assembler::VFirst_m(XRegister rd, VRegister vs2, VM vm) {
5874 AssertExtensionsEnabled(Riscv64Extension::kV);
5875 const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
5876 EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
5877 }
5878
VZext_vf8(VRegister vd,VRegister vs2,VM vm)5879 void Riscv64Assembler::VZext_vf8(VRegister vd, VRegister vs2, VM vm) {
5880 AssertExtensionsEnabled(Riscv64Extension::kV);
5881 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5882 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5883 EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5884 }
5885
VSext_vf8(VRegister vd,VRegister vs2,VM vm)5886 void Riscv64Assembler::VSext_vf8(VRegister vd, VRegister vs2, VM vm) {
5887 AssertExtensionsEnabled(Riscv64Extension::kV);
5888 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5889 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5890 EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5891 }
5892
VZext_vf4(VRegister vd,VRegister vs2,VM vm)5893 void Riscv64Assembler::VZext_vf4(VRegister vd, VRegister vs2, VM vm) {
5894 AssertExtensionsEnabled(Riscv64Extension::kV);
5895 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5896 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5897 EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5898 }
5899
VSext_vf4(VRegister vd,VRegister vs2,VM vm)5900 void Riscv64Assembler::VSext_vf4(VRegister vd, VRegister vs2, VM vm) {
5901 AssertExtensionsEnabled(Riscv64Extension::kV);
5902 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5903 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5904 EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5905 }
5906
VZext_vf2(VRegister vd,VRegister vs2,VM vm)5907 void Riscv64Assembler::VZext_vf2(VRegister vd, VRegister vs2, VM vm) {
5908 AssertExtensionsEnabled(Riscv64Extension::kV);
5909 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5910 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5911 EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5912 }
5913
VSext_vf2(VRegister vd,VRegister vs2,VM vm)5914 void Riscv64Assembler::VSext_vf2(VRegister vd, VRegister vs2, VM vm) {
5915 AssertExtensionsEnabled(Riscv64Extension::kV);
5916 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5917 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5918 EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5919 }
5920
VFmv_s_f(VRegister vd,FRegister fs1)5921 void Riscv64Assembler::VFmv_s_f(VRegister vd, FRegister fs1) {
5922 AssertExtensionsEnabled(Riscv64Extension::kV);
5923 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
5924 EmitR(funct7, 0b00000, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
5925 }
5926
VFmv_f_s(FRegister fd,VRegister vs2)5927 void Riscv64Assembler::VFmv_f_s(FRegister fd, VRegister vs2) {
5928 AssertExtensionsEnabled(Riscv64Extension::kV);
5929 const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
5930 EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), fd, 0x57);
5931 }
5932
VFcvt_xu_f_v(VRegister vd,VRegister vs2,VM vm)5933 void Riscv64Assembler::VFcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
5934 AssertExtensionsEnabled(Riscv64Extension::kV);
5935 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5936 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5937 EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5938 }
5939
VFcvt_x_f_v(VRegister vd,VRegister vs2,VM vm)5940 void Riscv64Assembler::VFcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
5941 AssertExtensionsEnabled(Riscv64Extension::kV);
5942 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5943 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5944 EmitR(funct7, vs2, 0b00001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5945 }
5946
VFcvt_f_xu_v(VRegister vd,VRegister vs2,VM vm)5947 void Riscv64Assembler::VFcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
5948 AssertExtensionsEnabled(Riscv64Extension::kV);
5949 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5950 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5951 EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5952 }
5953
VFcvt_f_x_v(VRegister vd,VRegister vs2,VM vm)5954 void Riscv64Assembler::VFcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
5955 AssertExtensionsEnabled(Riscv64Extension::kV);
5956 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5957 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5958 EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5959 }
5960
VFcvt_rtz_xu_f_v(VRegister vd,VRegister vs2,VM vm)5961 void Riscv64Assembler::VFcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
5962 AssertExtensionsEnabled(Riscv64Extension::kV);
5963 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5964 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5965 EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5966 }
5967
VFcvt_rtz_x_f_v(VRegister vd,VRegister vs2,VM vm)5968 void Riscv64Assembler::VFcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
5969 AssertExtensionsEnabled(Riscv64Extension::kV);
5970 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5971 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5972 EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5973 }
5974
VFwcvt_xu_f_v(VRegister vd,VRegister vs2,VM vm)5975 void Riscv64Assembler::VFwcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
5976 AssertExtensionsEnabled(Riscv64Extension::kV);
5977 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5978 DCHECK(vd != vs2);
5979 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5980 EmitR(funct7, vs2, 0b01000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5981 }
5982
VFwcvt_x_f_v(VRegister vd,VRegister vs2,VM vm)5983 void Riscv64Assembler::VFwcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
5984 AssertExtensionsEnabled(Riscv64Extension::kV);
5985 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5986 DCHECK(vd != vs2);
5987 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5988 EmitR(funct7, vs2, 0b01001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5989 }
5990
VFwcvt_f_xu_v(VRegister vd,VRegister vs2,VM vm)5991 void Riscv64Assembler::VFwcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
5992 AssertExtensionsEnabled(Riscv64Extension::kV);
5993 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5994 DCHECK(vd != vs2);
5995 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
5996 EmitR(funct7, vs2, 0b01010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5997 }
5998
VFwcvt_f_x_v(VRegister vd,VRegister vs2,VM vm)5999 void Riscv64Assembler::VFwcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
6000 AssertExtensionsEnabled(Riscv64Extension::kV);
6001 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6002 DCHECK(vd != vs2);
6003 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6004 EmitR(funct7, vs2, 0b01011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6005 }
6006
VFwcvt_f_f_v(VRegister vd,VRegister vs2,VM vm)6007 void Riscv64Assembler::VFwcvt_f_f_v(VRegister vd, VRegister vs2, VM vm) {
6008 AssertExtensionsEnabled(Riscv64Extension::kV);
6009 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6010 DCHECK(vd != vs2);
6011 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6012 EmitR(funct7, vs2, 0b01100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6013 }
6014
VFwcvt_rtz_xu_f_v(VRegister vd,VRegister vs2,VM vm)6015 void Riscv64Assembler::VFwcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
6016 AssertExtensionsEnabled(Riscv64Extension::kV);
6017 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6018 DCHECK(vd != vs2);
6019 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6020 EmitR(funct7, vs2, 0b01110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6021 }
6022
VFwcvt_rtz_x_f_v(VRegister vd,VRegister vs2,VM vm)6023 void Riscv64Assembler::VFwcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
6024 AssertExtensionsEnabled(Riscv64Extension::kV);
6025 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6026 DCHECK(vd != vs2);
6027 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6028 EmitR(funct7, vs2, 0b01111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6029 }
6030
VFncvt_xu_f_w(VRegister vd,VRegister vs2,VM vm)6031 void Riscv64Assembler::VFncvt_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
6032 AssertExtensionsEnabled(Riscv64Extension::kV);
6033 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6034 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6035 EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6036 }
6037
VFncvt_x_f_w(VRegister vd,VRegister vs2,VM vm)6038 void Riscv64Assembler::VFncvt_x_f_w(VRegister vd, VRegister vs2, VM vm) {
6039 AssertExtensionsEnabled(Riscv64Extension::kV);
6040 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6041 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6042 EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6043 }
6044
VFncvt_f_xu_w(VRegister vd,VRegister vs2,VM vm)6045 void Riscv64Assembler::VFncvt_f_xu_w(VRegister vd, VRegister vs2, VM vm) {
6046 AssertExtensionsEnabled(Riscv64Extension::kV);
6047 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6048 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6049 EmitR(funct7, vs2, 0b10010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6050 }
6051
VFncvt_f_x_w(VRegister vd,VRegister vs2,VM vm)6052 void Riscv64Assembler::VFncvt_f_x_w(VRegister vd, VRegister vs2, VM vm) {
6053 AssertExtensionsEnabled(Riscv64Extension::kV);
6054 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6055 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6056 EmitR(funct7, vs2, 0b10011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6057 }
6058
VFncvt_f_f_w(VRegister vd,VRegister vs2,VM vm)6059 void Riscv64Assembler::VFncvt_f_f_w(VRegister vd, VRegister vs2, VM vm) {
6060 AssertExtensionsEnabled(Riscv64Extension::kV);
6061 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6062 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6063 EmitR(funct7, vs2, 0b10100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6064 }
6065
VFncvt_rod_f_f_w(VRegister vd,VRegister vs2,VM vm)6066 void Riscv64Assembler::VFncvt_rod_f_f_w(VRegister vd, VRegister vs2, VM vm) {
6067 AssertExtensionsEnabled(Riscv64Extension::kV);
6068 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6069 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6070 EmitR(funct7, vs2, 0b10101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6071 }
6072
VFncvt_rtz_xu_f_w(VRegister vd,VRegister vs2,VM vm)6073 void Riscv64Assembler::VFncvt_rtz_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
6074 AssertExtensionsEnabled(Riscv64Extension::kV);
6075 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6076 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6077 EmitR(funct7, vs2, 0b10110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6078 }
6079
VFncvt_rtz_x_f_w(VRegister vd,VRegister vs2,VM vm)6080 void Riscv64Assembler::VFncvt_rtz_x_f_w(VRegister vd, VRegister vs2, VM vm) {
6081 AssertExtensionsEnabled(Riscv64Extension::kV);
6082 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6083 const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
6084 EmitR(funct7, vs2, 0b10111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6085 }
6086
VFsqrt_v(VRegister vd,VRegister vs2,VM vm)6087 void Riscv64Assembler::VFsqrt_v(VRegister vd, VRegister vs2, VM vm) {
6088 AssertExtensionsEnabled(Riscv64Extension::kV);
6089 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6090 const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
6091 EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6092 }
6093
VFrsqrt7_v(VRegister vd,VRegister vs2,VM vm)6094 void Riscv64Assembler::VFrsqrt7_v(VRegister vd, VRegister vs2, VM vm) {
6095 AssertExtensionsEnabled(Riscv64Extension::kV);
6096 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6097 const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
6098 EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6099 }
6100
VFrec7_v(VRegister vd,VRegister vs2,VM vm)6101 void Riscv64Assembler::VFrec7_v(VRegister vd, VRegister vs2, VM vm) {
6102 AssertExtensionsEnabled(Riscv64Extension::kV);
6103 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6104 const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
6105 EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6106 }
6107
VFclass_v(VRegister vd,VRegister vs2,VM vm)6108 void Riscv64Assembler::VFclass_v(VRegister vd, VRegister vs2, VM vm) {
6109 AssertExtensionsEnabled(Riscv64Extension::kV);
6110 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6111 const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
6112 EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
6113 }
6114
VMsbf_m(VRegister vd,VRegister vs2,VM vm)6115 void Riscv64Assembler::VMsbf_m(VRegister vd, VRegister vs2, VM vm) {
6116 AssertExtensionsEnabled(Riscv64Extension::kV);
6117 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6118 DCHECK(vd != vs2);
6119 const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
6120 EmitR(funct7, vs2, 0b00001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
6121 }
6122
VMsof_m(VRegister vd,VRegister vs2,VM vm)6123 void Riscv64Assembler::VMsof_m(VRegister vd, VRegister vs2, VM vm) {
6124 AssertExtensionsEnabled(Riscv64Extension::kV);
6125 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6126 DCHECK(vd != vs2);
6127 const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
6128 EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
6129 }
6130
VMsif_m(VRegister vd,VRegister vs2,VM vm)6131 void Riscv64Assembler::VMsif_m(VRegister vd, VRegister vs2, VM vm) {
6132 AssertExtensionsEnabled(Riscv64Extension::kV);
6133 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6134 DCHECK(vd != vs2);
6135 const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
6136 EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
6137 }
6138
VIota_m(VRegister vd,VRegister vs2,VM vm)6139 void Riscv64Assembler::VIota_m(VRegister vd, VRegister vs2, VM vm) {
6140 AssertExtensionsEnabled(Riscv64Extension::kV);
6141 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6142 DCHECK(vd != vs2);
6143 const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
6144 EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
6145 }
6146
VId_v(VRegister vd,VM vm)6147 void Riscv64Assembler::VId_v(VRegister vd, VM vm) {
6148 AssertExtensionsEnabled(Riscv64Extension::kV);
6149 DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
6150 const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
6151 EmitR(funct7, V0, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
6152 }
6153
6154 /////////////////////////////// RVV Arithmetic Instructions END /////////////////////////////
6155
6156 ////////////////////////////// RV64 MACRO Instructions START ///////////////////////////////
6157
6158 // Pseudo instructions
6159
Nop()6160 void Riscv64Assembler::Nop() { Addi(Zero, Zero, 0); }
6161
Li(XRegister rd,int64_t imm)6162 void Riscv64Assembler::Li(XRegister rd, int64_t imm) {
6163 LoadImmediate(rd, imm, /*can_use_tmp=*/ false);
6164 }
6165
Mv(XRegister rd,XRegister rs)6166 void Riscv64Assembler::Mv(XRegister rd, XRegister rs) { Addi(rd, rs, 0); }
6167
Not(XRegister rd,XRegister rs)6168 void Riscv64Assembler::Not(XRegister rd, XRegister rs) { Xori(rd, rs, -1); }
6169
Neg(XRegister rd,XRegister rs)6170 void Riscv64Assembler::Neg(XRegister rd, XRegister rs) { Sub(rd, Zero, rs); }
6171
NegW(XRegister rd,XRegister rs)6172 void Riscv64Assembler::NegW(XRegister rd, XRegister rs) { Subw(rd, Zero, rs); }
6173
SextB(XRegister rd,XRegister rs)6174 void Riscv64Assembler::SextB(XRegister rd, XRegister rs) {
6175 if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6176 if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6177 CSextB(rd);
6178 } else {
6179 ZbbSextB(rd, rs);
6180 }
6181 } else {
6182 Slli(rd, rs, kXlen - 8u);
6183 Srai(rd, rd, kXlen - 8u);
6184 }
6185 }
6186
SextH(XRegister rd,XRegister rs)6187 void Riscv64Assembler::SextH(XRegister rd, XRegister rs) {
6188 if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6189 if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6190 CSextH(rd);
6191 } else {
6192 ZbbSextH(rd, rs);
6193 }
6194 } else {
6195 Slli(rd, rs, kXlen - 16u);
6196 Srai(rd, rd, kXlen - 16u);
6197 }
6198 }
6199
SextW(XRegister rd,XRegister rs)6200 void Riscv64Assembler::SextW(XRegister rd, XRegister rs) {
6201 if (IsExtensionEnabled(Riscv64Extension::kZca) && rd != Zero && (rd == rs || rs == Zero)) {
6202 if (rd == rs) {
6203 CAddiw(rd, 0);
6204 } else {
6205 CLi(rd, 0);
6206 }
6207 } else {
6208 Addiw(rd, rs, 0);
6209 }
6210 }
6211
ZextB(XRegister rd,XRegister rs)6212 void Riscv64Assembler::ZextB(XRegister rd, XRegister rs) {
6213 if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6214 CZextB(rd);
6215 } else {
6216 Andi(rd, rs, 0xff);
6217 }
6218 }
6219
ZextH(XRegister rd,XRegister rs)6220 void Riscv64Assembler::ZextH(XRegister rd, XRegister rs) {
6221 if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6222 if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6223 CZextH(rd);
6224 } else {
6225 ZbbZextH(rd, rs);
6226 }
6227 } else {
6228 Slli(rd, rs, kXlen - 16u);
6229 Srli(rd, rd, kXlen - 16u);
6230 }
6231 }
6232
ZextW(XRegister rd,XRegister rs)6233 void Riscv64Assembler::ZextW(XRegister rd, XRegister rs) {
6234 if (IsExtensionEnabled(Riscv64Extension::kZba)) {
6235 if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6236 CZextW(rd);
6237 } else {
6238 AddUw(rd, rs, Zero);
6239 }
6240 } else {
6241 Slli(rd, rs, kXlen - 32u);
6242 Srli(rd, rd, kXlen - 32u);
6243 }
6244 }
6245
Seqz(XRegister rd,XRegister rs)6246 void Riscv64Assembler::Seqz(XRegister rd, XRegister rs) { Sltiu(rd, rs, 1); }
6247
Snez(XRegister rd,XRegister rs)6248 void Riscv64Assembler::Snez(XRegister rd, XRegister rs) { Sltu(rd, Zero, rs); }
6249
Sltz(XRegister rd,XRegister rs)6250 void Riscv64Assembler::Sltz(XRegister rd, XRegister rs) { Slt(rd, rs, Zero); }
6251
Sgtz(XRegister rd,XRegister rs)6252 void Riscv64Assembler::Sgtz(XRegister rd, XRegister rs) { Slt(rd, Zero, rs); }
6253
FMvS(FRegister rd,FRegister rs)6254 void Riscv64Assembler::FMvS(FRegister rd, FRegister rs) { FSgnjS(rd, rs, rs); }
6255
FAbsS(FRegister rd,FRegister rs)6256 void Riscv64Assembler::FAbsS(FRegister rd, FRegister rs) { FSgnjxS(rd, rs, rs); }
6257
FNegS(FRegister rd,FRegister rs)6258 void Riscv64Assembler::FNegS(FRegister rd, FRegister rs) { FSgnjnS(rd, rs, rs); }
6259
FMvD(FRegister rd,FRegister rs)6260 void Riscv64Assembler::FMvD(FRegister rd, FRegister rs) { FSgnjD(rd, rs, rs); }
6261
FAbsD(FRegister rd,FRegister rs)6262 void Riscv64Assembler::FAbsD(FRegister rd, FRegister rs) { FSgnjxD(rd, rs, rs); }
6263
FNegD(FRegister rd,FRegister rs)6264 void Riscv64Assembler::FNegD(FRegister rd, FRegister rs) { FSgnjnD(rd, rs, rs); }
6265
Beqz(XRegister rs,int32_t offset)6266 void Riscv64Assembler::Beqz(XRegister rs, int32_t offset) {
6267 Beq(rs, Zero, offset);
6268 }
6269
Bnez(XRegister rs,int32_t offset)6270 void Riscv64Assembler::Bnez(XRegister rs, int32_t offset) {
6271 Bne(rs, Zero, offset);
6272 }
6273
Blez(XRegister rt,int32_t offset)6274 void Riscv64Assembler::Blez(XRegister rt, int32_t offset) {
6275 Bge(Zero, rt, offset);
6276 }
6277
Bgez(XRegister rt,int32_t offset)6278 void Riscv64Assembler::Bgez(XRegister rt, int32_t offset) {
6279 Bge(rt, Zero, offset);
6280 }
6281
Bltz(XRegister rt,int32_t offset)6282 void Riscv64Assembler::Bltz(XRegister rt, int32_t offset) {
6283 Blt(rt, Zero, offset);
6284 }
6285
Bgtz(XRegister rt,int32_t offset)6286 void Riscv64Assembler::Bgtz(XRegister rt, int32_t offset) {
6287 Blt(Zero, rt, offset);
6288 }
6289
Bgt(XRegister rs,XRegister rt,int32_t offset)6290 void Riscv64Assembler::Bgt(XRegister rs, XRegister rt, int32_t offset) {
6291 Blt(rt, rs, offset);
6292 }
6293
Ble(XRegister rs,XRegister rt,int32_t offset)6294 void Riscv64Assembler::Ble(XRegister rs, XRegister rt, int32_t offset) {
6295 Bge(rt, rs, offset);
6296 }
6297
Bgtu(XRegister rs,XRegister rt,int32_t offset)6298 void Riscv64Assembler::Bgtu(XRegister rs, XRegister rt, int32_t offset) {
6299 Bltu(rt, rs, offset);
6300 }
6301
Bleu(XRegister rs,XRegister rt,int32_t offset)6302 void Riscv64Assembler::Bleu(XRegister rs, XRegister rt, int32_t offset) {
6303 Bgeu(rt, rs, offset);
6304 }
6305
J(int32_t offset)6306 void Riscv64Assembler::J(int32_t offset) { Jal(Zero, offset); }
6307
Jal(int32_t offset)6308 void Riscv64Assembler::Jal(int32_t offset) { Jal(RA, offset); }
6309
Jr(XRegister rs)6310 void Riscv64Assembler::Jr(XRegister rs) { Jalr(Zero, rs, 0); }
6311
Jalr(XRegister rs)6312 void Riscv64Assembler::Jalr(XRegister rs) { Jalr(RA, rs, 0); }
6313
Jalr(XRegister rd,XRegister rs)6314 void Riscv64Assembler::Jalr(XRegister rd, XRegister rs) { Jalr(rd, rs, 0); }
6315
Ret()6316 void Riscv64Assembler::Ret() { Jalr(Zero, RA, 0); }
6317
RdCycle(XRegister rd)6318 void Riscv64Assembler::RdCycle(XRegister rd) {
6319 Csrrs(rd, 0xc00, Zero);
6320 }
6321
RdTime(XRegister rd)6322 void Riscv64Assembler::RdTime(XRegister rd) {
6323 Csrrs(rd, 0xc01, Zero);
6324 }
6325
RdInstret(XRegister rd)6326 void Riscv64Assembler::RdInstret(XRegister rd) {
6327 Csrrs(rd, 0xc02, Zero);
6328 }
6329
Csrr(XRegister rd,uint32_t csr)6330 void Riscv64Assembler::Csrr(XRegister rd, uint32_t csr) {
6331 Csrrs(rd, csr, Zero);
6332 }
6333
Csrw(uint32_t csr,XRegister rs)6334 void Riscv64Assembler::Csrw(uint32_t csr, XRegister rs) {
6335 Csrrw(Zero, csr, rs);
6336 }
6337
Csrs(uint32_t csr,XRegister rs)6338 void Riscv64Assembler::Csrs(uint32_t csr, XRegister rs) {
6339 Csrrs(Zero, csr, rs);
6340 }
6341
Csrc(uint32_t csr,XRegister rs)6342 void Riscv64Assembler::Csrc(uint32_t csr, XRegister rs) {
6343 Csrrc(Zero, csr, rs);
6344 }
6345
Csrwi(uint32_t csr,uint32_t uimm5)6346 void Riscv64Assembler::Csrwi(uint32_t csr, uint32_t uimm5) {
6347 Csrrwi(Zero, csr, uimm5);
6348 }
6349
Csrsi(uint32_t csr,uint32_t uimm5)6350 void Riscv64Assembler::Csrsi(uint32_t csr, uint32_t uimm5) {
6351 Csrrsi(Zero, csr, uimm5);
6352 }
6353
Csrci(uint32_t csr,uint32_t uimm5)6354 void Riscv64Assembler::Csrci(uint32_t csr, uint32_t uimm5) {
6355 Csrrci(Zero, csr, uimm5);
6356 }
6357
Loadb(XRegister rd,XRegister rs1,int32_t offset)6358 void Riscv64Assembler::Loadb(XRegister rd, XRegister rs1, int32_t offset) {
6359 LoadFromOffset<&Riscv64Assembler::Lb>(rd, rs1, offset);
6360 }
6361
Loadh(XRegister rd,XRegister rs1,int32_t offset)6362 void Riscv64Assembler::Loadh(XRegister rd, XRegister rs1, int32_t offset) {
6363 LoadFromOffset<&Riscv64Assembler::Lh>(rd, rs1, offset);
6364 }
6365
Loadw(XRegister rd,XRegister rs1,int32_t offset)6366 void Riscv64Assembler::Loadw(XRegister rd, XRegister rs1, int32_t offset) {
6367 LoadFromOffset<&Riscv64Assembler::Lw>(rd, rs1, offset);
6368 }
6369
Loadd(XRegister rd,XRegister rs1,int32_t offset)6370 void Riscv64Assembler::Loadd(XRegister rd, XRegister rs1, int32_t offset) {
6371 LoadFromOffset<&Riscv64Assembler::Ld>(rd, rs1, offset);
6372 }
6373
Loadbu(XRegister rd,XRegister rs1,int32_t offset)6374 void Riscv64Assembler::Loadbu(XRegister rd, XRegister rs1, int32_t offset) {
6375 LoadFromOffset<&Riscv64Assembler::Lbu>(rd, rs1, offset);
6376 }
6377
Loadhu(XRegister rd,XRegister rs1,int32_t offset)6378 void Riscv64Assembler::Loadhu(XRegister rd, XRegister rs1, int32_t offset) {
6379 LoadFromOffset<&Riscv64Assembler::Lhu>(rd, rs1, offset);
6380 }
6381
Loadwu(XRegister rd,XRegister rs1,int32_t offset)6382 void Riscv64Assembler::Loadwu(XRegister rd, XRegister rs1, int32_t offset) {
6383 LoadFromOffset<&Riscv64Assembler::Lwu>(rd, rs1, offset);
6384 }
6385
Storeb(XRegister rs2,XRegister rs1,int32_t offset)6386 void Riscv64Assembler::Storeb(XRegister rs2, XRegister rs1, int32_t offset) {
6387 StoreToOffset<&Riscv64Assembler::Sb>(rs2, rs1, offset);
6388 }
6389
Storeh(XRegister rs2,XRegister rs1,int32_t offset)6390 void Riscv64Assembler::Storeh(XRegister rs2, XRegister rs1, int32_t offset) {
6391 StoreToOffset<&Riscv64Assembler::Sh>(rs2, rs1, offset);
6392 }
6393
Storew(XRegister rs2,XRegister rs1,int32_t offset)6394 void Riscv64Assembler::Storew(XRegister rs2, XRegister rs1, int32_t offset) {
6395 StoreToOffset<&Riscv64Assembler::Sw>(rs2, rs1, offset);
6396 }
6397
Stored(XRegister rs2,XRegister rs1,int32_t offset)6398 void Riscv64Assembler::Stored(XRegister rs2, XRegister rs1, int32_t offset) {
6399 StoreToOffset<&Riscv64Assembler::Sd>(rs2, rs1, offset);
6400 }
6401
FLoadw(FRegister rd,XRegister rs1,int32_t offset)6402 void Riscv64Assembler::FLoadw(FRegister rd, XRegister rs1, int32_t offset) {
6403 FLoadFromOffset<&Riscv64Assembler::FLw>(rd, rs1, offset);
6404 }
6405
FLoadd(FRegister rd,XRegister rs1,int32_t offset)6406 void Riscv64Assembler::FLoadd(FRegister rd, XRegister rs1, int32_t offset) {
6407 FLoadFromOffset<&Riscv64Assembler::FLd>(rd, rs1, offset);
6408 }
6409
FStorew(FRegister rs2,XRegister rs1,int32_t offset)6410 void Riscv64Assembler::FStorew(FRegister rs2, XRegister rs1, int32_t offset) {
6411 FStoreToOffset<&Riscv64Assembler::FSw>(rs2, rs1, offset);
6412 }
6413
FStored(FRegister rs2,XRegister rs1,int32_t offset)6414 void Riscv64Assembler::FStored(FRegister rs2, XRegister rs1, int32_t offset) {
6415 FStoreToOffset<&Riscv64Assembler::FSd>(rs2, rs1, offset);
6416 }
6417
LoadConst32(XRegister rd,int32_t value)6418 void Riscv64Assembler::LoadConst32(XRegister rd, int32_t value) {
6419 // No need to use a temporary register for 32-bit values.
6420 LoadImmediate(rd, value, /*can_use_tmp=*/ false);
6421 }
6422
LoadConst64(XRegister rd,int64_t value)6423 void Riscv64Assembler::LoadConst64(XRegister rd, int64_t value) {
6424 LoadImmediate(rd, value, /*can_use_tmp=*/ true);
6425 }
6426
// Shared implementation for `AddConst32()`/`AddConst64()`: computes rd = rs1 + value.
// `addi` emits a single register+imm12 add; `add_large` materializes the full
// constant into a temporary and adds it. Values that fit two chained imm12 adds
// are handled without materializing the constant.
template <typename ValueType, typename Addi, typename AddLarge>
void AddConstImpl(Riscv64Assembler* assembler,
                  XRegister rd,
                  XRegister rs1,
                  ValueType value,
                  Addi&& addi,
                  AddLarge&& add_large) {
  ScratchRegisterScope srs(assembler);
  // A temporary must be available for adjustment even if it's not needed.
  // However, `rd` can be used as the temporary unless it's the same as `rs1` or SP.
  DCHECK_IMPLIES(rd == rs1 || rd == SP, srs.AvailableXRegisters() != 0u);

  // Single ADDI covers the imm12 range directly.
  if (IsInt<12>(value)) {
    addi(rd, rs1, value);
    return;
  }

  // Two chained ADDIs extend the reachable range to [2 * -0x800, 2 * 0x7ff].
  constexpr int32_t kPositiveValueSimpleAdjustment = 0x7ff;
  constexpr int32_t kHighestValueForSimpleAdjustment = 2 * kPositiveValueSimpleAdjustment;
  constexpr int32_t kNegativeValueSimpleAdjustment = -0x800;
  constexpr int32_t kLowestValueForSimpleAdjustment = 2 * kNegativeValueSimpleAdjustment;

  if (rd != rs1 && rd != SP) {
    // `rd` is safe to clobber before the final write, so let it serve as the temp.
    srs.IncludeXRegister(rd);
  }
  XRegister tmp = srs.AllocateXRegister();
  if (value >= 0 && value <= kHighestValueForSimpleAdjustment) {
    // Split into a maximal positive imm12 step plus the remainder.
    addi(tmp, rs1, kPositiveValueSimpleAdjustment);
    addi(rd, tmp, value - kPositiveValueSimpleAdjustment);
  } else if (value < 0 && value >= kLowestValueForSimpleAdjustment) {
    // Split into a maximal negative imm12 step plus the remainder.
    addi(tmp, rs1, kNegativeValueSimpleAdjustment);
    addi(rd, tmp, value - kNegativeValueSimpleAdjustment);
  } else {
    // Out of chained-ADDI range: load the constant and use a full register add.
    add_large(rd, rs1, value, tmp);
  }
}
6463
AddConst32(XRegister rd,XRegister rs1,int32_t value)6464 void Riscv64Assembler::AddConst32(XRegister rd, XRegister rs1, int32_t value) {
6465 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
6466 CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
6467 auto addiw = [&](XRegister rd, XRegister rs1, int32_t value) { Addiw(rd, rs1, value); };
6468 auto add_large = [&](XRegister rd, XRegister rs1, int32_t value, XRegister tmp) {
6469 LoadConst32(tmp, value);
6470 Addw(rd, rs1, tmp);
6471 };
6472 AddConstImpl(this, rd, rs1, value, addiw, add_large);
6473 }
6474
AddConst64(XRegister rd,XRegister rs1,int64_t value)6475 void Riscv64Assembler::AddConst64(XRegister rd, XRegister rs1, int64_t value) {
6476 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
6477 CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
6478 auto addi = [&](XRegister rd, XRegister rs1, int32_t value) { Addi(rd, rs1, value); };
6479 auto add_large = [&](XRegister rd, XRegister rs1, int64_t value, XRegister tmp) {
6480 // We may not have another scratch register for `LoadConst64()`, so use `Li()`.
6481 // TODO(riscv64): Refactor `LoadImmediate()` so that we can reuse the code to detect
6482 // when the code path using the scratch reg is beneficial, and use that path with a
6483 // small modification - instead of adding the two parts togeter, add them individually
6484 // to the input `rs1`. (This works as long as `rd` is not the same as `tmp`.)
6485 Li(tmp, value);
6486 Add(rd, rs1, tmp);
6487 };
6488 AddConstImpl(this, rd, rs1, value, addi, add_large);
6489 }
6490
// Pseudo-instruction: branch to `label` if `rs` == zero.
void Riscv64Assembler::Beqz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Beq(rs, Zero, label, is_bare);
}
6494
// Pseudo-instruction: branch to `label` if `rs` != zero.
void Riscv64Assembler::Bnez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bne(rs, Zero, label, is_bare);
}
6498
// Pseudo-instruction: branch to `label` if `rs` <= zero (signed).
void Riscv64Assembler::Blez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Ble(rs, Zero, label, is_bare);
}
6502
// Pseudo-instruction: branch to `label` if `rs` >= zero (signed).
void Riscv64Assembler::Bgez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bge(rs, Zero, label, is_bare);
}
6506
// Pseudo-instruction: branch to `label` if `rs` < zero (signed).
void Riscv64Assembler::Bltz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Blt(rs, Zero, label, is_bare);
}
6510
// Pseudo-instruction: branch to `label` if `rs` > zero (signed).
void Riscv64Assembler::Bgtz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bgt(rs, Zero, label, is_bare);
}
6514
// Branch to `label` if `rs` == `rt`; records a placeholder resolved in `EmitBranches()`.
void Riscv64Assembler::Beq(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondEQ, rs, rt);
}
6518
// Branch to `label` if `rs` != `rt`.
void Riscv64Assembler::Bne(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondNE, rs, rt);
}
6522
// Branch to `label` if `rs` <= `rt` (signed).
void Riscv64Assembler::Ble(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLE, rs, rt);
}
6526
// Branch to `label` if `rs` >= `rt` (signed).
void Riscv64Assembler::Bge(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGE, rs, rt);
}
6530
// Branch to `label` if `rs` < `rt` (signed).
void Riscv64Assembler::Blt(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLT, rs, rt);
}
6534
// Branch to `label` if `rs` > `rt` (signed).
void Riscv64Assembler::Bgt(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGT, rs, rt);
}
6538
// Branch to `label` if `rs` <= `rt` (unsigned).
void Riscv64Assembler::Bleu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLEU, rs, rt);
}
6542
// Branch to `label` if `rs` >= `rt` (unsigned).
void Riscv64Assembler::Bgeu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGEU, rs, rt);
}
6546
// Branch to `label` if `rs` < `rt` (unsigned).
void Riscv64Assembler::Bltu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLTU, rs, rt);
}
6550
// Branch to `label` if `rs` > `rt` (unsigned).
void Riscv64Assembler::Bgtu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGTU, rs, rt);
}
6554
// Unconditional jump to `label`, writing the return address to `rd`.
// With `rd` != Zero this becomes a call (see the Branch constructor's type selection).
void Riscv64Assembler::Jal(XRegister rd, Riscv64Label* label, bool is_bare) {
  Buncond(label, rd, is_bare);
}
6558
// Pseudo-instruction: plain jump (JAL with rd = Zero, no link saved).
void Riscv64Assembler::J(Riscv64Label* label, bool is_bare) {
  Jal(Zero, label, is_bare);
}
6562
// Pseudo-instruction: call (JAL with rd = RA).
void Riscv64Assembler::Jal(Riscv64Label* label, bool is_bare) {
  Jal(RA, label, is_bare);
}
6566
// Loads a 4-byte literal into `rd` with sign extension (emitted as AUIPC+LW later).
void Riscv64Assembler::Loadw(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteral);
}
6571
// Loads a 4-byte literal into `rd` with zero extension (emitted as AUIPC+LWU later).
void Riscv64Assembler::Loadwu(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteralUnsigned);
}
6576
// Loads an 8-byte literal into `rd` (emitted as AUIPC+LD later).
void Riscv64Assembler::Loadd(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  LoadLiteral(literal, rd, Branch::kLiteralLong);
}
6581
// Loads a 4-byte literal into FP register `rd` (emitted as AUIPC+FLW later).
void Riscv64Assembler::FLoadw(FRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteralFloat);
}
6586
// Loads an 8-byte literal into FP register `rd` (emitted as AUIPC+FLD later).
void Riscv64Assembler::FLoadd(FRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  LoadLiteral(literal, rd, Branch::kLiteralDouble);
}
6591
// Emits an instruction that is guaranteed to raise an illegal-instruction exception.
// Uses the 16-bit `C.UNIMP` when the "Zca" extension is enabled; otherwise emits
// 0xC0001073, the canonical 32-bit UNIMP encoding (`CSRRW zero, cycle, zero` — a
// write to a read-only CSR, which always traps).
void Riscv64Assembler::Unimp() {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    CUnimp();
  } else {
    Emit32(0xC0001073);
  }
}
6599
6600 /////////////////////////////// RV64 MACRO Instructions END ///////////////////////////////
6601
// Per-branch-type emission parameters:
//   {length (bytes of the whole emitted sequence),
//    pc_offset (byte offset of the PC-relative instruction within the sequence,
//               see GetOffsetLocation()),
//    offset_size (maximum encodable PC-relative offset, in bits)}.
const Riscv64Assembler::Branch::BranchInfo Riscv64Assembler::Branch::branch_info_[] = {
    // Compressed branches (can be promoted to longer)
    {2, 0, Riscv64Assembler::Branch::kOffset9},  // kCondCBranch
    {2, 0, Riscv64Assembler::Branch::kOffset12},  // kUncondCBranch
    // Compressed branches (can't be promoted to longer)
    {2, 0, Riscv64Assembler::Branch::kOffset9},  // kBareCondCBranch
    {2, 0, Riscv64Assembler::Branch::kOffset12},  // kBareUncondCBranch

    // Short branches (can be promoted to longer).
    {4, 0, Riscv64Assembler::Branch::kOffset13},  // kCondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kUncondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kCall
    // Short branches (can't be promoted to longer).
    {4, 0, Riscv64Assembler::Branch::kOffset13},  // kBareCondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kBareUncondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kBareCall

    // Medium branches.
    // Note: the non-zero pc_offset accounts for the inverted-condition branch emitted
    // before the PC-relative jump (see EmitBranch()).
    {6, 2, Riscv64Assembler::Branch::kOffset21},  // kCondCBranch21
    {8, 4, Riscv64Assembler::Branch::kOffset21},  // kCondBranch21

    // Long branches.
    {10, 2, Riscv64Assembler::Branch::kOffset32},  // kLongCondCBranch
    {12, 4, Riscv64Assembler::Branch::kOffset32},  // kLongCondBranch
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLongUncondBranch
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLongCall

    // label.
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLabel

    // literals.
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteral
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralUnsigned
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralLong
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralFloat
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralDouble
};
6639
// Picks the first (i.e. shortest) type from `types` whose offset range can encode
// `offset_size`. The caller must pass the candidates ordered from shortest to longest;
// at least one of them must fit (checked in debug builds only).
void Riscv64Assembler::Branch::InitShortOrLong(OffsetBits offset_size,
                                               std::initializer_list<Type> types) {
  auto it = types.begin();
  DCHECK(it != types.end());
  while (offset_size > branch_info_[*it].offset_size) {
    ++it;
    DCHECK(it != types.end());
  }
  type_ = *it;
}
6650
// Chooses the concrete initial branch type from the requested `initial_type`,
// taking the currently-needed offset size into account. Non-bare branches pick the
// shortest encoding that fits (they may still be promoted later); bare branches keep
// their fixed type and merely verify the offset fits. Conditional cases deliberately
// fall through to the unconditional ones when `condition_` is `kUncond`.
void Riscv64Assembler::Branch::InitializeType(Type initial_type) {
  OffsetBits offset_size_needed = GetOffsetSizeNeeded(location_, target_);

  switch (initial_type) {
    case kCondCBranch:
      // Compressed conditional branches are only valid for C.BEQZ/C.BNEZ shapes.
      CHECK(IsCompressableCondition());
      if (condition_ != kUncond) {
        InitShortOrLong(
            offset_size_needed, {kCondCBranch, kCondBranch, kCondCBranch21, kLongCondCBranch});
        break;
      }
      FALLTHROUGH_INTENDED;
    case kUncondCBranch:
      InitShortOrLong(offset_size_needed, {kUncondCBranch, kUncondBranch, kLongUncondBranch});
      break;
    case kBareCondCBranch:
      if (condition_ != kUncond) {
        type_ = kBareCondCBranch;
        CHECK_LE(offset_size_needed, GetOffsetSize());
        break;
      }
      FALLTHROUGH_INTENDED;
    case kBareUncondCBranch:
      type_ = kBareUncondCBranch;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kCondBranch:
      if (condition_ != kUncond) {
        InitShortOrLong(offset_size_needed, {kCondBranch, kCondBranch21, kLongCondBranch});
        break;
      }
      FALLTHROUGH_INTENDED;
    case kUncondBranch:
      InitShortOrLong(offset_size_needed, {kUncondBranch, kLongUncondBranch, kLongUncondBranch});
      break;
    case kCall:
      InitShortOrLong(offset_size_needed, {kCall, kLongCall, kLongCall});
      break;
    case kBareCondBranch:
      if (condition_ != kUncond) {
        type_ = kBareCondBranch;
        CHECK_LE(offset_size_needed, GetOffsetSize());
        break;
      }
      FALLTHROUGH_INTENDED;
    case kBareUncondBranch:
      type_ = kBareUncondBranch;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kBareCall:
      type_ = kBareCall;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kLabel:
      type_ = initial_type;
      break;
    case kLiteral:
    case kLiteralUnsigned:
    case kLiteralLong:
    case kLiteralFloat:
    case kLiteralDouble:
      // Literal loads are created before the literal area exists, so they must be
      // unresolved at this point.
      CHECK(!IsResolved());
      type_ = initial_type;
      break;
    default:
      LOG(FATAL) << "Unexpected branch type " << enum_cast<uint32_t>(initial_type);
      UNREACHABLE();
  }

  // Remember the type chosen at creation time; used for length bookkeeping.
  old_type_ = type_;
}
6722
IsNop(BranchCondition condition,XRegister lhs,XRegister rhs)6723 bool Riscv64Assembler::Branch::IsNop(BranchCondition condition, XRegister lhs, XRegister rhs) {
6724 switch (condition) {
6725 case kCondNE:
6726 case kCondLT:
6727 case kCondGT:
6728 case kCondLTU:
6729 case kCondGTU:
6730 return lhs == rhs;
6731 default:
6732 return false;
6733 }
6734 }
6735
IsUncond(BranchCondition condition,XRegister lhs,XRegister rhs)6736 bool Riscv64Assembler::Branch::IsUncond(BranchCondition condition, XRegister lhs, XRegister rhs) {
6737 switch (condition) {
6738 case kUncond:
6739 return true;
6740 case kCondEQ:
6741 case kCondGE:
6742 case kCondLE:
6743 case kCondLEU:
6744 case kCondGEU:
6745 return lhs == rhs;
6746 default:
6747 return false;
6748 }
6749 }
6750
// Returns true for branch types whose sequence starts with a compressed (16-bit)
// instruction, which affects alignment/length accounting.
bool Riscv64Assembler::Branch::IsCompressed(Type type) {
  switch (type) {
    case kCondCBranch:
    case kUncondCBranch:
    case kBareCondCBranch:
    case kBareUncondCBranch:
    case kCondCBranch21:
    case kLongCondCBranch:
      return true;
    default:
      return false;
  }
}
6764
// Constructs an unconditional branch or call.
// `rd` is the link register: Zero yields a plain jump, anything else a call.
// Compression is only considered for plain jumps; calls are never compressed here.
Riscv64Assembler::Branch::Branch(
    uint32_t location, uint32_t target, XRegister rd, bool is_bare, bool compression_allowed)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(rd),
      rhs_reg_(Zero),
      freg_(kNoFRegister),
      condition_(kUncond),
      compression_allowed_(compression_allowed),
      next_branch_id_(0u) {
  InitializeType((rd != Zero ?
                      (is_bare ? kBareCall : kCall) :
                      (is_bare ? (compression_allowed ? kBareUncondCBranch : kBareUncondBranch) :
                                 (compression_allowed ? kUncondCBranch : kUncondBranch))));
}
6781
// Constructs a conditional branch. The condition must not be (or degenerate into)
// an unconditional branch or a no-op — callers filter those out in `Bcond()`.
// Compression is further restricted to conditions expressible as C.BEQZ/C.BNEZ.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 Riscv64Assembler::BranchCondition condition,
                                 XRegister lhs_reg,
                                 XRegister rhs_reg,
                                 bool is_bare,
                                 bool compression_allowed)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(lhs_reg),
      rhs_reg_(rhs_reg),
      freg_(kNoFRegister),
      condition_(condition),
      compression_allowed_(compression_allowed && IsCompressableCondition()),
      next_branch_id_(0u) {
  DCHECK_NE(condition, kUncond);
  DCHECK(!IsNop(condition, lhs_reg, rhs_reg));
  DCHECK(!IsUncond(condition, lhs_reg, rhs_reg));
  InitializeType(is_bare ? (compression_allowed_ ? kBareCondCBranch : kBareCondBranch) :
                           (compression_allowed_ ? kCondCBranch : kCondBranch));
}
6804
// Constructs a label-address load (`kLabel`) or an integer literal load, with the
// result going to X register `rd`. Writing to Zero would be pointless, hence the CHECK.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 XRegister rd,
                                 Type label_or_literal_type)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(rd),
      rhs_reg_(Zero),
      freg_(kNoFRegister),
      condition_(kUncond),
      compression_allowed_(false),
      next_branch_id_(0u) {
  CHECK_NE(rd , Zero);
  InitializeType(label_or_literal_type);
}
6821
// Constructs a floating-point literal load with the result going to FP register `rd`.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 FRegister rd,
                                 Type literal_type)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(Zero),
      rhs_reg_(Zero),
      freg_(rd),
      condition_(kUncond),
      compression_allowed_(false),
      next_branch_id_(0u) {
  InitializeType(literal_type);
}
6837
// Returns the logical negation of `cond`; used to emit an inverted short branch
// that skips over the longer jump sequence (see EmitBranch()). `kUncond` has no
// opposite and is a fatal error.
Riscv64Assembler::BranchCondition Riscv64Assembler::Branch::OppositeCondition(
    Riscv64Assembler::BranchCondition cond) {
  switch (cond) {
    case kCondEQ:
      return kCondNE;
    case kCondNE:
      return kCondEQ;
    case kCondLT:
      return kCondGE;
    case kCondGE:
      return kCondLT;
    case kCondLE:
      return kCondGT;
    case kCondGT:
      return kCondLE;
    case kCondLTU:
      return kCondGEU;
    case kCondGEU:
      return kCondLTU;
    case kCondLEU:
      return kCondGTU;
    case kCondGTU:
      return kCondLEU;
    case kUncond:
      LOG(FATAL) << "Unexpected branch condition " << enum_cast<uint32_t>(cond);
      UNREACHABLE();
  }
}
6866
// Current (possibly promoted) type of this branch.
Riscv64Assembler::Branch::Type Riscv64Assembler::Branch::GetType() const { return type_; }
6868
// Type this branch had when it was created (before any promotion).
Riscv64Assembler::Branch::Type Riscv64Assembler::Branch::GetOldType() const { return old_type_; }
6870
// Branch condition; `kUncond` for unconditional branches, calls, labels and literals.
Riscv64Assembler::BranchCondition Riscv64Assembler::Branch::GetCondition() const {
  return condition_;
}
6874
// Left-hand comparison operand (also the destination/link register for calls and loads).
XRegister Riscv64Assembler::Branch::GetLeftRegister() const { return lhs_reg_; }
6876
// Right-hand comparison operand (Zero for non-conditional branch kinds).
XRegister Riscv64Assembler::Branch::GetRightRegister() const { return rhs_reg_; }
6878
// For compare-against-zero branches (C.BEQZ/C.BNEZ shapes): returns whichever of the
// two operands is not the Zero register. Exactly one operand must be Zero.
XRegister Riscv64Assembler::Branch::GetNonZeroRegister() const {
  DCHECK(GetLeftRegister() == Zero || GetRightRegister() == Zero)
      << "Either register has to be Zero register";
  DCHECK(GetLeftRegister() != Zero || GetRightRegister() != Zero)
      << "Either register has to be non-Zero register";
  return GetLeftRegister() == Zero ? GetRightRegister() : GetLeftRegister();
}
6886
// Destination FP register for FP literal loads; `kNoFRegister` otherwise.
FRegister Riscv64Assembler::Branch::GetFRegister() const { return freg_; }
6888
// Target location in the buffer, or `kUnresolved` if the label is not yet bound.
uint32_t Riscv64Assembler::Branch::GetTarget() const { return target_; }
6890
// Current location of the branch in the buffer (updated by Relocate()).
uint32_t Riscv64Assembler::Branch::GetLocation() const { return location_; }
6892
// Location the branch had when it was created (before any relocation).
uint32_t Riscv64Assembler::Branch::GetOldLocation() const { return old_location_; }
6894
// Length in bytes of the emitted sequence for the current type.
uint32_t Riscv64Assembler::Branch::GetLength() const { return branch_info_[type_].length; }
6896
// Length in bytes for the type the branch had at creation time.
uint32_t Riscv64Assembler::Branch::GetOldLength() const { return branch_info_[old_type_].length; }
6898
// First byte past the branch's emitted sequence at its current location.
uint32_t Riscv64Assembler::Branch::GetEndLocation() const { return GetLocation() + GetLength(); }
6900
// First byte past the branch's emitted sequence at its original location/length.
uint32_t Riscv64Assembler::Branch::GetOldEndLocation() const {
  return GetOldLocation() + GetOldLength();
}
6904
// Id of the next branch in the linked list of forward branches to the same label
// (see FinalizeLabeledBranch()); 0 terminates the list.
uint32_t Riscv64Assembler::Branch::NextBranchId() const { return next_branch_id_; }
6906
// Returns true for "bare" branch types, which have a fixed encoding and are never
// promoted to a longer sequence.
bool Riscv64Assembler::Branch::IsBare() const {
  switch (type_) {
    case kBareCondCBranch:
    case kBareUncondCBranch:
    case kBareUncondBranch:
    case kBareCondBranch:
    case kBareCall:
      return true;
    default:
      return false;
  }
}
6919
// True once the target label has been bound (see Resolve()).
bool Riscv64Assembler::Branch::IsResolved() const { return target_ != kUnresolved; }
6921
// True if the condition can be encoded as a compressed C.BEQZ/C.BNEZ: an EQ/NE
// comparison of a "short" (C-extension addressable) register against Zero.
bool Riscv64Assembler::Branch::IsCompressableCondition() const {
  return (condition_ == kCondEQ || condition_ == kCondNE) &&
         ((lhs_reg_ == Zero && IsShortReg(rhs_reg_)) || (rhs_reg_ == Zero && IsShortReg(lhs_reg_)));
}
6926
// Maximum encodable offset (in bits) for the branch's current type.
Riscv64Assembler::Branch::OffsetBits Riscv64Assembler::Branch::GetOffsetSize() const {
  return branch_info_[type_].offset_size;
}
6930
GetOffsetSizeNeeded(uint32_t location,uint32_t target)6931 Riscv64Assembler::Branch::OffsetBits Riscv64Assembler::Branch::GetOffsetSizeNeeded(
6932 uint32_t location, uint32_t target) {
6933 // For unresolved targets assume the shortest encoding
6934 // (later it will be made longer if needed).
6935 if (target == kUnresolved) {
6936 return kOffset9;
6937 }
6938 int64_t distance = static_cast<int64_t>(target) - location;
6939
6940 if (IsInt<kOffset9>(distance)) {
6941 return kOffset9;
6942 } else if (IsInt<kOffset12>(distance)) {
6943 return kOffset12;
6944 } else if (IsInt<kOffset13>(distance)) {
6945 return kOffset13;
6946 } else if (IsInt<kOffset21>(distance)) {
6947 return kOffset21;
6948 } else {
6949 return kOffset32;
6950 }
6951 }
6952
// Records the now-known target location for this branch.
void Riscv64Assembler::Branch::Resolve(uint32_t target) { target_ = target; }
6954
Relocate(uint32_t expand_location,uint32_t delta)6955 void Riscv64Assembler::Branch::Relocate(uint32_t expand_location, uint32_t delta) {
6956 // All targets should be resolved before we start promoting branches.
6957 DCHECK(IsResolved());
6958 if (location_ > expand_location) {
6959 location_ += delta;
6960 }
6961 if (target_ > expand_location) {
6962 target_ += delta;
6963 }
6964 }
6965
// Promotes this branch to a longer encoding if its current type can no longer reach
// the (resolved) target. Returns the number of bytes the branch grew by, or 0 if no
// promotion was needed. Bare branches and other fixed types are never promoted.
uint32_t Riscv64Assembler::Branch::PromoteIfNeeded() {
  // All targets should be resolved before we start promoting branches.
  DCHECK(IsResolved());
  Type old_type = type_;
  switch (type_) {
    // Compressed branches (can be promoted to longer)
    case kUncondCBranch: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      type_ = needed_size <= branch_info_[kUncondBranch].offset_size ? kUncondBranch :
                                                                       kLongUncondBranch;
      break;
    }
    case kCondCBranch: {
      DCHECK(IsCompressableCondition());
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      if (needed_size <= branch_info_[kCondBranch].offset_size) {
        type_ = kCondBranch;
        break;
      }
      // Doesn't fit a plain conditional branch either; handle it as one below.
      FALLTHROUGH_INTENDED;
    }
    // Short branches (can be promoted to longer).
    case kCondBranch: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      Type cond21Type =
          (compression_allowed_ && IsCompressableCondition()) ? kCondCBranch21 : kCondBranch21;
      Type longCondType =
          (compression_allowed_ && IsCompressableCondition()) ? kLongCondCBranch : kLongCondBranch;

      // The offset remains the same for `kCond[C]Branch21` for forward branches.
      DCHECK_EQ(branch_info_[cond21Type].length - branch_info_[cond21Type].pc_offset,
                branch_info_[kCondBranch].length - branch_info_[kCondBranch].pc_offset);
      if (target_ <= location_) {
        // Backward branch: the PC-relative origin moves by `pc_offset`, so recompute
        // the needed size for kCond[C]Branch21.
        needed_size = GetOffsetSizeNeeded(location_ + branch_info_[cond21Type].pc_offset, target_);
      }
      type_ = (needed_size <= branch_info_[cond21Type].offset_size) ? cond21Type : longCondType;
      break;
    }
    case kUncondBranch:
      if (GetOffsetSizeNeeded(GetOffsetLocation(), target_) <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongUncondBranch;
      break;
    case kCall:
      if (GetOffsetSizeNeeded(GetOffsetLocation(), target_) <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongCall;
      break;
    // Medium branches (can be promoted to long).
    case kCondCBranch21: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongCondCBranch;
      break;
    }
    case kCondBranch21: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongCondBranch;
      break;
    }
    default:
      // Other branch types cannot be promoted.
      DCHECK_LE(GetOffsetSizeNeeded(GetOffsetLocation(), target_), GetOffsetSize())
          << static_cast<uint32_t>(type_);
      return 0u;
  }
  DCHECK(type_ != old_type);
  DCHECK_GT(branch_info_[type_].length, branch_info_[old_type].length);
  // Growth in bytes; the caller relocates all following branches by this amount.
  return branch_info_[type_].length - branch_info_[old_type].length;
}
7056
// Location of the PC-relative instruction within the emitted sequence; this is the
// origin from which the branch offset is measured.
uint32_t Riscv64Assembler::Branch::GetOffsetLocation() const {
  return location_ + branch_info_[type_].pc_offset;
}
7060
// Signed byte offset from the PC-relative instruction to the target. The DCHECK
// verifies that the unsigned subtraction did not wrap outside the int32_t range.
int32_t Riscv64Assembler::Branch::GetOffset() const {
  CHECK(IsResolved());
  // Calculate the byte distance between instructions and also account for
  // different PC-relative origins.
  uint32_t offset_location = GetOffsetLocation();
  int32_t offset = static_cast<int32_t>(target_ - offset_location);
  DCHECK_EQ(offset, static_cast<int64_t>(target_) - static_cast<int64_t>(offset_location));
  return offset;
}
7070
// Links this branch into the per-label list of unresolved forward branches.
void Riscv64Assembler::Branch::LinkToList(uint32_t next_branch_id) {
  next_branch_id_ = next_branch_id;
}
7074
// Emits the concrete RISC-V conditional branch instruction for `cond` with the
// given operands and byte offset. `kUncond` has no instruction form and is fatal.
void Riscv64Assembler::EmitBcond(BranchCondition cond,
                                 XRegister rs,
                                 XRegister rt,
                                 int32_t offset) {
  switch (cond) {
#define DEFINE_CASE(COND, cond) \
    case kCond##COND:           \
      B##cond(rs, rt, offset);  \
      break;
    DEFINE_CASE(EQ, eq)
    DEFINE_CASE(NE, ne)
    DEFINE_CASE(LT, lt)
    DEFINE_CASE(GE, ge)
    DEFINE_CASE(LE, le)
    DEFINE_CASE(GT, gt)
    DEFINE_CASE(LTU, ltu)
    DEFINE_CASE(GEU, geu)
    DEFINE_CASE(LEU, leu)
    DEFINE_CASE(GTU, gtu)
#undef DEFINE_CASE
    case kUncond:
      LOG(FATAL) << "Unexpected branch condition " << enum_cast<uint32_t>(cond);
      UNREACHABLE();
  }
}
7100
// Overwrites the placeholder bytes reserved for `branch` with its final instruction
// sequence. The emitted sequence must be exactly `branch->GetLength()` bytes — the
// trailing CHECKs verify this. Medium/long conditional forms emit an inverted-condition
// branch that skips over the following jump.
void Riscv64Assembler::EmitBranch(Riscv64Assembler::Branch* branch) {
  CHECK(overwriting_);
  overwrite_location_ = branch->GetLocation();
  const int32_t offset = branch->GetOffset();
  BranchCondition condition = branch->GetCondition();
  XRegister lhs = branch->GetLeftRegister();
  XRegister rhs = branch->GetRightRegister();
  // Disable Compressed emitter explicitly and enable where it is needed
  ScopedNoCInstructions no_compression(this);

  // Emits AUIPC with the high part of `offset` into `reg`, then lets `next` emit the
  // instruction consuming the low 12 bits (JALR/load/ADDI).
  auto emit_auipc_and_next = [&](XRegister reg, auto next) {
    CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
    auto [imm20, short_offset] = SplitOffset(offset);
    Auipc(reg, imm20);
    next(short_offset);
  };

  // Emits a compressed branch on the opposite condition that skips the rest of this
  // branch's sequence (GetLength() bytes from the start).
  auto emit_cbcondz_opposite = [&]() {
    DCHECK(branch->IsCompressableCondition());
    ScopedUseCInstructions use_compression(this);
    if (condition == kCondNE) {
      DCHECK_EQ(Branch::OppositeCondition(condition), kCondEQ);
      CBeqz(branch->GetNonZeroRegister(), branch->GetLength());
    } else {
      DCHECK_EQ(Branch::OppositeCondition(condition), kCondNE);
      CBnez(branch->GetNonZeroRegister(), branch->GetLength());
    }
  };

  switch (branch->GetType()) {
    // Compressed branches
    case Branch::kCondCBranch:
    case Branch::kBareCondCBranch: {
      ScopedUseCInstructions use_compression(this);
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      DCHECK(branch->IsCompressableCondition());
      if (condition == kCondEQ) {
        CBeqz(branch->GetNonZeroRegister(), offset);
      } else {
        CBnez(branch->GetNonZeroRegister(), offset);
      }
      break;
    }
    case Branch::kUncondCBranch:
    case Branch::kBareUncondCBranch: {
      ScopedUseCInstructions use_compression(this);
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      CJ(offset);
      break;
    }
    // Short branches.
    case Branch::kUncondBranch:
    case Branch::kBareUncondBranch:
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      J(offset);
      break;
    case Branch::kCondBranch:
    case Branch::kBareCondBranch:
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      EmitBcond(condition, lhs, rhs, offset);
      break;
    case Branch::kCall:
    case Branch::kBareCall:
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      DCHECK(lhs != Zero);
      Jal(lhs, offset);
      break;

    // Medium branch.
    case Branch::kCondBranch21:
      EmitBcond(Branch::OppositeCondition(condition), lhs, rhs, branch->GetLength());
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      J(offset);
      break;
    case Branch::kCondCBranch21: {
      emit_cbcondz_opposite();
      CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
      J(offset);
      break;
    }
    // Long branches.
    case Branch::kLongCondCBranch:
      emit_cbcondz_opposite();
      emit_auipc_and_next(TMP, [&](int32_t short_offset) { Jalr(Zero, TMP, short_offset); });
      break;
    case Branch::kLongCondBranch:
      EmitBcond(Branch::OppositeCondition(condition), lhs, rhs, branch->GetLength());
      FALLTHROUGH_INTENDED;
    case Branch::kLongUncondBranch:
      emit_auipc_and_next(TMP, [&](int32_t short_offset) { Jalr(Zero, TMP, short_offset); });
      break;
    case Branch::kLongCall:
      DCHECK(lhs != Zero);
      emit_auipc_and_next(lhs, [&](int32_t short_offset) { Jalr(lhs, lhs, short_offset); });
      break;

    // label.
    case Branch::kLabel:
      emit_auipc_and_next(lhs, [&](int32_t short_offset) { Addi(lhs, lhs, short_offset); });
      break;
    // literals.
    case Branch::kLiteral:
      emit_auipc_and_next(lhs, [&](int32_t short_offset) { Lw(lhs, lhs, short_offset); });
      break;
    case Branch::kLiteralUnsigned:
      emit_auipc_and_next(lhs, [&](int32_t short_offset) { Lwu(lhs, lhs, short_offset); });
      break;
    case Branch::kLiteralLong:
      emit_auipc_and_next(lhs, [&](int32_t short_offset) { Ld(lhs, lhs, short_offset); });
      break;
    case Branch::kLiteralFloat:
      emit_auipc_and_next(
          TMP, [&](int32_t short_offset) { FLw(branch->GetFRegister(), TMP, short_offset); });
      break;
    case Branch::kLiteralDouble:
      emit_auipc_and_next(
          TMP, [&](int32_t short_offset) { FLd(branch->GetFRegister(), TMP, short_offset); });
      break;
  }
  CHECK_EQ(overwrite_location_, branch->GetEndLocation());
  CHECK_LE(branch->GetLength(), static_cast<uint32_t>(Branch::kMaxBranchLength));
}
7223
EmitBranches()7224 void Riscv64Assembler::EmitBranches() {
7225 CHECK(!overwriting_);
7226 // Switch from appending instructions at the end of the buffer to overwriting
7227 // existing instructions (branch placeholders) in the buffer.
7228 overwriting_ = true;
7229 for (auto& branch : branches_) {
7230 EmitBranch(&branch);
7231 }
7232 overwriting_ = false;
7233 }
7234
// Finishes recording the branch just appended to `branches_`: links it into the
// label's forward-branch list if the label is still unbound, and reserves
// placeholder space in the buffer for the branch's (current) length. Placeholders
// are emitted in 16-bit units when the compressed extension is enabled, 32-bit
// units otherwise.
void Riscv64Assembler::FinalizeLabeledBranch(Riscv64Label* label) {
  const uint32_t alignment =
      IsExtensionEnabled(Riscv64Extension::kZca) ? sizeof(uint16_t) : sizeof(uint32_t);
  Branch& this_branch = branches_.back();
  uint32_t branch_length = this_branch.GetLength();
  DCHECK(IsAlignedParam(branch_length, alignment));
  uint32_t length = branch_length / alignment;
  if (!label->IsBound()) {
    // Branch forward (to a following label), distance is unknown.
    // The first branch forward will contain 0, serving as the terminator of
    // the list of forward-reaching branches.
    this_branch.LinkToList(label->position_);
    // Now make the label object point to this branch
    // (this forms a linked list of branches preceding this label).
    uint32_t branch_id = branches_.size() - 1;
    label->LinkTo(branch_id);
  }
  // Reserve space for the branch.
  for (; length != 0u; --length) {
    if (alignment == sizeof(uint16_t)) {
      Emit16(0);
    } else {
      Emit32(0);
    }
  }
}
7261
// Records a conditional branch to `label`. Branches that can never be taken are
// dropped entirely, and always-taken ones are recorded as unconditional branches,
// so the Branch constructor only ever sees genuine conditions.
void Riscv64Assembler::Bcond(
    Riscv64Label* label, bool is_bare, BranchCondition condition, XRegister lhs, XRegister rhs) {
  // TODO(riscv64): Should an assembler perform these optimizations, or should we remove them?
  // If lhs = rhs, this can be a NOP.
  if (Branch::IsNop(condition, lhs, rhs)) {
    return;
  }
  if (Branch::IsUncond(condition, lhs, rhs)) {
    Buncond(label, Zero, is_bare);
    return;
  }

  // Bound labels give a concrete target now; unbound ones are resolved in Bind().
  uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
  branches_.emplace_back(buffer_.Size(),
                         target,
                         condition,
                         lhs,
                         rhs,
                         is_bare,
                         IsExtensionEnabled(Riscv64Extension::kZca));
  FinalizeLabeledBranch(label);
}
7284
// Records an unconditional branch (or call, when `rd` != Zero) to `label`.
void Riscv64Assembler::Buncond(Riscv64Label* label, XRegister rd, bool is_bare) {
  uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
  branches_.emplace_back(
      buffer_.Size(), target, rd, is_bare, IsExtensionEnabled(Riscv64Extension::kZca));
  FinalizeLabeledBranch(label);
}
7291
7292 template <typename XRegisterOrFRegister>
LoadLiteral(Literal * literal,XRegisterOrFRegister rd,Branch::Type literal_type)7293 void Riscv64Assembler::LoadLiteral(Literal* literal,
7294 XRegisterOrFRegister rd,
7295 Branch::Type literal_type) {
7296 Riscv64Label* label = literal->GetLabel();
7297 DCHECK(!label->IsBound());
7298 branches_.emplace_back(buffer_.Size(), Branch::kUnresolved, rd, literal_type);
7299 FinalizeLabeledBranch(label);
7300 }
7301
// Returns the branch with the given id (its index in `branches_`).
Riscv64Assembler::Branch* Riscv64Assembler::GetBranch(uint32_t branch_id) {
  CHECK_LT(branch_id, branches_.size());
  return &branches_[branch_id];
}
7306
// Const overload of GetBranch().
const Riscv64Assembler::Branch* Riscv64Assembler::GetBranch(uint32_t branch_id) const {
  CHECK_LT(branch_id, branches_.size());
  return &branches_[branch_id];
}
7311
// Binds `label` at the current buffer position: resolves every forward branch that
// was linked to it, then stores the label's own position. The stored position is
// relative to the end of the last recorded branch so that it stays valid when
// branches are later promoted and everything after them shifts.
void Riscv64Assembler::Bind(Riscv64Label* label) {
  CHECK(!label->IsBound());
  uint32_t bound_pc = buffer_.Size();

  // Walk the list of branches referring to and preceding this label.
  // Store the previously unknown target addresses in them.
  while (label->IsLinked()) {
    uint32_t branch_id = label->Position();
    Branch* branch = GetBranch(branch_id);
    branch->Resolve(bound_pc);
    // On to the next branch in the list...
    label->position_ = branch->NextBranchId();
  }

  // Now make the label object contain its own location (relative to the end of the preceding
  // branch, if any; it will be used by the branches referring to and following this label).
  uint32_t prev_branch_id = Riscv64Label::kNoPrevBranchId;
  if (!branches_.empty()) {
    prev_branch_id = branches_.size() - 1u;
    const Branch* prev_branch = GetBranch(prev_branch_id);
    bound_pc -= prev_branch->GetEndLocation();
  }
  label->prev_branch_id_ = prev_branch_id;
  label->BindTo(bound_pc);
}
7337
LoadLabelAddress(XRegister rd,Riscv64Label * label)7338 void Riscv64Assembler::LoadLabelAddress(XRegister rd, Riscv64Label* label) {
7339 DCHECK_NE(rd, Zero);
7340 uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
7341 branches_.emplace_back(buffer_.Size(), target, rd, Branch::kLabel);
7342 FinalizeLabeledBranch(label);
7343 }
7344
NewLiteral(size_t size,const uint8_t * data)7345 Literal* Riscv64Assembler::NewLiteral(size_t size, const uint8_t* data) {
7346 // We don't support byte and half-word literals.
7347 if (size == 4u) {
7348 literals_.emplace_back(size, data);
7349 return &literals_.back();
7350 } else {
7351 DCHECK_EQ(size, 8u);
7352 long_literals_.emplace_back(size, data);
7353 return &long_literals_.back();
7354 }
7355 }
7356
CreateJumpTable(ArenaVector<Riscv64Label * > && labels)7357 JumpTable* Riscv64Assembler::CreateJumpTable(ArenaVector<Riscv64Label*>&& labels) {
7358 jump_tables_.emplace_back(std::move(labels));
7359 JumpTable* table = &jump_tables_.back();
7360 DCHECK(!table->GetLabel()->IsBound());
7361 return table;
7362 }
7363
GetLabelLocation(const Riscv64Label * label) const7364 uint32_t Riscv64Assembler::GetLabelLocation(const Riscv64Label* label) const {
7365 CHECK(label->IsBound());
7366 uint32_t target = label->Position();
7367 if (label->prev_branch_id_ != Riscv64Label::kNoPrevBranchId) {
7368 // Get label location based on the branch preceding it.
7369 const Branch* prev_branch = GetBranch(label->prev_branch_id_);
7370 target += prev_branch->GetEndLocation();
7371 }
7372 return target;
7373 }
7374
uint32_t Riscv64Assembler::GetAdjustedPosition(uint32_t old_position) {
  // Translate a pre-branch-promotion position into its post-promotion position by
  // summing the size deltas of all branches located before it.
  //
  // We can reconstruct the adjustment by going through all the branches from the beginning
  // up to the `old_position`. Since we expect `GetAdjustedPosition()` to be called in a loop
  // with increasing `old_position`, we can use the data from last `GetAdjustedPosition()` to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of branches.
  if (old_position < last_old_position_) {
    // Queries went backwards; restart the incremental scan from the beginning.
    last_position_adjustment_ = 0;
    last_old_position_ = 0;
    last_branch_id_ = 0;
  }
  while (last_branch_id_ != branches_.size()) {
    const Branch* branch = GetBranch(last_branch_id_);
    if (branch->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    // Accumulate how much this branch grew during promotion.
    last_position_adjustment_ += branch->GetLength() - branch->GetOldLength();
    ++last_branch_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
7397
void Riscv64Assembler::ReserveJumpTableSpace() {
  if (!jump_tables_.empty()) {
    for (JumpTable& table : jump_tables_) {
      // Bind the table's label at the current end of the buffer.
      Riscv64Label* label = table.GetLabel();
      Bind(label);

      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      size_t required_capacity = orig_size + table.GetSize();
      if (required_capacity > buffer_.Capacity()) {
        buffer_.ExtendCapacity(required_capacity);
      }
#ifndef NDEBUG
      // We bypass the usual EnsureCapacity helper here, so toggle the buffer's
      // debug-mode bookkeeping flag manually around the raw Emit<>() calls.
      buffer_.has_ensured_capacity_ = true;
#endif

      // Fill the space with placeholder data as the data is not final
      // until the branches have been promoted. And we shouldn't
      // be moving uninitialized data during branch promotion.
      // (EmitJumpTables() later CHECKs for this exact placeholder value.)
      for (size_t cnt = table.GetData().size(), i = 0; i < cnt; ++i) {
        buffer_.Emit<uint32_t>(0x1abe1234u);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
    }
  }
}
7427
void Riscv64Assembler::PromoteBranches() {
  // Promote short branches to long as necessary.
  // Iterate to a fixed point: expanding one branch shifts all following code, which
  // can push other branches out of their (shorter) range and require more promotion.
  bool changed;
  do {
    changed = false;
    for (auto& branch : branches_) {
      CHECK(branch.IsResolved());
      uint32_t delta = branch.PromoteIfNeeded();
      // If this branch has been promoted and needs to expand in size,
      // relocate all branches by the expansion size.
      if (delta != 0u) {
        changed = true;
        uint32_t expand_location = branch.GetLocation();
        for (auto& branch2 : branches_) {
          branch2.Relocate(expand_location, delta);
        }
      }
    }
  } while (changed);

  // Account for branch expansion by resizing the code buffer
  // and moving the code in it to its final location.
  size_t branch_count = branches_.size();
  if (branch_count > 0) {
    // Resize.
    Branch& last_branch = branches_[branch_count - 1];
    uint32_t size_delta = last_branch.GetEndLocation() - last_branch.GetOldEndLocation();
    uint32_t old_size = buffer_.Size();
    buffer_.Resize(old_size + size_delta);
    // Move the code residing between branch placeholders, processing from the end of
    // the buffer so each chunk is moved into space that has already been vacated.
    uint32_t end = old_size;
    for (size_t i = branch_count; i > 0;) {
      Branch& branch = branches_[--i];
      uint32_t size = end - branch.GetOldEndLocation();
      buffer_.Move(branch.GetEndLocation(), branch.GetOldEndLocation(), size);
      end = branch.GetOldLocation();
    }
  }

  // Align 64-bit literals by moving them up by 4 bytes if needed.
  // This can increase the PC-relative distance but all literals are accessed with AUIPC+Load(imm12)
  // without branch promotion, so this late adjustment cannot take them out of instruction range.
  if (!long_literals_.empty()) {
    uint32_t first_literal_location = GetLabelLocation(long_literals_.front().GetLabel());
    size_t lit_size = long_literals_.size() * sizeof(uint64_t);
    size_t buf_size = buffer_.Size();
    // 64-bit literals must be at the very end of the buffer.
    CHECK_EQ(first_literal_location + lit_size, buf_size);
    if (!IsAligned<sizeof(uint64_t)>(first_literal_location)) {
      // Insert the padding.
      buffer_.Resize(buf_size + sizeof(uint32_t));
      buffer_.Move(first_literal_location + sizeof(uint32_t), first_literal_location, lit_size);
      DCHECK(!overwriting_);
      overwriting_ = true;
      overwrite_location_ = first_literal_location;
      Emit32(0);  // Illegal instruction.
      overwriting_ = false;
      // Increase target addresses in literal and address loads by 4 bytes in order for correct
      // offsets from PC to be generated.
      for (auto& branch : branches_) {
        uint32_t target = branch.GetTarget();
        if (target >= first_literal_location) {
          branch.Resolve(target + sizeof(uint32_t));
        }
      }
      // If after this we ever call GetLabelLocation() to get the location of a 64-bit literal,
      // we need to adjust the location of the literal's label as well.
      for (Literal& literal : long_literals_) {
        // Bound label's position is negative, hence decrementing it instead of incrementing.
        literal.GetLabel()->position_ -= sizeof(uint32_t);
      }
    }
  }
}
7503
void Riscv64Assembler::PatchCFI() {
  // Nothing to do if no PC advances were delayed during assembly.
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;
  }

  using DelayedAdvancePC = DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  // The recorded PCs are pre-branch-promotion positions; translate each with
  // GetAdjustedPosition() while copying the rest of the stream verbatim.
  static constexpr size_t kExtraSpace = 16;  // Not every PC advance can be encoded in one byte.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + kExtraSpace);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}
7530
void Riscv64Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    CHECK(!overwriting_);
    // Switch from appending instructions at the end of the buffer to overwriting
    // existing instructions (here, jump tables) in the buffer.
    overwriting_ = true;

    for (JumpTable& table : jump_tables_) {
      Riscv64Label* table_label = table.GetLabel();
      uint32_t start = GetLabelLocation(table_label);
      overwrite_location_ = start;

      for (Riscv64Label* target : table.GetData()) {
        // Every slot must still hold the placeholder written by ReserveJumpTableSpace().
        CHECK_EQ(buffer_.Load<uint32_t>(overwrite_location_), 0x1abe1234u);
        // The table will contain target addresses relative to the table start.
        uint32_t offset = GetLabelLocation(target) - start;
        Emit32(offset);
      }
    }

    overwriting_ = false;
  }
}
7554
EmitLiterals()7555 void Riscv64Assembler::EmitLiterals() {
7556 if (!literals_.empty()) {
7557 for (Literal& literal : literals_) {
7558 Riscv64Label* label = literal.GetLabel();
7559 Bind(label);
7560 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
7561 DCHECK_EQ(literal.GetSize(), 4u);
7562 for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
7563 buffer_.Emit<uint8_t>(literal.GetData()[i]);
7564 }
7565 }
7566 }
7567 if (!long_literals_.empty()) {
7568 // These need to be 8-byte-aligned but we shall add the alignment padding after the branch
7569 // promotion, if needed. Since all literals are accessed with AUIPC+Load(imm12) without branch
7570 // promotion, this late adjustment cannot take long literals out of instruction range.
7571 for (Literal& literal : long_literals_) {
7572 Riscv64Label* label = literal.GetLabel();
7573 Bind(label);
7574 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
7575 DCHECK_EQ(literal.GetSize(), 8u);
7576 for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
7577 buffer_.Emit<uint8_t>(literal.GetData()[i]);
7578 }
7579 }
7580 }
7581 }
7582
7583 // This method is used to adjust the base register and offset pair for
7584 // a load/store when the offset doesn't fit into 12-bit signed integer.
void Riscv64Assembler::AdjustBaseAndOffset(XRegister& base,
                                           int32_t& offset,
                                           ScratchRegisterScope& srs) {
  // A scratch register must be available for adjustment even if it's not needed.
  CHECK_NE(srs.AvailableXRegisters(), 0u);
  if (IsInt<12>(offset)) {
    // Already fits the load/store's own 12-bit immediate; no adjustment needed.
    return;
  }

  // A single ADDI can add at most 0x7ff (or subtract at most 0x800), so together
  // with the instruction's own imm12 a one-ADDI adjustment covers a doubled range.
  constexpr int32_t kPositiveOffsetMaxSimpleAdjustment = 0x7ff;
  constexpr int32_t kHighestOffsetForSimpleAdjustment = 2 * kPositiveOffsetMaxSimpleAdjustment;
  // Prefer 8- or 4-byte-aligned adjustments so the remaining short offset keeps the
  // natural alignment of the access where possible.
  constexpr int32_t kPositiveOffsetSimpleAdjustmentAligned8 =
      RoundDown(kPositiveOffsetMaxSimpleAdjustment, 8);
  constexpr int32_t kPositiveOffsetSimpleAdjustmentAligned4 =
      RoundDown(kPositiveOffsetMaxSimpleAdjustment, 4);
  constexpr int32_t kNegativeOffsetSimpleAdjustment = -0x800;
  constexpr int32_t kLowestOffsetForSimpleAdjustment = 2 * kNegativeOffsetSimpleAdjustment;

  XRegister tmp = srs.AllocateXRegister();
  if (offset >= 0 && offset <= kHighestOffsetForSimpleAdjustment) {
    // Make the adjustment 8-byte aligned (0x7f8) except for offsets that cannot be reached
    // with this adjustment, then try 4-byte alignment, then just half of the offset.
    int32_t adjustment = IsInt<12>(offset - kPositiveOffsetSimpleAdjustmentAligned8)
        ? kPositiveOffsetSimpleAdjustmentAligned8
        : IsInt<12>(offset - kPositiveOffsetSimpleAdjustmentAligned4)
            ? kPositiveOffsetSimpleAdjustmentAligned4
            : offset / 2;
    DCHECK(IsInt<12>(adjustment));
    Addi(tmp, base, adjustment);
    offset -= adjustment;
  } else if (offset < 0 && offset >= kLowestOffsetForSimpleAdjustment) {
    // Negative offsets in [-0x1000, -0x800): one ADDI of -0x800 brings the rest in range.
    Addi(tmp, base, kNegativeOffsetSimpleAdjustment);
    offset -= kNegativeOffsetSimpleAdjustment;
  } else if (offset >= 0x7ffff800) {
    // Support even large offsets outside the range supported by `SplitOffset()`.
    LoadConst32(tmp, offset);
    Add(tmp, tmp, base);
    offset = 0;
  } else {
    // General case: materialize the upper 20 bits with LUI, add the base, and keep
    // the short remainder for the load/store's own immediate.
    auto [imm20, short_offset] = SplitOffset(offset);
    Lui(tmp, imm20);
    Add(tmp, tmp, base);
    offset = short_offset;
  }
  base = tmp;
}
7631
7632 template <void (Riscv64Assembler::*insn)(XRegister, XRegister, int32_t)>
LoadFromOffset(XRegister rd,XRegister rs1,int32_t offset)7633 void Riscv64Assembler::LoadFromOffset(XRegister rd, XRegister rs1, int32_t offset) {
7634 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7635 CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
7636 ScratchRegisterScope srs(this);
7637 // If `rd` differs from `rs1`, allow using it as a temporary if needed.
7638 if (rd != rs1) {
7639 srs.IncludeXRegister(rd);
7640 }
7641 AdjustBaseAndOffset(rs1, offset, srs);
7642 (this->*insn)(rd, rs1, offset);
7643 }
7644
7645 template <void (Riscv64Assembler::*insn)(XRegister, XRegister, int32_t)>
StoreToOffset(XRegister rs2,XRegister rs1,int32_t offset)7646 void Riscv64Assembler::StoreToOffset(XRegister rs2, XRegister rs1, int32_t offset) {
7647 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7648 CHECK_EQ((1u << rs2) & available_scratch_core_registers_, 0u);
7649 ScratchRegisterScope srs(this);
7650 AdjustBaseAndOffset(rs1, offset, srs);
7651 (this->*insn)(rs2, rs1, offset);
7652 }
7653
7654 template <void (Riscv64Assembler::*insn)(FRegister, XRegister, int32_t)>
FLoadFromOffset(FRegister rd,XRegister rs1,int32_t offset)7655 void Riscv64Assembler::FLoadFromOffset(FRegister rd, XRegister rs1, int32_t offset) {
7656 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7657 ScratchRegisterScope srs(this);
7658 AdjustBaseAndOffset(rs1, offset, srs);
7659 (this->*insn)(rd, rs1, offset);
7660 }
7661
7662 template <void (Riscv64Assembler::*insn)(FRegister, XRegister, int32_t)>
FStoreToOffset(FRegister rs2,XRegister rs1,int32_t offset)7663 void Riscv64Assembler::FStoreToOffset(FRegister rs2, XRegister rs1, int32_t offset) {
7664 CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7665 ScratchRegisterScope srs(this);
7666 AdjustBaseAndOffset(rs1, offset, srs);
7667 (this->*insn)(rs2, rs1, offset);
7668 }
7669
void Riscv64Assembler::LoadImmediate(XRegister rd, int64_t imm, bool can_use_tmp) {
  // Materialize the 64-bit immediate `imm` into `rd`. Several candidate instruction
  // sequences are counted first and a shorter one is chosen; with `can_use_tmp`, a
  // scratch register may additionally be used to build high/low halves in parallel.
  CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
  ScratchRegisterScope srs(this);
  CHECK_IMPLIES(can_use_tmp, srs.AvailableXRegisters() != 0u);

  // Helper lambdas.
  auto addi = [&](XRegister rd, XRegister rs, int32_t imm) { Addi(rd, rs, imm); };
  auto addiw = [&](XRegister rd, XRegister rs, int32_t imm) { Addiw(rd, rs, imm); };
  auto slli = [&](XRegister rd, XRegister rs, int32_t imm) { Slli(rd, rs, imm); };
  auto lui = [&](XRegister rd, uint32_t imm20) { Lui(rd, imm20); };

  // Simple LUI+ADDI/W can handle value range [-0x80000800, 0x7fffffff].
  auto is_simple_li_value = [](int64_t value) {
    return value >= INT64_C(-0x80000800) && value <= INT64_C(0x7fffffff);
  };
  // The emission helpers take the instruction emitters as parameters so that the
  // same logic can either emit code or merely count instructions (see the
  // `count_*` lambdas below).
  auto emit_simple_li_helper = [&](XRegister rd,
                                   int64_t value,
                                   auto&& addi,
                                   auto&& addiw,
                                   auto&& slli,
                                   auto&& lui) {
    DCHECK(is_simple_li_value(value)) << "0x" << std::hex << value;
    if (IsInt<12>(value)) {
      addi(rd, Zero, value);
    } else if (CTZ(value) < 12 && IsInt(6 + CTZ(value), value)) {
      // This path yields two 16-bit instructions with the "C" Standard Extension.
      addi(rd, Zero, value >> CTZ(value));
      slli(rd, rd, CTZ(value));
    } else if (value < INT64_C(-0x80000000)) {
      // `value` is in [-0x80000800, -0x80000000): LUI with imm20 = 0x80000
      // sign-extends to -0x80000000 on RV64 and the negative ADDI reaches below it.
      int32_t small_value = dchecked_integral_cast<int32_t>(value - INT64_C(-0x80000000));
      DCHECK(IsInt<12>(small_value));
      DCHECK_LT(small_value, 0);
      lui(rd, 1u << 19);
      addi(rd, rd, small_value);
    } else {
      DCHECK(IsInt<32>(value));
      // Note: Similar to `SplitOffset()` but we can target the full 32-bit range with ADDIW.
      int64_t near_value = (value + 0x800) & ~0xfff;
      int32_t small_value = value - near_value;
      DCHECK(IsInt<12>(small_value));
      uint32_t imm20 = static_cast<uint32_t>(near_value) >> 12;
      DCHECK_NE(imm20, 0u);  // Small values are handled above.
      lui(rd, imm20);
      if (small_value != 0) {
        addiw(rd, rd, small_value);
      }
    }
  };
  auto emit_simple_li = [&](XRegister rd, int64_t value) {
    emit_simple_li_helper(rd, value, addi, addiw, slli, lui);
  };
  // Dry-run variant: count instructions the simple sequence would emit.
  auto count_simple_li_instructions = [&](int64_t value) {
    size_t num_instructions = 0u;
    auto count_rri = [&](XRegister, XRegister, int32_t) { ++num_instructions; };
    auto count_ru = [&](XRegister, uint32_t) { ++num_instructions; };
    emit_simple_li_helper(Zero, value, count_rri, count_rri, count_rri, count_ru);
    return num_instructions;
  };

  // If LUI+ADDI/W is not enough, we can generate up to 3 SLLI+ADDI afterwards (up to 8 instructions
  // total). The ADDI from the first SLLI+ADDI pair can be a no-op.
  auto emit_with_slli_addi_helper = [&](XRegister rd,
                                        int64_t value,
                                        auto&& addi,
                                        auto&& addiw,
                                        auto&& slli,
                                        auto&& lui) {
    static constexpr size_t kMaxNumSllAddi = 3u;
    int32_t addi_values[kMaxNumSllAddi];
    size_t sll_shamts[kMaxNumSllAddi];
    size_t num_sll_addi = 0u;
    // Repeatedly peel off the sign-extended low 12 bits and the trailing zero bits
    // until the remaining high part is reachable with the simple sequence.
    while (!is_simple_li_value(value)) {
      DCHECK_LT(num_sll_addi, kMaxNumSllAddi);
      // Prepare sign-extended low 12 bits for ADDI.
      int64_t addi_value = (value & 0xfff) - ((value & 0x800) << 1);
      DCHECK(IsInt<12>(addi_value));
      int64_t remaining = value - addi_value;
      size_t shamt = CTZ(remaining);
      DCHECK_GE(shamt, 12u);
      addi_values[num_sll_addi] = addi_value;
      sll_shamts[num_sll_addi] = shamt;
      value = remaining >> shamt;
      ++num_sll_addi;
    }
    if (num_sll_addi != 0u && IsInt<20>(value) && !IsInt<12>(value)) {
      // If `sll_shamts[num_sll_addi - 1u]` was only 12, we would have stopped
      // the decomposition a step earlier with smaller `num_sll_addi`.
      DCHECK_GT(sll_shamts[num_sll_addi - 1u], 12u);
      // Emit the signed 20-bit value with LUI and reduce the SLLI shamt by 12 to compensate.
      sll_shamts[num_sll_addi - 1u] -= 12u;
      lui(rd, dchecked_integral_cast<uint32_t>(value & 0xfffff));
    } else {
      emit_simple_li_helper(rd, value, addi, addiw, slli, lui);
    }
    // Replay the recorded SLLI+ADDI pairs from the highest bits outward.
    for (size_t i = num_sll_addi; i != 0u; ) {
      --i;
      slli(rd, rd, sll_shamts[i]);
      if (addi_values[i] != 0) {
        addi(rd, rd, addi_values[i]);
      }
    }
  };
  auto emit_with_slli_addi = [&](XRegister rd, int64_t value) {
    emit_with_slli_addi_helper(rd, value, addi, addiw, slli, lui);
  };
  // Dry-run variant: count instructions the SLLI+ADDI decomposition would emit.
  auto count_instructions_with_slli_addi = [&](int64_t value) {
    size_t num_instructions = 0u;
    auto count_rri = [&](XRegister, XRegister, int32_t) { ++num_instructions; };
    auto count_ru = [&](XRegister, uint32_t) { ++num_instructions; };
    emit_with_slli_addi_helper(Zero, value, count_rri, count_rri, count_rri, count_ru);
    return num_instructions;
  };

  size_t insns_needed = count_instructions_with_slli_addi(imm);
  size_t trailing_slli_shamt = 0u;
  if (insns_needed > 2u) {
    // Sometimes it's better to end with a SLLI even when the above code would end with ADDI.
    if ((imm & 1) == 0 && (imm & 0xfff) != 0) {
      int64_t value = imm >> CTZ(imm);
      size_t new_insns_needed = count_instructions_with_slli_addi(value) + /*SLLI*/ 1u;
      DCHECK_GT(new_insns_needed, 2u);
      if (insns_needed > new_insns_needed) {
        insns_needed = new_insns_needed;
        trailing_slli_shamt = CTZ(imm);
      }
    }

    // Sometimes we can emit a shorter sequence that ends with SRLI.
    if (imm > 0) {
      size_t shamt = CLZ(static_cast<uint64_t>(imm));
      DCHECK_LE(shamt, 32u);  // Otherwise we would not get here as `insns_needed` would be <= 2.
      // All bits below the leading zeros set: ADDI -1 then shift the ones into place.
      if (imm == dchecked_integral_cast<int64_t>(MaxInt<uint64_t>(64 - shamt))) {
        Addi(rd, Zero, -1);
        Srli(rd, rd, shamt);
        return;
      }

      // Shift the value left so its top bit lands in bit 63, build that (negative)
      // value, then shift right to recover `imm` with the SRLI zero-filling the top.
      int64_t value = static_cast<int64_t>(static_cast<uint64_t>(imm) << shamt);
      DCHECK_LT(value, 0);
      if (is_simple_li_value(value)){
        size_t new_insns_needed = count_simple_li_instructions(value) + /*SRLI*/ 1u;
        // In case of equal number of instructions, clang prefers the sequence without SRLI.
        if (new_insns_needed < insns_needed) {
          // If we emit ADDI, we set low bits that shall be shifted out to one in line with clang,
          // effectively choosing to emit the negative constant closest to zero.
          int32_t shifted_out = dchecked_integral_cast<int32_t>(MaxInt<uint32_t>(shamt));
          DCHECK_EQ(value & shifted_out, 0);
          emit_simple_li(rd, (value & 0xfff) == 0 ? value : value + shifted_out);
          Srli(rd, rd, shamt);
          return;
        }
      }

      size_t ctz = CTZ(static_cast<uint64_t>(value));
      if (IsInt(ctz + 20, value)) {
        size_t new_insns_needed = /*ADDI or LUI*/ 1u + /*SLLI*/ 1u + /*SRLI*/ 1u;
        if (new_insns_needed < insns_needed) {
          // Clang prefers ADDI+SLLI+SRLI over LUI+SLLI+SRLI.
          if (IsInt(ctz + 12, value)) {
            Addi(rd, Zero, value >> ctz);
            Slli(rd, rd, ctz);
          } else {
            Lui(rd, (static_cast<uint64_t>(value) >> ctz) & 0xfffffu);
            Slli(rd, rd, ctz - 12);
          }
          Srli(rd, rd, shamt);
          return;
        }
      }
    }

    // If we can use a scratch register, try using it to emit a shorter sequence. Without a
    // scratch reg, the sequence is up to 8 instructions, with a scratch reg only up to 6.
    if (can_use_tmp) {
      // Split into sign-extended low 32 bits and the remaining high part; build them
      // independently and combine with SLLI+ADD.
      int64_t low = (imm & 0xffffffff) - ((imm & 0x80000000) << 1);
      int64_t remainder = imm - low;
      size_t slli_shamt = CTZ(remainder);
      DCHECK_GE(slli_shamt, 32u);
      int64_t high = remainder >> slli_shamt;
      size_t new_insns_needed =
          ((IsInt<20>(high) || (high & 0xfff) == 0u) ? 1u : 2u) +
          count_simple_li_instructions(low) +
          /*SLLI+ADD*/ 2u;
      if (new_insns_needed < insns_needed) {
        DCHECK_NE(low & 0xfffff000, 0);
        XRegister tmp = srs.AllocateXRegister();
        if (IsInt<20>(high) && !IsInt<12>(high)) {
          // Emit the signed 20-bit value with LUI and reduce the SLLI shamt by 12 to compensate.
          Lui(rd, static_cast<uint32_t>(high & 0xfffff));
          slli_shamt -= 12;
        } else {
          emit_simple_li(rd, high);
        }
        emit_simple_li(tmp, low);
        Slli(rd, rd, slli_shamt);
        Add(rd, rd, tmp);
        return;
      }
    }
  }
  // Default: the SLLI+ADDI decomposition, with the optional trailing SLLI chosen above.
  emit_with_slli_addi(rd, trailing_slli_shamt != 0u ? imm >> trailing_slli_shamt : imm);
  if (trailing_slli_shamt != 0u) {
    Slli(rd, rd, trailing_slli_shamt);
  }
}
7875
7876 /////////////////////////////// RV64 VARIANTS extension end ////////////
7877
7878 } // namespace riscv64
7879 } // namespace art
7880