// Copyright 2019, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <sys/mman.h>

#include <cfloat>
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <cstring>

#include "test-runner.h"
#include "test-utils.h"
#include "aarch64/test-utils-aarch64.h"

#include "aarch64/cpu-aarch64.h"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#include "aarch64/simulator-aarch64.h"
#include "test-assembler-aarch64.h"

namespace vixl {
namespace aarch64 {

TEST(load_store_float) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  float src[3] = {1.0, 2.0, 3.0};
  float dst[3] = {0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x17, src_base);
  __ Mov(x18, dst_base);
  __ Mov(x19, src_base);
  __ Mov(x20, dst_base);
  __ Mov(x21, src_base);
  __ Mov(x22, dst_base);
  __ Ldr(s0, MemOperand(x17, sizeof(src[0])));
  __ Str(s0, MemOperand(x18, sizeof(dst[0]), PostIndex));
  __ Ldr(s1, MemOperand(x19, sizeof(src[0]), PostIndex));
  __ Str(s1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex));
  __ Ldr(s2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex));
  __ Str(s2, MemOperand(x22, sizeof(dst[0])));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(2.0, s0);
    ASSERT_EQUAL_FP32(2.0, dst[0]);
    ASSERT_EQUAL_FP32(1.0, s1);
    ASSERT_EQUAL_FP32(1.0, dst[2]);
    ASSERT_EQUAL_FP32(3.0, s2);
    ASSERT_EQUAL_FP32(3.0, dst[1]);
    ASSERT_EQUAL_64(src_base, x17);
    ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18);
    ASSERT_EQUAL_64(src_base + sizeof(src[0]), x19);
    ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20);
    ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x21);
    ASSERT_EQUAL_64(dst_base, x22);
  }
}


TEST(load_store_double) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  double src[3] = {1.0, 2.0, 3.0};
  double dst[3] = {0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x17, src_base);
  __ Mov(x18, dst_base);
  __ Mov(x19, src_base);
  __ Mov(x20, dst_base);
  __ Mov(x21, src_base);
  __ Mov(x22, dst_base);
  __ Ldr(d0, MemOperand(x17, sizeof(src[0])));
  __ Str(d0, MemOperand(x18, sizeof(dst[0]), PostIndex));
  __ Ldr(d1, MemOperand(x19, sizeof(src[0]), PostIndex));
  __ Str(d1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex));
  __ Ldr(d2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex));
  __ Str(d2, MemOperand(x22, sizeof(dst[0])));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP64(2.0, d0);
    ASSERT_EQUAL_FP64(2.0, dst[0]);
    ASSERT_EQUAL_FP64(1.0, d1);
    ASSERT_EQUAL_FP64(1.0, dst[2]);
    ASSERT_EQUAL_FP64(3.0, d2);
    ASSERT_EQUAL_FP64(3.0, dst[1]);
    ASSERT_EQUAL_64(src_base, x17);
    ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18);
    ASSERT_EQUAL_64(src_base + sizeof(src[0]), x19);
    ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20);
    ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x21);
    ASSERT_EQUAL_64(dst_base, x22);
  }
}

TEST(ldp_stp_float) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  float src[2] = {1.0, 2.0};
  float dst[3] = {0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x16, src_base);
  __ Mov(x17, dst_base);
  __ Ldp(s31, s0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex));
  __ Stp(s0, s31, MemOperand(x17, sizeof(dst[1]), PreIndex));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(1.0, s31);
    ASSERT_EQUAL_FP32(2.0, s0);
    ASSERT_EQUAL_FP32(0.0, dst[0]);
    ASSERT_EQUAL_FP32(2.0, dst[1]);
    ASSERT_EQUAL_FP32(1.0, dst[2]);
    ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x16);
    ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17);
  }
}


TEST(ldp_stp_double) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  double src[2] = {1.0, 2.0};
  double dst[3] = {0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x16, src_base);
  __ Mov(x17, dst_base);
  __ Ldp(d31, d0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex));
  __ Stp(d0, d31, MemOperand(x17, sizeof(dst[1]), PreIndex));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP64(1.0, d31);
    ASSERT_EQUAL_FP64(2.0, d0);
    ASSERT_EQUAL_FP64(0.0, dst[0]);
    ASSERT_EQUAL_FP64(2.0, dst[1]);
    ASSERT_EQUAL_FP64(1.0, dst[2]);
    ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x16);
    ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17);
  }
}

TEST(ldnp_stnp_offset_float) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  float src[3] = {1.2, 2.3, 3.4};
  float dst[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x16, src_base);
  __ Mov(x17, dst_base);
  __ Mov(x18, src_base + 12);
  __ Mov(x19, dst_base + 24);

  // Ensure address set up has happened before executing non-temporal ops.
  __ Dmb(InnerShareable, BarrierAll);

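  // (Ldnp and Stnp are the load/store pair variants that carry a non-temporal,
  // i.e. no-allocate, cache hint; apart from that hint they behave like
  // Ldp/Stp here.)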
  __ Ldnp(s0, s1, MemOperand(x16));
  __ Ldnp(s2, s3, MemOperand(x16, 4));
  __ Ldnp(s5, s4, MemOperand(x18, -8));
  __ Stnp(s1, s0, MemOperand(x17));
  __ Stnp(s3, s2, MemOperand(x17, 8));
  __ Stnp(s4, s5, MemOperand(x19, -8));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(1.2, s0);
    ASSERT_EQUAL_FP32(2.3, s1);
    ASSERT_EQUAL_FP32(2.3, dst[0]);
    ASSERT_EQUAL_FP32(1.2, dst[1]);
    ASSERT_EQUAL_FP32(2.3, s2);
    ASSERT_EQUAL_FP32(3.4, s3);
    ASSERT_EQUAL_FP32(3.4, dst[2]);
    ASSERT_EQUAL_FP32(2.3, dst[3]);
    ASSERT_EQUAL_FP32(3.4, s4);
    ASSERT_EQUAL_FP32(2.3, s5);
    ASSERT_EQUAL_FP32(3.4, dst[4]);
    ASSERT_EQUAL_FP32(2.3, dst[5]);
    ASSERT_EQUAL_64(src_base, x16);
    ASSERT_EQUAL_64(dst_base, x17);
    ASSERT_EQUAL_64(src_base + 12, x18);
    ASSERT_EQUAL_64(dst_base + 24, x19);
  }
}


TEST(ldnp_stnp_offset_double) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  double src[3] = {1.2, 2.3, 3.4};
  double dst[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
  uintptr_t src_base = reinterpret_cast<uintptr_t>(src);
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x16, src_base);
  __ Mov(x17, dst_base);
  __ Mov(x18, src_base + 24);
  __ Mov(x19, dst_base + 48);

  // Ensure address set up has happened before executing non-temporal ops.
  __ Dmb(InnerShareable, BarrierAll);

  __ Ldnp(d0, d1, MemOperand(x16));
  __ Ldnp(d2, d3, MemOperand(x16, 8));
  __ Ldnp(d5, d4, MemOperand(x18, -16));
  __ Stnp(d1, d0, MemOperand(x17));
  __ Stnp(d3, d2, MemOperand(x17, 16));
  __ Stnp(d4, d5, MemOperand(x19, -16));
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP64(1.2, d0);
    ASSERT_EQUAL_FP64(2.3, d1);
    ASSERT_EQUAL_FP64(2.3, dst[0]);
    ASSERT_EQUAL_FP64(1.2, dst[1]);
    ASSERT_EQUAL_FP64(2.3, d2);
    ASSERT_EQUAL_FP64(3.4, d3);
    ASSERT_EQUAL_FP64(3.4, dst[2]);
    ASSERT_EQUAL_FP64(2.3, dst[3]);
    ASSERT_EQUAL_FP64(3.4, d4);
    ASSERT_EQUAL_FP64(2.3, d5);
    ASSERT_EQUAL_FP64(3.4, dst[4]);
    ASSERT_EQUAL_FP64(2.3, dst[5]);
    ASSERT_EQUAL_64(src_base, x16);
    ASSERT_EQUAL_64(dst_base, x17);
    ASSERT_EQUAL_64(src_base + 24, x18);
    ASSERT_EQUAL_64(dst_base + 48, x19);
  }
}

template <typename T>
void LoadFPValueHelper(T values[], int card) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  const bool is_32bits = (sizeof(T) == 4);
  const VRegister& fp_tgt = is_32bits ? VRegister(s2) : VRegister(d2);
  const Register& tgt1 = is_32bits ? Register(w1) : Register(x1);
  const Register& tgt2 = is_32bits ? Register(w2) : Register(x2);

  START();
  __ Mov(x0, 0);

  // If one of the values differs, x0 will be set to one.
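  // Each iteration emits a literal (PC-relative) load of values[i] into
  // fp_tgt and compares its raw bit pattern against the expected bits in
  // tgt1.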
  for (int i = 0; i < card; ++i) {
    __ Mov(tgt1,
           is_32bits ? FloatToRawbits(values[i]) : DoubleToRawbits(values[i]));
    __ Ldr(fp_tgt, values[i]);
    __ Fmov(tgt2, fp_tgt);
    __ Cmp(tgt1, tgt2);
    __ Cset(x0, ne);
  }
  END();

  if (CAN_RUN()) {
    RUN();

    // If one of the values differs, the trace can be used to identify which
    // one.
    ASSERT_EQUAL_64(0, x0);
  }
}

TEST(ldr_literal_values_d) {
  static const double kValues[] = {-0.0, 0.0, -1.0, 1.0, -1e10, 1e10};

  LoadFPValueHelper(kValues, sizeof(kValues) / sizeof(kValues[0]));
}


TEST(ldr_literal_values_s) {
  static const float kValues[] = {-0.0, 0.0, -1.0, 1.0, -1e10, 1e10};

  LoadFPValueHelper(kValues, sizeof(kValues) / sizeof(kValues[0]));
}

TEST(fmov_imm) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);

  START();
  __ Fmov(s1, 255.0);
  __ Fmov(d2, 12.34567);
  __ Fmov(s3, 0.0);
  __ Fmov(d4, 0.0);
  __ Fmov(s5, kFP32PositiveInfinity);
  __ Fmov(d6, kFP64NegativeInfinity);
  __ Fmov(h7, RawbitsToFloat16(0x6400U));
  __ Fmov(h8, kFP16PositiveInfinity);
  __ Fmov(s11, 1.0);
  __ Fmov(h12, RawbitsToFloat16(0x7BFF));
  __ Fmov(h13, RawbitsToFloat16(0x57F2));
  __ Fmov(d22, -13.0);
  __ Fmov(h23, RawbitsToFloat16(0xC500U));
  __ Fmov(h24, Float16(-5.0));
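  // (0xC500 is the FP16 encoding of -5.0; h23 and h24 check the conversion in
  // both directions.)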
  __ Fmov(h25, Float16(2049.0));
  __ Fmov(h21, RawbitsToFloat16(0x6404U));
  __ Fmov(h26, RawbitsToFloat16(0x0U));
  __ Fmov(h27, RawbitsToFloat16(0x7e00U));
  END();
  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(255.0, s1);
    ASSERT_EQUAL_FP64(12.34567, d2);
    ASSERT_EQUAL_FP32(0.0, s3);
    ASSERT_EQUAL_FP64(0.0, d4);
    ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5);
    ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d6);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x6400U), h7);
    ASSERT_EQUAL_FP16(kFP16PositiveInfinity, h8);
    ASSERT_EQUAL_FP32(1.0, s11);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x7BFF), h12);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x57F2U), h13);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x6404), h21);
    ASSERT_EQUAL_FP64(-13.0, d22);
    ASSERT_EQUAL_FP16(Float16(-5.0), h23);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0xC500), h24);
    // 2049 is unrepresentable in FP16; it rounds to 2048 (0x6800).
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x6800), h25);
    ASSERT_EQUAL_FP16(kFP16PositiveZero, h26);
    // NaN check.
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x7e00), h27);
  }
}

TEST(fmov_reg) {
  SETUP_WITH_FEATURES(CPUFeatures::kNEON,
                      CPUFeatures::kFP,
                      CPUFeatures::kFPHalf);

  START();

  __ Fmov(h3, RawbitsToFloat16(0xCA80U));
  __ Fmov(h7, h3);
  __ Fmov(h8, -5.0);
  __ Fmov(w3, h8);
  __ Fmov(h9, w3);
  __ Fmov(h8, Float16(1024.0));
  __ Fmov(x4, h8);
  __ Fmov(h10, x4);
  __ Fmov(s20, 1.0);
  __ Fmov(w10, s20);
  __ Fmov(s30, w10);
  __ Fmov(s5, s20);
  __ Fmov(d1, -13.0);
  __ Fmov(x1, d1);
  __ Fmov(d2, x1);
  __ Fmov(d4, d1);
  __ Fmov(d6, RawbitsToDouble(0x0123456789abcdef));
  __ Fmov(s6, s6);
  __ Fmov(d0, 0.0);
  __ Fmov(v0.D(), 1, x1);
  __ Fmov(x2, v0.D(), 1);
  __ Fmov(v3.D(), 1, x4);
  __ Fmov(v3.D(), 0, x1);
  __ Fmov(x5, v1.D(), 0);

  END();
  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP16(RawbitsToFloat16(0xCA80U), h7);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0xC500U), h9);
    ASSERT_EQUAL_32(0x0000C500, w3);
    ASSERT_EQUAL_64(0x0000000000006400, x4);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0x6400), h10);
    ASSERT_EQUAL_32(FloatToRawbits(1.0), w10);
    ASSERT_EQUAL_FP32(1.0, s30);
    ASSERT_EQUAL_FP32(1.0, s5);
    ASSERT_EQUAL_64(DoubleToRawbits(-13.0), x1);
    ASSERT_EQUAL_FP64(-13.0, d2);
    ASSERT_EQUAL_FP64(-13.0, d4);
    ASSERT_EQUAL_FP32(RawbitsToFloat(0x89abcdef), s6);
    ASSERT_EQUAL_128(DoubleToRawbits(-13.0), 0x0000000000000000, q0);
    ASSERT_EQUAL_64(DoubleToRawbits(-13.0), x2);
    ASSERT_EQUAL_128(0x0000000000006400, DoubleToRawbits(-13.0), q3);
    ASSERT_EQUAL_64(DoubleToRawbits(-13.0), x5);
  }
}


TEST(fadd) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  START();
  __ Fmov(s14, -0.0f);
  __ Fmov(s15, kFP32PositiveInfinity);
  __ Fmov(s16, kFP32NegativeInfinity);
  __ Fmov(s17, 3.25f);
  __ Fmov(s18, 1.0f);
  __ Fmov(s19, 0.0f);

  __ Fmov(d26, -0.0);
  __ Fmov(d27, kFP64PositiveInfinity);
  __ Fmov(d28, kFP64NegativeInfinity);
  __ Fmov(d29, 0.0);
  __ Fmov(d30, -2.0);
  __ Fmov(d31, 2.25);

  __ Fadd(s0, s17, s18);
  __ Fadd(s1, s18, s19);
  __ Fadd(s2, s14, s18);
  __ Fadd(s3, s15, s18);
  __ Fadd(s4, s16, s18);
  __ Fadd(s5, s15, s16);
  __ Fadd(s6, s16, s15);

  __ Fadd(d7, d30, d31);
  __ Fadd(d8, d29, d31);
  __ Fadd(d9, d26, d31);
  __ Fadd(d10, d27, d31);
  __ Fadd(d11, d28, d31);
  __ Fadd(d12, d27, d28);
  __ Fadd(d13, d28, d27);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(4.25, s0);
    ASSERT_EQUAL_FP32(1.0, s1);
    ASSERT_EQUAL_FP32(1.0, s2);
    ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s3);
    ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s4);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6);
    ASSERT_EQUAL_FP64(0.25, d7);
    ASSERT_EQUAL_FP64(2.25, d8);
    ASSERT_EQUAL_FP64(2.25, d9);
    ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d10);
    ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d11);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
  }
}


TEST(fadd_h) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);

  START();
  __ Fmov(h14, -0.0f);
  __ Fmov(h15, kFP16PositiveInfinity);
  __ Fmov(h16, kFP16NegativeInfinity);
  __ Fmov(h17, 3.25f);
  __ Fmov(h18, 1.0);
  __ Fmov(h19, 0.0f);
  __ Fmov(h20, 5.0f);

  __ Fadd(h0, h17, h18);
  __ Fadd(h1, h18, h19);
  __ Fadd(h2, h14, h18);
  __ Fadd(h3, h15, h18);
  __ Fadd(h4, h16, h18);
  __ Fadd(h5, h15, h16);
  __ Fadd(h6, h16, h15);
  __ Fadd(h7, h20, h20);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP16(Float16(4.25), h0);
    ASSERT_EQUAL_FP16(Float16(1.0), h1);
    ASSERT_EQUAL_FP16(Float16(1.0), h2);
    ASSERT_EQUAL_FP16(kFP16PositiveInfinity, h3);
    ASSERT_EQUAL_FP16(kFP16NegativeInfinity, h4);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h5);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h6);
    ASSERT_EQUAL_FP16(Float16(10.0), h7);
  }
}

TEST(fsub) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  START();
  __ Fmov(s14, -0.0f);
  __ Fmov(s15, kFP32PositiveInfinity);
  __ Fmov(s16, kFP32NegativeInfinity);
  __ Fmov(s17, 3.25f);
  __ Fmov(s18, 1.0f);
  __ Fmov(s19, 0.0f);

  __ Fmov(d26, -0.0);
  __ Fmov(d27, kFP64PositiveInfinity);
  __ Fmov(d28, kFP64NegativeInfinity);
  __ Fmov(d29, 0.0);
  __ Fmov(d30, -2.0);
  __ Fmov(d31, 2.25);

  __ Fsub(s0, s17, s18);
  __ Fsub(s1, s18, s19);
  __ Fsub(s2, s14, s18);
  __ Fsub(s3, s18, s15);
  __ Fsub(s4, s18, s16);
  __ Fsub(s5, s15, s15);
  __ Fsub(s6, s16, s16);

  __ Fsub(d7, d30, d31);
  __ Fsub(d8, d29, d31);
  __ Fsub(d9, d26, d31);
  __ Fsub(d10, d31, d27);
  __ Fsub(d11, d31, d28);
  __ Fsub(d12, d27, d27);
  __ Fsub(d13, d28, d28);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(2.25, s0);
    ASSERT_EQUAL_FP32(1.0, s1);
    ASSERT_EQUAL_FP32(-1.0, s2);
    ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s3);
    ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s4);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6);
    ASSERT_EQUAL_FP64(-4.25, d7);
    ASSERT_EQUAL_FP64(-2.25, d8);
    ASSERT_EQUAL_FP64(-2.25, d9);
    ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10);
    ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
  }
}


TEST(fsub_h) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);

  START();
  __ Fmov(h14, -0.0f);
  __ Fmov(h15, kFP16PositiveInfinity);
  __ Fmov(h16, kFP16NegativeInfinity);
  __ Fmov(h17, 3.25f);
  __ Fmov(h18, 1.0f);
  __ Fmov(h19, 0.0f);

  __ Fsub(h0, h17, h18);
  __ Fsub(h1, h18, h19);
  __ Fsub(h2, h14, h18);
  __ Fsub(h3, h18, h15);
  __ Fsub(h4, h18, h16);
  __ Fsub(h5, h15, h15);
  __ Fsub(h6, h16, h16);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP16(Float16(2.25), h0);
    ASSERT_EQUAL_FP16(Float16(1.0), h1);
    ASSERT_EQUAL_FP16(Float16(-1.0), h2);
    ASSERT_EQUAL_FP16(kFP16NegativeInfinity, h3);
    ASSERT_EQUAL_FP16(kFP16PositiveInfinity, h4);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h5);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h6);
  }
}


TEST(fmul) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  START();
  __ Fmov(s14, -0.0f);
  __ Fmov(s15, kFP32PositiveInfinity);
  __ Fmov(s16, kFP32NegativeInfinity);
  __ Fmov(s17, 3.25f);
  __ Fmov(s18, 2.0f);
  __ Fmov(s19, 0.0f);
  __ Fmov(s20, -2.0f);

  __ Fmov(d26, -0.0);
  __ Fmov(d27, kFP64PositiveInfinity);
  __ Fmov(d28, kFP64NegativeInfinity);
  __ Fmov(d29, 0.0);
  __ Fmov(d30, -2.0);
  __ Fmov(d31, 2.25);

  __ Fmul(s0, s17, s18);
  __ Fmul(s1, s18, s19);
  __ Fmul(s2, s14, s14);
  __ Fmul(s3, s15, s20);
  __ Fmul(s4, s16, s20);
  __ Fmul(s5, s15, s19);
  __ Fmul(s6, s19, s16);

  __ Fmul(d7, d30, d31);
  __ Fmul(d8, d29, d31);
  __ Fmul(d9, d26, d26);
  __ Fmul(d10, d27, d30);
  __ Fmul(d11, d28, d30);
  __ Fmul(d12, d27, d29);
  __ Fmul(d13, d29, d28);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(6.5, s0);
    ASSERT_EQUAL_FP32(0.0, s1);
    ASSERT_EQUAL_FP32(0.0, s2);
    ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s3);
    ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s4);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5);
    ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6);
    ASSERT_EQUAL_FP64(-4.5, d7);
    ASSERT_EQUAL_FP64(0.0, d8);
    ASSERT_EQUAL_FP64(0.0, d9);
    ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10);
    ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12);
    ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
  }
}


TEST(fmul_h) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);

  START();
  __ Fmov(h14, -0.0f);
  __ Fmov(h15, kFP16PositiveInfinity);
  __ Fmov(h16, kFP16NegativeInfinity);
  __ Fmov(h17, 3.25f);
  __ Fmov(h18, 2.0f);
  __ Fmov(h19, 0.0f);
  __ Fmov(h20, -2.0f);

  __ Fmul(h0, h17, h18);
  __ Fmul(h1, h18, h19);
  __ Fmul(h2, h14, h14);
  __ Fmul(h3, h15, h20);
  __ Fmul(h4, h16, h20);
  __ Fmul(h5, h15, h19);
  __ Fmul(h6, h19, h16);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP16(Float16(6.5), h0);
    ASSERT_EQUAL_FP16(Float16(0.0), h1);
    ASSERT_EQUAL_FP16(Float16(0.0), h2);
    ASSERT_EQUAL_FP16(kFP16NegativeInfinity, h3);
    ASSERT_EQUAL_FP16(kFP16PositiveInfinity, h4);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h5);
    ASSERT_EQUAL_FP16(kFP16DefaultNaN, h6);
  }
}


TEST(fnmul_h) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);

  START();
  __ Fmov(h14, -0.0f);
  __ Fmov(h15, kFP16PositiveInfinity);
  __ Fmov(h16, kFP16NegativeInfinity);
  __ Fmov(h17, 3.25f);
  __ Fmov(h18, 2.0f);
  __ Fmov(h19, 0.0f);
  __ Fmov(h20, -2.0f);

  __ Fnmul(h0, h17, h18);
  __ Fnmul(h1, h18, h19);
  __ Fnmul(h2, h14, h14);
  __ Fnmul(h3, h15, h20);
  __ Fnmul(h4, h16, h20);
  __ Fnmul(h5, h15, h19);
  __ Fnmul(h6, h19, h16);
  END();

  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP16(Float16(-6.5), h0);
    ASSERT_EQUAL_FP16(Float16(-0.0), h1);
    ASSERT_EQUAL_FP16(Float16(-0.0), h2);
    ASSERT_EQUAL_FP16(kFP16PositiveInfinity, h3);
    ASSERT_EQUAL_FP16(kFP16NegativeInfinity, h4);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0xfe00), h5);
    ASSERT_EQUAL_FP16(RawbitsToFloat16(0xfe00), h6);
  }
}

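// The fused operations exercised by this helper compute:
//   fmadd  = a + (n * m)      fmsub  = a - (n * m)
//   fnmadd = -a - (n * m)     fnmsub = -a + (n * m)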
static void FmaddFmsubHelper(double n,
                             double m,
                             double a,
                             double fmadd,
                             double fmsub,
                             double fnmadd,
                             double fnmsub) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  START();

  __ Fmov(d0, n);
  __ Fmov(d1, m);
  __ Fmov(d2, a);
  __ Fmadd(d28, d0, d1, d2);
  __ Fmsub(d29, d0, d1, d2);
  __ Fnmadd(d30, d0, d1, d2);
  __ Fnmsub(d31, d0, d1, d2);

  END();
  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP64(fmadd, d28);
    ASSERT_EQUAL_FP64(fmsub, d29);
    ASSERT_EQUAL_FP64(fnmadd, d30);
    ASSERT_EQUAL_FP64(fnmsub, d31);
  }
}


TEST(fmadd_fmsub_double) {
  // It's hard to check the result of fused operations because the only way to
  // calculate the result is using fma, which is what the Simulator uses anyway.
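  // (A host-side reference could in principle be computed with std::fma, for
  // example std::fma(n, m, a) for fmadd, but that relies on the same fused
  // primitive, so these tests use hand-picked inputs and expected values
  // instead.)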

  // Basic operation.
  FmaddFmsubHelper(1.0, 2.0, 3.0, 5.0, 1.0, -5.0, -1.0);
  FmaddFmsubHelper(-1.0, 2.0, 3.0, 1.0, 5.0, -1.0, -5.0);

  // Check the sign of exact zeroes.
  //               n     m     a     fmadd  fmsub  fnmadd fnmsub
  FmaddFmsubHelper(-0.0, +0.0, -0.0, -0.0, +0.0, +0.0, +0.0);
  FmaddFmsubHelper(+0.0, +0.0, -0.0, +0.0, -0.0, +0.0, +0.0);
  FmaddFmsubHelper(+0.0, +0.0, +0.0, +0.0, +0.0, -0.0, +0.0);
  FmaddFmsubHelper(-0.0, +0.0, +0.0, +0.0, +0.0, +0.0, -0.0);
  FmaddFmsubHelper(+0.0, -0.0, -0.0, -0.0, +0.0, +0.0, +0.0);
  FmaddFmsubHelper(-0.0, -0.0, -0.0, +0.0, -0.0, +0.0, +0.0);
  FmaddFmsubHelper(-0.0, -0.0, +0.0, +0.0, +0.0, -0.0, +0.0);
  FmaddFmsubHelper(+0.0, -0.0, +0.0, +0.0, +0.0, +0.0, -0.0);

  // Check NaN generation.
  FmaddFmsubHelper(kFP64PositiveInfinity,
                   0.0,
                   42.0,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN);
  FmaddFmsubHelper(0.0,
                   kFP64PositiveInfinity,
                   42.0,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN,
                   kFP64DefaultNaN);
  FmaddFmsubHelper(kFP64PositiveInfinity,
                   1.0,
                   kFP64PositiveInfinity,
                   kFP64PositiveInfinity,  //  inf + ( inf * 1) = inf
                   kFP64DefaultNaN,        //  inf + (-inf * 1) = NaN
                   kFP64NegativeInfinity,  // -inf + (-inf * 1) = -inf
                   kFP64DefaultNaN);       // -inf + ( inf * 1) = NaN
  FmaddFmsubHelper(kFP64NegativeInfinity,
                   1.0,
                   kFP64PositiveInfinity,
                   kFP64DefaultNaN,         //  inf + (-inf * 1) = NaN
                   kFP64PositiveInfinity,   //  inf + ( inf * 1) = inf
                   kFP64DefaultNaN,         // -inf + ( inf * 1) = NaN
                   kFP64NegativeInfinity);  // -inf + (-inf * 1) = -inf
}


static void FmaddFmsubHelper(float n,
                             float m,
                             float a,
                             float fmadd,
                             float fmsub,
                             float fnmadd,
                             float fnmsub) {
  SETUP_WITH_FEATURES(CPUFeatures::kFP);

  START();

  __ Fmov(s0, n);
  __ Fmov(s1, m);
  __ Fmov(s2, a);
  __ Fmadd(s28, s0, s1, s2);
  __ Fmsub(s29, s0, s1, s2);
  __ Fnmadd(s30, s0, s1, s2);
  __ Fnmsub(s31, s0, s1, s2);

  END();
  if (CAN_RUN()) {
    RUN();

    ASSERT_EQUAL_FP32(fmadd, s28);
    ASSERT_EQUAL_FP32(fmsub, s29);
    ASSERT_EQUAL_FP32(fnmadd, s30);
    ASSERT_EQUAL_FP32(fnmsub, s31);
  }
}


TEST(fmadd_fmsub_float) {
  // It's hard to check the result of fused operations because the only way to
  // calculate the result is using fma, which is what the simulator uses anyway.

  // Basic operation.
  FmaddFmsubHelper(1.0f, 2.0f, 3.0f, 5.0f, 1.0f, -5.0f, -1.0f);
  FmaddFmsubHelper(-1.0f, 2.0f, 3.0f, 1.0f, 5.0f, -1.0f, -5.0f);

  // Check the sign of exact zeroes.
  //               n      m      a      fmadd  fmsub  fnmadd fnmsub
  FmaddFmsubHelper(-0.0f, +0.0f, -0.0f, -0.0f, +0.0f, +0.0f, +0.0f);
  FmaddFmsubHelper(+0.0f, +0.0f, -0.0f, +0.0f, -0.0f, +0.0f, +0.0f);
  FmaddFmsubHelper(+0.0f, +0.0f, +0.0f, +0.0f, +0.0f, -0.0f, +0.0f);
  FmaddFmsubHelper(-0.0f, +0.0f, +0.0f, +0.0f, +0.0f, +0.0f, -0.0f);
  FmaddFmsubHelper(+0.0f, -0.0f, -0.0f, -0.0f, +0.0f, +0.0f, +0.0f);
  FmaddFmsubHelper(-0.0f, -0.0f, -0.0f, +0.0f, -0.0f, +0.0f, +0.0f);
  FmaddFmsubHelper(-0.0f, -0.0f, +0.0f, +0.0f, +0.0f, -0.0f, +0.0f);
  FmaddFmsubHelper(+0.0f, -0.0f, +0.0f, +0.0f, +0.0f, +0.0f, -0.0f);

  // Check NaN generation.
  FmaddFmsubHelper(kFP32PositiveInfinity,
                   0.0f,
                   42.0f,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN);
  FmaddFmsubHelper(0.0f,
                   kFP32PositiveInfinity,
                   42.0f,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN,
                   kFP32DefaultNaN);
  FmaddFmsubHelper(kFP32PositiveInfinity,
                   1.0f,
                   kFP32PositiveInfinity,
                   kFP32PositiveInfinity,  //  inf + ( inf * 1) = inf
                   kFP32DefaultNaN,        //  inf + (-inf * 1) = NaN
                   kFP32NegativeInfinity,  // -inf + (-inf * 1) = -inf
                   kFP32DefaultNaN);       // -inf + ( inf * 1) = NaN
  FmaddFmsubHelper(kFP32NegativeInfinity,
                   1.0f,
                   kFP32PositiveInfinity,
                   kFP32DefaultNaN,         //  inf + (-inf * 1) = NaN
                   kFP32PositiveInfinity,   //  inf + ( inf * 1) = inf
                   kFP32DefaultNaN,         // -inf + ( inf * 1) = NaN
                   kFP32NegativeInfinity);  // -inf + (-inf * 1) = -inf
}


TEST(fmadd_fmsub_double_nans) {
  // Make sure that NaN propagation works correctly.
  double sig1 = RawbitsToDouble(0x7ff5555511111111);
  double sig2 = RawbitsToDouble(0x7ff5555522222222);
  double siga = RawbitsToDouble(0x7ff55555aaaaaaaa);
  double qui1 = RawbitsToDouble(0x7ffaaaaa11111111);
  double qui2 = RawbitsToDouble(0x7ffaaaaa22222222);
  double quia = RawbitsToDouble(0x7ffaaaaaaaaaaaaa);
  VIXL_ASSERT(IsSignallingNaN(sig1));
  VIXL_ASSERT(IsSignallingNaN(sig2));
  VIXL_ASSERT(IsSignallingNaN(siga));
  VIXL_ASSERT(IsQuietNaN(qui1));
  VIXL_ASSERT(IsQuietNaN(qui2));
  VIXL_ASSERT(IsQuietNaN(quia));

  // The input NaNs after passing through ProcessNaN.
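  // (Quietening a signalling NaN sets the most significant fraction bit, bit
  // 51 for doubles, which is why 0x7ff5... becomes 0x7ffd... below; quiet
  // NaNs pass through unchanged.)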
  double sig1_proc = RawbitsToDouble(0x7ffd555511111111);
  double sig2_proc = RawbitsToDouble(0x7ffd555522222222);
  double siga_proc = RawbitsToDouble(0x7ffd5555aaaaaaaa);
  double qui1_proc = qui1;
  double qui2_proc = qui2;
  double quia_proc = quia;
  VIXL_ASSERT(IsQuietNaN(sig1_proc));
  VIXL_ASSERT(IsQuietNaN(sig2_proc));
  VIXL_ASSERT(IsQuietNaN(siga_proc));
  VIXL_ASSERT(IsQuietNaN(qui1_proc));
  VIXL_ASSERT(IsQuietNaN(qui2_proc));
  VIXL_ASSERT(IsQuietNaN(quia_proc));

  // Negated NaNs, as would be produced on ARMv8 hardware.
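  // (When an operation negates an input, a NaN propagated from that input is
  // returned with its sign bit inverted, hence the 0xfff... encodings.)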
  double sig1_proc_neg = RawbitsToDouble(0xfffd555511111111);
  double siga_proc_neg = RawbitsToDouble(0xfffd5555aaaaaaaa);
  double qui1_proc_neg = RawbitsToDouble(0xfffaaaaa11111111);
  double quia_proc_neg = RawbitsToDouble(0xfffaaaaaaaaaaaaa);
  VIXL_ASSERT(IsQuietNaN(sig1_proc_neg));
  VIXL_ASSERT(IsQuietNaN(siga_proc_neg));
  VIXL_ASSERT(IsQuietNaN(qui1_proc_neg));
  VIXL_ASSERT(IsQuietNaN(quia_proc_neg));

  // Quiet NaNs are propagated.
  FmaddFmsubHelper(qui1,
                   0,
                   0,
                   qui1_proc,
                   qui1_proc_neg,
                   qui1_proc_neg,
                   qui1_proc);
  FmaddFmsubHelper(0, qui2, 0, qui2_proc, qui2_proc, qui2_proc, qui2_proc);
  FmaddFmsubHelper(0,
                   0,
                   quia,
                   quia_proc,
                   quia_proc,
                   quia_proc_neg,
                   quia_proc_neg);
  FmaddFmsubHelper(qui1,
                   qui2,
                   0,
                   qui1_proc,
                   qui1_proc_neg,
                   qui1_proc_neg,
                   qui1_proc);
  FmaddFmsubHelper(0,
                   qui2,
                   quia,
                   quia_proc,
                   quia_proc,
                   quia_proc_neg,
                   quia_proc_neg);
  FmaddFmsubHelper(qui1,
                   0,
                   quia,
                   quia_proc,
                   quia_proc,
                   quia_proc_neg,
                   quia_proc_neg);
  FmaddFmsubHelper(qui1,
                   qui2,
                   quia,
                   quia_proc,
                   quia_proc,
                   quia_proc_neg,
                   quia_proc_neg);

  // Signalling NaNs are propagated, and made quiet.
  FmaddFmsubHelper(sig1,
                   0,
                   0,
                   sig1_proc,
                   sig1_proc_neg,
                   sig1_proc_neg,
                   sig1_proc);
  FmaddFmsubHelper(0, sig2, 0, sig2_proc, sig2_proc, sig2_proc, sig2_proc);
  FmaddFmsubHelper(0,
                   0,
                   siga,
                   siga_proc,
                   siga_proc,
                   siga_proc_neg,
                   siga_proc_neg);
  FmaddFmsubHelper(sig1,
                   sig2,
                   0,
                   sig1_proc,
                   sig1_proc_neg,
                   sig1_proc_neg,
                   sig1_proc);
  FmaddFmsubHelper(0,
                   sig2,
                   siga,
                   siga_proc,
                   siga_proc,
                   siga_proc_neg,
                   siga_proc_neg);
  FmaddFmsubHelper(sig1,
                   0,
                   siga,
                   siga_proc,
                   siga_proc,
                   siga_proc_neg,
                   siga_proc_neg);
  FmaddFmsubHelper(sig1,
                   sig2,
                   siga,
                   siga_proc,
                   siga_proc,
                   siga_proc_neg,
                   siga_proc_neg);

  // Signalling NaNs take precedence over quiet NaNs.
1036*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1037*f5c631daSSadaf Ebrahimi                    qui2,
1038*f5c631daSSadaf Ebrahimi                    quia,
1039*f5c631daSSadaf Ebrahimi                    sig1_proc,
1040*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1041*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1042*f5c631daSSadaf Ebrahimi                    sig1_proc);
1043*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1044*f5c631daSSadaf Ebrahimi                    sig2,
1045*f5c631daSSadaf Ebrahimi                    quia,
1046*f5c631daSSadaf Ebrahimi                    sig2_proc,
1047*f5c631daSSadaf Ebrahimi                    sig2_proc,
1048*f5c631daSSadaf Ebrahimi                    sig2_proc,
1049*f5c631daSSadaf Ebrahimi                    sig2_proc);
1050*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1051*f5c631daSSadaf Ebrahimi                    qui2,
1052*f5c631daSSadaf Ebrahimi                    siga,
1053*f5c631daSSadaf Ebrahimi                    siga_proc,
1054*f5c631daSSadaf Ebrahimi                    siga_proc,
1055*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1056*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1057*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1058*f5c631daSSadaf Ebrahimi                    sig2,
1059*f5c631daSSadaf Ebrahimi                    quia,
1060*f5c631daSSadaf Ebrahimi                    sig1_proc,
1061*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1062*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1063*f5c631daSSadaf Ebrahimi                    sig1_proc);
1064*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1065*f5c631daSSadaf Ebrahimi                    sig2,
1066*f5c631daSSadaf Ebrahimi                    siga,
1067*f5c631daSSadaf Ebrahimi                    siga_proc,
1068*f5c631daSSadaf Ebrahimi                    siga_proc,
1069*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1070*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1071*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1072*f5c631daSSadaf Ebrahimi                    qui2,
1073*f5c631daSSadaf Ebrahimi                    siga,
1074*f5c631daSSadaf Ebrahimi                    siga_proc,
1075*f5c631daSSadaf Ebrahimi                    siga_proc,
1076*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1077*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1078*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1079*f5c631daSSadaf Ebrahimi                    sig2,
1080*f5c631daSSadaf Ebrahimi                    siga,
1081*f5c631daSSadaf Ebrahimi                    siga_proc,
1082*f5c631daSSadaf Ebrahimi                    siga_proc,
1083*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1084*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1085*f5c631daSSadaf Ebrahimi 
1086*f5c631daSSadaf Ebrahimi   // A NaN generated by the intermediate op1 * op2 overrides a quiet NaN in a.
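  // 0 * infinity is an invalid operation, so the multiply produces the
  // default NaN, which is the expected fused result in each case below.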
1087*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1088*f5c631daSSadaf Ebrahimi                    kFP64PositiveInfinity,
1089*f5c631daSSadaf Ebrahimi                    quia,
1090*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1091*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1092*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1093*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN);
1094*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(kFP64PositiveInfinity,
1095*f5c631daSSadaf Ebrahimi                    0,
1096*f5c631daSSadaf Ebrahimi                    quia,
1097*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1098*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1099*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1100*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN);
1101*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1102*f5c631daSSadaf Ebrahimi                    kFP64NegativeInfinity,
1103*f5c631daSSadaf Ebrahimi                    quia,
1104*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1105*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1106*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1107*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN);
1108*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(kFP64NegativeInfinity,
1109*f5c631daSSadaf Ebrahimi                    0,
1110*f5c631daSSadaf Ebrahimi                    quia,
1111*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1112*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1113*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN,
1114*f5c631daSSadaf Ebrahimi                    kFP64DefaultNaN);
1115*f5c631daSSadaf Ebrahimi }
1116*f5c631daSSadaf Ebrahimi 
1117*f5c631daSSadaf Ebrahimi 
1118*f5c631daSSadaf Ebrahimi TEST(fmadd_fmsub_float_nans) {
1119*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
1120*f5c631daSSadaf Ebrahimi   float sig1 = RawbitsToFloat(0x7f951111);
1121*f5c631daSSadaf Ebrahimi   float sig2 = RawbitsToFloat(0x7f952222);
1122*f5c631daSSadaf Ebrahimi   float siga = RawbitsToFloat(0x7f95aaaa);
1123*f5c631daSSadaf Ebrahimi   float qui1 = RawbitsToFloat(0x7fea1111);
1124*f5c631daSSadaf Ebrahimi   float qui2 = RawbitsToFloat(0x7fea2222);
1125*f5c631daSSadaf Ebrahimi   float quia = RawbitsToFloat(0x7feaaaaa);
1126*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sig1));
1127*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sig2));
1128*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(siga));
1129*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qui1));
1130*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qui2));
1131*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(quia));
1132*f5c631daSSadaf Ebrahimi 
1133*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
1134*f5c631daSSadaf Ebrahimi   float sig1_proc = RawbitsToFloat(0x7fd51111);
1135*f5c631daSSadaf Ebrahimi   float sig2_proc = RawbitsToFloat(0x7fd52222);
1136*f5c631daSSadaf Ebrahimi   float siga_proc = RawbitsToFloat(0x7fd5aaaa);
1137*f5c631daSSadaf Ebrahimi   float qui1_proc = qui1;
1138*f5c631daSSadaf Ebrahimi   float qui2_proc = qui2;
1139*f5c631daSSadaf Ebrahimi   float quia_proc = quia;
1140*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sig1_proc));
1141*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sig2_proc));
1142*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(siga_proc));
1143*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qui1_proc));
1144*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qui2_proc));
1145*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(quia_proc));
1146*f5c631daSSadaf Ebrahimi 
1147*f5c631daSSadaf Ebrahimi   // The input NaNs negated, as ARMv8 hardware would negate them.
1148*f5c631daSSadaf Ebrahimi   float sig1_proc_neg = RawbitsToFloat(0xffd51111);
1149*f5c631daSSadaf Ebrahimi   float siga_proc_neg = RawbitsToFloat(0xffd5aaaa);
1150*f5c631daSSadaf Ebrahimi   float qui1_proc_neg = RawbitsToFloat(0xffea1111);
1151*f5c631daSSadaf Ebrahimi   float quia_proc_neg = RawbitsToFloat(0xffeaaaaa);
1152*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sig1_proc_neg));
1153*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(siga_proc_neg));
1154*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qui1_proc_neg));
1155*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(quia_proc_neg));
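  // As in the double-precision test above: quieting a signalling NaN sets the
  // quiet bit (0x00400000 for single precision), and negated operands flip
  // the sign bit of the propagated NaN.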
1156*f5c631daSSadaf Ebrahimi 
1157*f5c631daSSadaf Ebrahimi   // Quiet NaNs are propagated.
1158*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1159*f5c631daSSadaf Ebrahimi                    0,
1160*f5c631daSSadaf Ebrahimi                    0,
1161*f5c631daSSadaf Ebrahimi                    qui1_proc,
1162*f5c631daSSadaf Ebrahimi                    qui1_proc_neg,
1163*f5c631daSSadaf Ebrahimi                    qui1_proc_neg,
1164*f5c631daSSadaf Ebrahimi                    qui1_proc);
1165*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0, qui2, 0, qui2_proc, qui2_proc, qui2_proc, qui2_proc);
1166*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1167*f5c631daSSadaf Ebrahimi                    0,
1168*f5c631daSSadaf Ebrahimi                    quia,
1169*f5c631daSSadaf Ebrahimi                    quia_proc,
1170*f5c631daSSadaf Ebrahimi                    quia_proc,
1171*f5c631daSSadaf Ebrahimi                    quia_proc_neg,
1172*f5c631daSSadaf Ebrahimi                    quia_proc_neg);
1173*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1174*f5c631daSSadaf Ebrahimi                    qui2,
1175*f5c631daSSadaf Ebrahimi                    0,
1176*f5c631daSSadaf Ebrahimi                    qui1_proc,
1177*f5c631daSSadaf Ebrahimi                    qui1_proc_neg,
1178*f5c631daSSadaf Ebrahimi                    qui1_proc_neg,
1179*f5c631daSSadaf Ebrahimi                    qui1_proc);
1180*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1181*f5c631daSSadaf Ebrahimi                    qui2,
1182*f5c631daSSadaf Ebrahimi                    quia,
1183*f5c631daSSadaf Ebrahimi                    quia_proc,
1184*f5c631daSSadaf Ebrahimi                    quia_proc,
1185*f5c631daSSadaf Ebrahimi                    quia_proc_neg,
1186*f5c631daSSadaf Ebrahimi                    quia_proc_neg);
1187*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1188*f5c631daSSadaf Ebrahimi                    0,
1189*f5c631daSSadaf Ebrahimi                    quia,
1190*f5c631daSSadaf Ebrahimi                    quia_proc,
1191*f5c631daSSadaf Ebrahimi                    quia_proc,
1192*f5c631daSSadaf Ebrahimi                    quia_proc_neg,
1193*f5c631daSSadaf Ebrahimi                    quia_proc_neg);
1194*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1195*f5c631daSSadaf Ebrahimi                    qui2,
1196*f5c631daSSadaf Ebrahimi                    quia,
1197*f5c631daSSadaf Ebrahimi                    quia_proc,
1198*f5c631daSSadaf Ebrahimi                    quia_proc,
1199*f5c631daSSadaf Ebrahimi                    quia_proc_neg,
1200*f5c631daSSadaf Ebrahimi                    quia_proc_neg);
1201*f5c631daSSadaf Ebrahimi 
1202*f5c631daSSadaf Ebrahimi   // Signalling NaNs are propagated, and made quiet.
1203*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1204*f5c631daSSadaf Ebrahimi                    0,
1205*f5c631daSSadaf Ebrahimi                    0,
1206*f5c631daSSadaf Ebrahimi                    sig1_proc,
1207*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1208*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1209*f5c631daSSadaf Ebrahimi                    sig1_proc);
1210*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0, sig2, 0, sig2_proc, sig2_proc, sig2_proc, sig2_proc);
1211*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1212*f5c631daSSadaf Ebrahimi                    0,
1213*f5c631daSSadaf Ebrahimi                    siga,
1214*f5c631daSSadaf Ebrahimi                    siga_proc,
1215*f5c631daSSadaf Ebrahimi                    siga_proc,
1216*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1217*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1218*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1219*f5c631daSSadaf Ebrahimi                    sig2,
1220*f5c631daSSadaf Ebrahimi                    0,
1221*f5c631daSSadaf Ebrahimi                    sig1_proc,
1222*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1223*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1224*f5c631daSSadaf Ebrahimi                    sig1_proc);
1225*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1226*f5c631daSSadaf Ebrahimi                    sig2,
1227*f5c631daSSadaf Ebrahimi                    siga,
1228*f5c631daSSadaf Ebrahimi                    siga_proc,
1229*f5c631daSSadaf Ebrahimi                    siga_proc,
1230*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1231*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1232*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1233*f5c631daSSadaf Ebrahimi                    0,
1234*f5c631daSSadaf Ebrahimi                    siga,
1235*f5c631daSSadaf Ebrahimi                    siga_proc,
1236*f5c631daSSadaf Ebrahimi                    siga_proc,
1237*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1238*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1239*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1240*f5c631daSSadaf Ebrahimi                    sig2,
1241*f5c631daSSadaf Ebrahimi                    siga,
1242*f5c631daSSadaf Ebrahimi                    siga_proc,
1243*f5c631daSSadaf Ebrahimi                    siga_proc,
1244*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1245*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1246*f5c631daSSadaf Ebrahimi 
1247*f5c631daSSadaf Ebrahimi   // Signalling NaNs take precedence over quiet NaNs.
1248*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1249*f5c631daSSadaf Ebrahimi                    qui2,
1250*f5c631daSSadaf Ebrahimi                    quia,
1251*f5c631daSSadaf Ebrahimi                    sig1_proc,
1252*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1253*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1254*f5c631daSSadaf Ebrahimi                    sig1_proc);
1255*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1256*f5c631daSSadaf Ebrahimi                    sig2,
1257*f5c631daSSadaf Ebrahimi                    quia,
1258*f5c631daSSadaf Ebrahimi                    sig2_proc,
1259*f5c631daSSadaf Ebrahimi                    sig2_proc,
1260*f5c631daSSadaf Ebrahimi                    sig2_proc,
1261*f5c631daSSadaf Ebrahimi                    sig2_proc);
1262*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1263*f5c631daSSadaf Ebrahimi                    qui2,
1264*f5c631daSSadaf Ebrahimi                    siga,
1265*f5c631daSSadaf Ebrahimi                    siga_proc,
1266*f5c631daSSadaf Ebrahimi                    siga_proc,
1267*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1268*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1269*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1270*f5c631daSSadaf Ebrahimi                    sig2,
1271*f5c631daSSadaf Ebrahimi                    quia,
1272*f5c631daSSadaf Ebrahimi                    sig1_proc,
1273*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1274*f5c631daSSadaf Ebrahimi                    sig1_proc_neg,
1275*f5c631daSSadaf Ebrahimi                    sig1_proc);
1276*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(qui1,
1277*f5c631daSSadaf Ebrahimi                    sig2,
1278*f5c631daSSadaf Ebrahimi                    siga,
1279*f5c631daSSadaf Ebrahimi                    siga_proc,
1280*f5c631daSSadaf Ebrahimi                    siga_proc,
1281*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1282*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1283*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1284*f5c631daSSadaf Ebrahimi                    qui2,
1285*f5c631daSSadaf Ebrahimi                    siga,
1286*f5c631daSSadaf Ebrahimi                    siga_proc,
1287*f5c631daSSadaf Ebrahimi                    siga_proc,
1288*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1289*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1290*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(sig1,
1291*f5c631daSSadaf Ebrahimi                    sig2,
1292*f5c631daSSadaf Ebrahimi                    siga,
1293*f5c631daSSadaf Ebrahimi                    siga_proc,
1294*f5c631daSSadaf Ebrahimi                    siga_proc,
1295*f5c631daSSadaf Ebrahimi                    siga_proc_neg,
1296*f5c631daSSadaf Ebrahimi                    siga_proc_neg);
1297*f5c631daSSadaf Ebrahimi 
1298*f5c631daSSadaf Ebrahimi   // A NaN generated by the intermediate op1 * op2 overrides a quiet NaN in a.
1299*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1300*f5c631daSSadaf Ebrahimi                    kFP32PositiveInfinity,
1301*f5c631daSSadaf Ebrahimi                    quia,
1302*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1303*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1304*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1305*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN);
1306*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(kFP32PositiveInfinity,
1307*f5c631daSSadaf Ebrahimi                    0,
1308*f5c631daSSadaf Ebrahimi                    quia,
1309*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1310*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1311*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1312*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN);
1313*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(0,
1314*f5c631daSSadaf Ebrahimi                    kFP32NegativeInfinity,
1315*f5c631daSSadaf Ebrahimi                    quia,
1316*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1317*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1318*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1319*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN);
1320*f5c631daSSadaf Ebrahimi   FmaddFmsubHelper(kFP32NegativeInfinity,
1321*f5c631daSSadaf Ebrahimi                    0,
1322*f5c631daSSadaf Ebrahimi                    quia,
1323*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1324*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1325*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN,
1326*f5c631daSSadaf Ebrahimi                    kFP32DefaultNaN);
1327*f5c631daSSadaf Ebrahimi }
1328*f5c631daSSadaf Ebrahimi 
1329*f5c631daSSadaf Ebrahimi 
1330*f5c631daSSadaf Ebrahimi TEST(fdiv) {
1331*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
1332*f5c631daSSadaf Ebrahimi 
1333*f5c631daSSadaf Ebrahimi   START();
1334*f5c631daSSadaf Ebrahimi   __ Fmov(s14, -0.0f);
1335*f5c631daSSadaf Ebrahimi   __ Fmov(s15, kFP32PositiveInfinity);
1336*f5c631daSSadaf Ebrahimi   __ Fmov(s16, kFP32NegativeInfinity);
1337*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 3.25f);
1338*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 2.0f);
1339*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 2.0f);
1340*f5c631daSSadaf Ebrahimi   __ Fmov(s20, -2.0f);
1341*f5c631daSSadaf Ebrahimi 
1342*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
1343*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
1344*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
1345*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0.0);
1346*f5c631daSSadaf Ebrahimi   __ Fmov(d30, -2.0);
1347*f5c631daSSadaf Ebrahimi   __ Fmov(d31, 2.25);
1348*f5c631daSSadaf Ebrahimi 
1349*f5c631daSSadaf Ebrahimi   __ Fdiv(s0, s17, s18);
1350*f5c631daSSadaf Ebrahimi   __ Fdiv(s1, s18, s19);
1351*f5c631daSSadaf Ebrahimi   __ Fdiv(s2, s14, s18);
1352*f5c631daSSadaf Ebrahimi   __ Fdiv(s3, s18, s15);
1353*f5c631daSSadaf Ebrahimi   __ Fdiv(s4, s18, s16);
1354*f5c631daSSadaf Ebrahimi   __ Fdiv(s5, s15, s16);
1355*f5c631daSSadaf Ebrahimi   __ Fdiv(s6, s14, s14);
1356*f5c631daSSadaf Ebrahimi 
1357*f5c631daSSadaf Ebrahimi   __ Fdiv(d7, d31, d30);
1358*f5c631daSSadaf Ebrahimi   __ Fdiv(d8, d29, d31);
1359*f5c631daSSadaf Ebrahimi   __ Fdiv(d9, d26, d31);
1360*f5c631daSSadaf Ebrahimi   __ Fdiv(d10, d31, d27);
1361*f5c631daSSadaf Ebrahimi   __ Fdiv(d11, d31, d28);
1362*f5c631daSSadaf Ebrahimi   __ Fdiv(d12, d28, d27);
1363*f5c631daSSadaf Ebrahimi   __ Fdiv(d13, d29, d29);
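  // Note: s5 (+inf / -inf), s6 (-0 / -0), d12 (-inf / +inf) and d13 (0 / 0)
  // are invalid operations, so the default NaN is expected for them below.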
1364*f5c631daSSadaf Ebrahimi   END();
1365*f5c631daSSadaf Ebrahimi 
1366*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1367*f5c631daSSadaf Ebrahimi     RUN();
1368*f5c631daSSadaf Ebrahimi 
1369*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.625f, s0);
1370*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0f, s1);
1371*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0f, s2);
1372*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0f, s3);
1373*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0f, s4);
1374*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5);
1375*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6);
1376*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.125, d7);
1377*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d8);
1378*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d9);
1379*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d10);
1380*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
1381*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12);
1382*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
1383*f5c631daSSadaf Ebrahimi   }
1384*f5c631daSSadaf Ebrahimi }
1385*f5c631daSSadaf Ebrahimi 
1386*f5c631daSSadaf Ebrahimi 
1387*f5c631daSSadaf Ebrahimi TEST(fdiv_h) {
1388*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);
1389*f5c631daSSadaf Ebrahimi 
1390*f5c631daSSadaf Ebrahimi   START();
1391*f5c631daSSadaf Ebrahimi   __ Fmov(h14, -0.0f);
1392*f5c631daSSadaf Ebrahimi   __ Fmov(h15, kFP16PositiveInfinity);
1393*f5c631daSSadaf Ebrahimi   __ Fmov(h16, kFP16NegativeInfinity);
1394*f5c631daSSadaf Ebrahimi   __ Fmov(h17, 3.25f);
1395*f5c631daSSadaf Ebrahimi   __ Fmov(h18, 2.0f);
1396*f5c631daSSadaf Ebrahimi   __ Fmov(h19, 2.0f);
1397*f5c631daSSadaf Ebrahimi   __ Fmov(h20, -2.0f);
1398*f5c631daSSadaf Ebrahimi 
1399*f5c631daSSadaf Ebrahimi   __ Fdiv(h0, h17, h18);
1400*f5c631daSSadaf Ebrahimi   __ Fdiv(h1, h18, h19);
1401*f5c631daSSadaf Ebrahimi   __ Fdiv(h2, h14, h18);
1402*f5c631daSSadaf Ebrahimi   __ Fdiv(h3, h18, h15);
1403*f5c631daSSadaf Ebrahimi   __ Fdiv(h4, h18, h16);
1404*f5c631daSSadaf Ebrahimi   __ Fdiv(h5, h15, h16);
1405*f5c631daSSadaf Ebrahimi   __ Fdiv(h6, h14, h14);
1406*f5c631daSSadaf Ebrahimi   END();
1407*f5c631daSSadaf Ebrahimi 
1408*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1409*f5c631daSSadaf Ebrahimi     RUN();
1410*f5c631daSSadaf Ebrahimi 
1411*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(1.625f), h0);
1412*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(1.0f), h1);
1413*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(-0.0f), h2);
1414*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(0.0f), h3);
1415*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(-0.0f), h4);
1416*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(kFP16DefaultNaN, h5);
1417*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(kFP16DefaultNaN, h6);
1418*f5c631daSSadaf Ebrahimi   }
1419*f5c631daSSadaf Ebrahimi }
1420*f5c631daSSadaf Ebrahimi 
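// Reference model for Fmin/Fmax (and, with a substitute, Fminnm/Fmaxnm): a
// signalling NaN operand is quieted and returned; with no substitute a quiet
// NaN propagates; with a substitute, a single quiet NaN is replaced (the
// callers pass +/-infinity so the numeric operand wins); and min(+0, -0) is
// -0 while max(+0, -0) is +0.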
1421*f5c631daSSadaf Ebrahimi static float MinMaxHelper(float n,
1422*f5c631daSSadaf Ebrahimi                           float m,
1423*f5c631daSSadaf Ebrahimi                           bool min,
1424*f5c631daSSadaf Ebrahimi                           float quiet_nan_substitute = 0.0) {
1425*f5c631daSSadaf Ebrahimi   const uint64_t kFP32QuietNaNMask = 0x00400000;
1426*f5c631daSSadaf Ebrahimi   uint32_t raw_n = FloatToRawbits(n);
1427*f5c631daSSadaf Ebrahimi   uint32_t raw_m = FloatToRawbits(m);
1428*f5c631daSSadaf Ebrahimi 
1429*f5c631daSSadaf Ebrahimi   if (IsNaN(n) && ((raw_n & kFP32QuietNaNMask) == 0)) {
1430*f5c631daSSadaf Ebrahimi     // n is signalling NaN.
1431*f5c631daSSadaf Ebrahimi     return RawbitsToFloat(raw_n | kFP32QuietNaNMask);
1432*f5c631daSSadaf Ebrahimi   } else if (IsNaN(m) && ((raw_m & kFP32QuietNaNMask) == 0)) {
1433*f5c631daSSadaf Ebrahimi     // m is signalling NaN.
1434*f5c631daSSadaf Ebrahimi     return RawbitsToFloat(raw_m | kFP32QuietNaNMask);
1435*f5c631daSSadaf Ebrahimi   } else if (quiet_nan_substitute == 0.0) {
1436*f5c631daSSadaf Ebrahimi     if (IsNaN(n)) {
1437*f5c631daSSadaf Ebrahimi       // n is quiet NaN.
1438*f5c631daSSadaf Ebrahimi       return n;
1439*f5c631daSSadaf Ebrahimi     } else if (IsNaN(m)) {
1440*f5c631daSSadaf Ebrahimi       // m is quiet NaN.
1441*f5c631daSSadaf Ebrahimi       return m;
1442*f5c631daSSadaf Ebrahimi     }
1443*f5c631daSSadaf Ebrahimi   } else {
1444*f5c631daSSadaf Ebrahimi     // Substitute n or m if exactly one of them is a quiet NaN.
1445*f5c631daSSadaf Ebrahimi     if (IsNaN(n) && !IsNaN(m)) {
1446*f5c631daSSadaf Ebrahimi       // n is quiet NaN: replace with substitute.
1447*f5c631daSSadaf Ebrahimi       n = quiet_nan_substitute;
1448*f5c631daSSadaf Ebrahimi     } else if (!IsNaN(n) && IsNaN(m)) {
1449*f5c631daSSadaf Ebrahimi       // m is quiet NaN: replace with substitute.
1450*f5c631daSSadaf Ebrahimi       m = quiet_nan_substitute;
1451*f5c631daSSadaf Ebrahimi     }
1452*f5c631daSSadaf Ebrahimi   }
1453*f5c631daSSadaf Ebrahimi 
1454*f5c631daSSadaf Ebrahimi   if ((n == 0.0) && (m == 0.0) && (copysign(1.0, n) != copysign(1.0, m))) {
1455*f5c631daSSadaf Ebrahimi     return min ? -0.0 : 0.0;
1456*f5c631daSSadaf Ebrahimi   }
1457*f5c631daSSadaf Ebrahimi 
1458*f5c631daSSadaf Ebrahimi   return min ? fminf(n, m) : fmaxf(n, m);
1459*f5c631daSSadaf Ebrahimi }
1460*f5c631daSSadaf Ebrahimi 
1461*f5c631daSSadaf Ebrahimi 
1462*f5c631daSSadaf Ebrahimi static double MinMaxHelper(double n,
1463*f5c631daSSadaf Ebrahimi                            double m,
1464*f5c631daSSadaf Ebrahimi                            bool min,
1465*f5c631daSSadaf Ebrahimi                            double quiet_nan_substitute = 0.0) {
1466*f5c631daSSadaf Ebrahimi   const uint64_t kFP64QuietNaNMask = 0x0008000000000000;
1467*f5c631daSSadaf Ebrahimi   uint64_t raw_n = DoubleToRawbits(n);
1468*f5c631daSSadaf Ebrahimi   uint64_t raw_m = DoubleToRawbits(m);
1469*f5c631daSSadaf Ebrahimi 
1470*f5c631daSSadaf Ebrahimi   if (IsNaN(n) && ((raw_n & kFP64QuietNaNMask) == 0)) {
1471*f5c631daSSadaf Ebrahimi     // n is signalling NaN.
1472*f5c631daSSadaf Ebrahimi     return RawbitsToDouble(raw_n | kFP64QuietNaNMask);
1473*f5c631daSSadaf Ebrahimi   } else if (IsNaN(m) && ((raw_m & kFP64QuietNaNMask) == 0)) {
1474*f5c631daSSadaf Ebrahimi     // m is signalling NaN.
1475*f5c631daSSadaf Ebrahimi     return RawbitsToDouble(raw_m | kFP64QuietNaNMask);
1476*f5c631daSSadaf Ebrahimi   } else if (quiet_nan_substitute == 0.0) {
1477*f5c631daSSadaf Ebrahimi     if (IsNaN(n)) {
1478*f5c631daSSadaf Ebrahimi       // n is quiet NaN.
1479*f5c631daSSadaf Ebrahimi       return n;
1480*f5c631daSSadaf Ebrahimi     } else if (IsNaN(m)) {
1481*f5c631daSSadaf Ebrahimi       // m is quiet NaN.
1482*f5c631daSSadaf Ebrahimi       return m;
1483*f5c631daSSadaf Ebrahimi     }
1484*f5c631daSSadaf Ebrahimi   } else {
1485*f5c631daSSadaf Ebrahimi     // Substitute n or m if exactly one of them is a quiet NaN.
1486*f5c631daSSadaf Ebrahimi     if (IsNaN(n) && !IsNaN(m)) {
1487*f5c631daSSadaf Ebrahimi       // n is quiet NaN: replace with substitute.
1488*f5c631daSSadaf Ebrahimi       n = quiet_nan_substitute;
1489*f5c631daSSadaf Ebrahimi     } else if (!IsNaN(n) && IsNaN(m)) {
1490*f5c631daSSadaf Ebrahimi       // m is quiet NaN: replace with substitute.
1491*f5c631daSSadaf Ebrahimi       m = quiet_nan_substitute;
1492*f5c631daSSadaf Ebrahimi     }
1493*f5c631daSSadaf Ebrahimi   }
1494*f5c631daSSadaf Ebrahimi 
1495*f5c631daSSadaf Ebrahimi   if ((n == 0.0) && (m == 0.0) && (copysign(1.0, n) != copysign(1.0, m))) {
1496*f5c631daSSadaf Ebrahimi     return min ? -0.0 : 0.0;
1497*f5c631daSSadaf Ebrahimi   }
1498*f5c631daSSadaf Ebrahimi 
1499*f5c631daSSadaf Ebrahimi   return min ? fmin(n, m) : fmax(n, m);
1500*f5c631daSSadaf Ebrahimi }
1501*f5c631daSSadaf Ebrahimi 
1502*f5c631daSSadaf Ebrahimi 
1503*f5c631daSSadaf Ebrahimi static void FminFmaxDoubleHelper(
1504*f5c631daSSadaf Ebrahimi     double n, double m, double min, double max, double minnm, double maxnm) {
1505*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
1506*f5c631daSSadaf Ebrahimi 
1507*f5c631daSSadaf Ebrahimi   START();
1508*f5c631daSSadaf Ebrahimi   __ Fmov(d0, n);
1509*f5c631daSSadaf Ebrahimi   __ Fmov(d1, m);
1510*f5c631daSSadaf Ebrahimi   __ Fmin(d28, d0, d1);
1511*f5c631daSSadaf Ebrahimi   __ Fmax(d29, d0, d1);
1512*f5c631daSSadaf Ebrahimi   __ Fminnm(d30, d0, d1);
1513*f5c631daSSadaf Ebrahimi   __ Fmaxnm(d31, d0, d1);
1514*f5c631daSSadaf Ebrahimi   END();
1515*f5c631daSSadaf Ebrahimi 
1516*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1517*f5c631daSSadaf Ebrahimi     RUN();
1518*f5c631daSSadaf Ebrahimi 
1519*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(min, d28);
1520*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(max, d29);
1521*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(minnm, d30);
1522*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(maxnm, d31);
1523*f5c631daSSadaf Ebrahimi   }
1524*f5c631daSSadaf Ebrahimi }
1525*f5c631daSSadaf Ebrahimi 
1526*f5c631daSSadaf Ebrahimi 
1527*f5c631daSSadaf Ebrahimi TEST(fmax_fmin_d) {
1528*f5c631daSSadaf Ebrahimi   // Use non-standard NaNs to check that the payload bits are preserved.
1529*f5c631daSSadaf Ebrahimi   double snan = RawbitsToDouble(0x7ff5555512345678);
1530*f5c631daSSadaf Ebrahimi   double qnan = RawbitsToDouble(0x7ffaaaaa87654321);
1531*f5c631daSSadaf Ebrahimi 
1532*f5c631daSSadaf Ebrahimi   double snan_processed = RawbitsToDouble(0x7ffd555512345678);
1533*f5c631daSSadaf Ebrahimi   double qnan_processed = qnan;
1534*f5c631daSSadaf Ebrahimi 
1535*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(snan));
1536*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qnan));
1537*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(snan_processed));
1538*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qnan_processed));
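  // Fmin/Fmax propagate a quiet NaN, but Fminnm/Fmaxnm treat a single quiet
  // NaN as missing data and return the numeric operand, hence the qnan cases
  // below expect qnan_processed for min/max but 0 for minnm/maxnm.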
1539*f5c631daSSadaf Ebrahimi 
1540*f5c631daSSadaf Ebrahimi   // Bootstrap tests.
1541*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(0, 0, 0, 0, 0, 0);
1542*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(0, 1, 0, 1, 0, 1);
1543*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(kFP64PositiveInfinity,
1544*f5c631daSSadaf Ebrahimi                        kFP64NegativeInfinity,
1545*f5c631daSSadaf Ebrahimi                        kFP64NegativeInfinity,
1546*f5c631daSSadaf Ebrahimi                        kFP64PositiveInfinity,
1547*f5c631daSSadaf Ebrahimi                        kFP64NegativeInfinity,
1548*f5c631daSSadaf Ebrahimi                        kFP64PositiveInfinity);
1549*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(snan,
1550*f5c631daSSadaf Ebrahimi                        0,
1551*f5c631daSSadaf Ebrahimi                        snan_processed,
1552*f5c631daSSadaf Ebrahimi                        snan_processed,
1553*f5c631daSSadaf Ebrahimi                        snan_processed,
1554*f5c631daSSadaf Ebrahimi                        snan_processed);
1555*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(0,
1556*f5c631daSSadaf Ebrahimi                        snan,
1557*f5c631daSSadaf Ebrahimi                        snan_processed,
1558*f5c631daSSadaf Ebrahimi                        snan_processed,
1559*f5c631daSSadaf Ebrahimi                        snan_processed,
1560*f5c631daSSadaf Ebrahimi                        snan_processed);
1561*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(qnan, 0, qnan_processed, qnan_processed, 0, 0);
1562*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(0, qnan, qnan_processed, qnan_processed, 0, 0);
1563*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(qnan,
1564*f5c631daSSadaf Ebrahimi                        snan,
1565*f5c631daSSadaf Ebrahimi                        snan_processed,
1566*f5c631daSSadaf Ebrahimi                        snan_processed,
1567*f5c631daSSadaf Ebrahimi                        snan_processed,
1568*f5c631daSSadaf Ebrahimi                        snan_processed);
1569*f5c631daSSadaf Ebrahimi   FminFmaxDoubleHelper(snan,
1570*f5c631daSSadaf Ebrahimi                        qnan,
1571*f5c631daSSadaf Ebrahimi                        snan_processed,
1572*f5c631daSSadaf Ebrahimi                        snan_processed,
1573*f5c631daSSadaf Ebrahimi                        snan_processed,
1574*f5c631daSSadaf Ebrahimi                        snan_processed);
1575*f5c631daSSadaf Ebrahimi 
1576*f5c631daSSadaf Ebrahimi   // Iterate over all combinations of inputs.
1577*f5c631daSSadaf Ebrahimi   double inputs[] = {DBL_MAX,
1578*f5c631daSSadaf Ebrahimi                      DBL_MIN,
1579*f5c631daSSadaf Ebrahimi                      1.0,
1580*f5c631daSSadaf Ebrahimi                      0.0,
1581*f5c631daSSadaf Ebrahimi                      -DBL_MAX,
1582*f5c631daSSadaf Ebrahimi                      -DBL_MIN,
1583*f5c631daSSadaf Ebrahimi                      -1.0,
1584*f5c631daSSadaf Ebrahimi                      -0.0,
1585*f5c631daSSadaf Ebrahimi                      kFP64PositiveInfinity,
1586*f5c631daSSadaf Ebrahimi                      kFP64NegativeInfinity,
1587*f5c631daSSadaf Ebrahimi                      kFP64QuietNaN,
1588*f5c631daSSadaf Ebrahimi                      kFP64SignallingNaN};
1589*f5c631daSSadaf Ebrahimi 
1590*f5c631daSSadaf Ebrahimi   const int count = sizeof(inputs) / sizeof(inputs[0]);
1591*f5c631daSSadaf Ebrahimi 
1592*f5c631daSSadaf Ebrahimi   for (int in = 0; in < count; in++) {
1593*f5c631daSSadaf Ebrahimi     double n = inputs[in];
1594*f5c631daSSadaf Ebrahimi     for (int im = 0; im < count; im++) {
1595*f5c631daSSadaf Ebrahimi       double m = inputs[im];
1596*f5c631daSSadaf Ebrahimi       FminFmaxDoubleHelper(n,
1597*f5c631daSSadaf Ebrahimi                            m,
1598*f5c631daSSadaf Ebrahimi                            MinMaxHelper(n, m, true),
1599*f5c631daSSadaf Ebrahimi                            MinMaxHelper(n, m, false),
1600*f5c631daSSadaf Ebrahimi                            MinMaxHelper(n, m, true, kFP64PositiveInfinity),
1601*f5c631daSSadaf Ebrahimi                            MinMaxHelper(n, m, false, kFP64NegativeInfinity));
1602*f5c631daSSadaf Ebrahimi     }
1603*f5c631daSSadaf Ebrahimi   }
1604*f5c631daSSadaf Ebrahimi }
1605*f5c631daSSadaf Ebrahimi 
1606*f5c631daSSadaf Ebrahimi 
1607*f5c631daSSadaf Ebrahimi static void FminFmaxFloatHelper(
1608*f5c631daSSadaf Ebrahimi     float n, float m, float min, float max, float minnm, float maxnm) {
1609*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
1610*f5c631daSSadaf Ebrahimi 
1611*f5c631daSSadaf Ebrahimi   START();
1612*f5c631daSSadaf Ebrahimi   __ Fmov(s0, n);
1613*f5c631daSSadaf Ebrahimi   __ Fmov(s1, m);
1614*f5c631daSSadaf Ebrahimi   __ Fmin(s28, s0, s1);
1615*f5c631daSSadaf Ebrahimi   __ Fmax(s29, s0, s1);
1616*f5c631daSSadaf Ebrahimi   __ Fminnm(s30, s0, s1);
1617*f5c631daSSadaf Ebrahimi   __ Fmaxnm(s31, s0, s1);
1618*f5c631daSSadaf Ebrahimi   END();
1619*f5c631daSSadaf Ebrahimi 
1620*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1621*f5c631daSSadaf Ebrahimi     RUN();
1622*f5c631daSSadaf Ebrahimi 
1623*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(min, s28);
1624*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(max, s29);
1625*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(minnm, s30);
1626*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(maxnm, s31);
1627*f5c631daSSadaf Ebrahimi   }
1628*f5c631daSSadaf Ebrahimi }
1629*f5c631daSSadaf Ebrahimi 
1630*f5c631daSSadaf Ebrahimi 
1631*f5c631daSSadaf Ebrahimi TEST(fmax_fmin_s) {
1632*f5c631daSSadaf Ebrahimi   // Use non-standard NaNs to check that the payload bits are preserved.
1633*f5c631daSSadaf Ebrahimi   float snan = RawbitsToFloat(0x7f951234);
1634*f5c631daSSadaf Ebrahimi   float qnan = RawbitsToFloat(0x7fea8765);
1635*f5c631daSSadaf Ebrahimi 
1636*f5c631daSSadaf Ebrahimi   float snan_processed = RawbitsToFloat(0x7fd51234);
1637*f5c631daSSadaf Ebrahimi   float qnan_processed = qnan;
1638*f5c631daSSadaf Ebrahimi 
1639*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(snan));
1640*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qnan));
1641*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(snan_processed));
1642*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qnan_processed));
1643*f5c631daSSadaf Ebrahimi 
1644*f5c631daSSadaf Ebrahimi   // Bootstrap tests.
1645*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(0, 0, 0, 0, 0, 0);
1646*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(0, 1, 0, 1, 0, 1);
1647*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(kFP32PositiveInfinity,
1648*f5c631daSSadaf Ebrahimi                       kFP32NegativeInfinity,
1649*f5c631daSSadaf Ebrahimi                       kFP32NegativeInfinity,
1650*f5c631daSSadaf Ebrahimi                       kFP32PositiveInfinity,
1651*f5c631daSSadaf Ebrahimi                       kFP32NegativeInfinity,
1652*f5c631daSSadaf Ebrahimi                       kFP32PositiveInfinity);
1653*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(snan,
1654*f5c631daSSadaf Ebrahimi                       0,
1655*f5c631daSSadaf Ebrahimi                       snan_processed,
1656*f5c631daSSadaf Ebrahimi                       snan_processed,
1657*f5c631daSSadaf Ebrahimi                       snan_processed,
1658*f5c631daSSadaf Ebrahimi                       snan_processed);
1659*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(0,
1660*f5c631daSSadaf Ebrahimi                       snan,
1661*f5c631daSSadaf Ebrahimi                       snan_processed,
1662*f5c631daSSadaf Ebrahimi                       snan_processed,
1663*f5c631daSSadaf Ebrahimi                       snan_processed,
1664*f5c631daSSadaf Ebrahimi                       snan_processed);
1665*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(qnan, 0, qnan_processed, qnan_processed, 0, 0);
1666*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(0, qnan, qnan_processed, qnan_processed, 0, 0);
1667*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(qnan,
1668*f5c631daSSadaf Ebrahimi                       snan,
1669*f5c631daSSadaf Ebrahimi                       snan_processed,
1670*f5c631daSSadaf Ebrahimi                       snan_processed,
1671*f5c631daSSadaf Ebrahimi                       snan_processed,
1672*f5c631daSSadaf Ebrahimi                       snan_processed);
1673*f5c631daSSadaf Ebrahimi   FminFmaxFloatHelper(snan,
1674*f5c631daSSadaf Ebrahimi                       qnan,
1675*f5c631daSSadaf Ebrahimi                       snan_processed,
1676*f5c631daSSadaf Ebrahimi                       snan_processed,
1677*f5c631daSSadaf Ebrahimi                       snan_processed,
1678*f5c631daSSadaf Ebrahimi                       snan_processed);
1679*f5c631daSSadaf Ebrahimi 
1680*f5c631daSSadaf Ebrahimi   // Iterate over all combinations of inputs.
1681*f5c631daSSadaf Ebrahimi   float inputs[] = {FLT_MAX,
1682*f5c631daSSadaf Ebrahimi                     FLT_MIN,
1683*f5c631daSSadaf Ebrahimi                     1.0,
1684*f5c631daSSadaf Ebrahimi                     0.0,
1685*f5c631daSSadaf Ebrahimi                     -FLT_MAX,
1686*f5c631daSSadaf Ebrahimi                     -FLT_MIN,
1687*f5c631daSSadaf Ebrahimi                     -1.0,
1688*f5c631daSSadaf Ebrahimi                     -0.0,
1689*f5c631daSSadaf Ebrahimi                     kFP32PositiveInfinity,
1690*f5c631daSSadaf Ebrahimi                     kFP32NegativeInfinity,
1691*f5c631daSSadaf Ebrahimi                     kFP32QuietNaN,
1692*f5c631daSSadaf Ebrahimi                     kFP32SignallingNaN};
1693*f5c631daSSadaf Ebrahimi 
1694*f5c631daSSadaf Ebrahimi   const int count = sizeof(inputs) / sizeof(inputs[0]);
1695*f5c631daSSadaf Ebrahimi 
1696*f5c631daSSadaf Ebrahimi   for (int in = 0; in < count; in++) {
1697*f5c631daSSadaf Ebrahimi     float n = inputs[in];
1698*f5c631daSSadaf Ebrahimi     for (int im = 0; im < count; im++) {
1699*f5c631daSSadaf Ebrahimi       float m = inputs[im];
1700*f5c631daSSadaf Ebrahimi       FminFmaxFloatHelper(n,
1701*f5c631daSSadaf Ebrahimi                           m,
1702*f5c631daSSadaf Ebrahimi                           MinMaxHelper(n, m, true),
1703*f5c631daSSadaf Ebrahimi                           MinMaxHelper(n, m, false),
1704*f5c631daSSadaf Ebrahimi                           MinMaxHelper(n, m, true, kFP32PositiveInfinity),
1705*f5c631daSSadaf Ebrahimi                           MinMaxHelper(n, m, false, kFP32NegativeInfinity));
1706*f5c631daSSadaf Ebrahimi     }
1707*f5c631daSSadaf Ebrahimi   }
1708*f5c631daSSadaf Ebrahimi }
1709*f5c631daSSadaf Ebrahimi 
1710*f5c631daSSadaf Ebrahimi TEST(fccmp) {
1711*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
1712*f5c631daSSadaf Ebrahimi 
1713*f5c631daSSadaf Ebrahimi   START();
1714*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 0.0);
1715*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 0.5);
1716*f5c631daSSadaf Ebrahimi   __ Fmov(d18, -0.5);
1717*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -1.0);
1718*f5c631daSSadaf Ebrahimi   __ Mov(x20, 0);
1719*f5c631daSSadaf Ebrahimi   __ Mov(x21, 0x7ff0000000000001);  // Double precision NaN.
1720*f5c631daSSadaf Ebrahimi   __ Fmov(d21, x21);
1721*f5c631daSSadaf Ebrahimi   __ Mov(w22, 0x7f800001);  // Single precision NaN.
1722*f5c631daSSadaf Ebrahimi   __ Fmov(s22, w22);
1723*f5c631daSSadaf Ebrahimi 
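  // Each Cmp(x20, 0) sets Z and C. If the Fccmp condition then holds, NZCV is
  // taken from the floating-point comparison; otherwise it is set to the
  // immediate flags (e.g. VFlag in the 'ne' case below).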
1724*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1725*f5c631daSSadaf Ebrahimi   __ Fccmp(s16, s16, NoFlag, eq);
1726*f5c631daSSadaf Ebrahimi   __ Mrs(x0, NZCV);
1727*f5c631daSSadaf Ebrahimi 
1728*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1729*f5c631daSSadaf Ebrahimi   __ Fccmp(s16, s16, VFlag, ne);
1730*f5c631daSSadaf Ebrahimi   __ Mrs(x1, NZCV);
1731*f5c631daSSadaf Ebrahimi 
1732*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1733*f5c631daSSadaf Ebrahimi   __ Fccmp(s16, s17, CFlag, ge);
1734*f5c631daSSadaf Ebrahimi   __ Mrs(x2, NZCV);
1735*f5c631daSSadaf Ebrahimi 
1736*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1737*f5c631daSSadaf Ebrahimi   __ Fccmp(s16, s17, CVFlag, lt);
1738*f5c631daSSadaf Ebrahimi   __ Mrs(x3, NZCV);
1739*f5c631daSSadaf Ebrahimi 
1740*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1741*f5c631daSSadaf Ebrahimi   __ Fccmp(d18, d18, ZFlag, le);
1742*f5c631daSSadaf Ebrahimi   __ Mrs(x4, NZCV);
1743*f5c631daSSadaf Ebrahimi 
1744*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1745*f5c631daSSadaf Ebrahimi   __ Fccmp(d18, d18, ZVFlag, gt);
1746*f5c631daSSadaf Ebrahimi   __ Mrs(x5, NZCV);
1747*f5c631daSSadaf Ebrahimi 
1748*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1749*f5c631daSSadaf Ebrahimi   __ Fccmp(d18, d19, ZCVFlag, ls);
1750*f5c631daSSadaf Ebrahimi   __ Mrs(x6, NZCV);
1751*f5c631daSSadaf Ebrahimi 
1752*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1753*f5c631daSSadaf Ebrahimi   __ Fccmp(d18, d19, NFlag, hi);
1754*f5c631daSSadaf Ebrahimi   __ Mrs(x7, NZCV);
1755*f5c631daSSadaf Ebrahimi 
1756*f5c631daSSadaf Ebrahimi   // The Macro Assembler does not allow al or nv as a condition.
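  // With al or nv the condition always passes, so these behave like a plain
  // Fcmp of equal operands and ZC is expected.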
1757*f5c631daSSadaf Ebrahimi   {
1758*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, kInstructionSize);
1759*f5c631daSSadaf Ebrahimi     __ fccmp(s16, s16, NFlag, al);
1760*f5c631daSSadaf Ebrahimi   }
1761*f5c631daSSadaf Ebrahimi   __ Mrs(x8, NZCV);
1762*f5c631daSSadaf Ebrahimi 
1763*f5c631daSSadaf Ebrahimi   {
1764*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, kInstructionSize);
1765*f5c631daSSadaf Ebrahimi     __ fccmp(d18, d18, NFlag, nv);
1766*f5c631daSSadaf Ebrahimi   }
1767*f5c631daSSadaf Ebrahimi   __ Mrs(x9, NZCV);
1768*f5c631daSSadaf Ebrahimi 
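  // Fccmpe performs a signalling comparison: quiet NaN operands additionally
  // raise Invalid Operation, but the NZCV result for the unordered case (CV)
  // is the same as for Fccmp.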
1769*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1770*f5c631daSSadaf Ebrahimi   __ Fccmpe(s16, s16, NoFlag, eq);
1771*f5c631daSSadaf Ebrahimi   __ Mrs(x10, NZCV);
1772*f5c631daSSadaf Ebrahimi 
1773*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1774*f5c631daSSadaf Ebrahimi   __ Fccmpe(d18, d19, ZCVFlag, ls);
1775*f5c631daSSadaf Ebrahimi   __ Mrs(x11, NZCV);
1776*f5c631daSSadaf Ebrahimi 
1777*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1778*f5c631daSSadaf Ebrahimi   __ Fccmpe(d21, d21, NoFlag, eq);
1779*f5c631daSSadaf Ebrahimi   __ Mrs(x12, NZCV);
1780*f5c631daSSadaf Ebrahimi 
1781*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1782*f5c631daSSadaf Ebrahimi   __ Fccmpe(s22, s22, NoFlag, eq);
1783*f5c631daSSadaf Ebrahimi   __ Mrs(x13, NZCV);
1784*f5c631daSSadaf Ebrahimi   END();
1785*f5c631daSSadaf Ebrahimi 
1786*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1787*f5c631daSSadaf Ebrahimi     RUN();
1788*f5c631daSSadaf Ebrahimi 
1789*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w0);
1790*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(VFlag, w1);
1791*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w2);
1792*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w3);
1793*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w4);
1794*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZVFlag, w5);
1795*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CFlag, w6);
1796*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w7);
1797*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w8);
1798*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w9);
1799*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w10);
1800*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CFlag, w11);
1801*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w12);
1802*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w13);
1803*f5c631daSSadaf Ebrahimi   }
1804*f5c631daSSadaf Ebrahimi }
1805*f5c631daSSadaf Ebrahimi 
1806*f5c631daSSadaf Ebrahimi 
1807*f5c631daSSadaf Ebrahimi TEST(fccmp_h) {
1808*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);
1809*f5c631daSSadaf Ebrahimi 
1810*f5c631daSSadaf Ebrahimi   START();
1811*f5c631daSSadaf Ebrahimi   __ Fmov(h16, Float16(0.0));
1812*f5c631daSSadaf Ebrahimi   __ Fmov(h17, Float16(0.5));
1813*f5c631daSSadaf Ebrahimi   __ Mov(x20, 0);
1814*f5c631daSSadaf Ebrahimi   __ Fmov(h21, kFP16DefaultNaN);
1815*f5c631daSSadaf Ebrahimi 
1816*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1817*f5c631daSSadaf Ebrahimi   __ Fccmp(h16, h16, NoFlag, eq);
1818*f5c631daSSadaf Ebrahimi   __ Mrs(x0, NZCV);
1819*f5c631daSSadaf Ebrahimi 
1820*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1821*f5c631daSSadaf Ebrahimi   __ Fccmp(h16, h16, VFlag, ne);
1822*f5c631daSSadaf Ebrahimi   __ Mrs(x1, NZCV);
1823*f5c631daSSadaf Ebrahimi 
1824*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1825*f5c631daSSadaf Ebrahimi   __ Fccmp(h16, h17, CFlag, ge);
1826*f5c631daSSadaf Ebrahimi   __ Mrs(x2, NZCV);
1827*f5c631daSSadaf Ebrahimi 
1828*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1829*f5c631daSSadaf Ebrahimi   __ Fccmp(h16, h17, CVFlag, lt);
1830*f5c631daSSadaf Ebrahimi   __ Mrs(x3, NZCV);
1831*f5c631daSSadaf Ebrahimi 
1832*f5c631daSSadaf Ebrahimi   // The Macro Assembler does not allow al or nv as a condition.
1833*f5c631daSSadaf Ebrahimi   {
1834*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, kInstructionSize);
1835*f5c631daSSadaf Ebrahimi     __ fccmp(h16, h16, NFlag, al);
1836*f5c631daSSadaf Ebrahimi   }
1837*f5c631daSSadaf Ebrahimi   __ Mrs(x4, NZCV);
1838*f5c631daSSadaf Ebrahimi   {
1839*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, kInstructionSize);
1840*f5c631daSSadaf Ebrahimi     __ fccmp(h16, h16, NFlag, nv);
1841*f5c631daSSadaf Ebrahimi   }
1842*f5c631daSSadaf Ebrahimi   __ Mrs(x5, NZCV);
1843*f5c631daSSadaf Ebrahimi 
1844*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1845*f5c631daSSadaf Ebrahimi   __ Fccmpe(h16, h16, NoFlag, eq);
1846*f5c631daSSadaf Ebrahimi   __ Mrs(x6, NZCV);
1847*f5c631daSSadaf Ebrahimi 
1848*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1849*f5c631daSSadaf Ebrahimi   __ Fccmpe(h16, h21, NoFlag, eq);
1850*f5c631daSSadaf Ebrahimi   __ Mrs(x7, NZCV);
1851*f5c631daSSadaf Ebrahimi 
1852*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1853*f5c631daSSadaf Ebrahimi   __ Fccmpe(h21, h16, NoFlag, eq);
1854*f5c631daSSadaf Ebrahimi   __ Mrs(x8, NZCV);
1855*f5c631daSSadaf Ebrahimi 
1856*f5c631daSSadaf Ebrahimi   __ Cmp(x20, 0);
1857*f5c631daSSadaf Ebrahimi   __ Fccmpe(h21, h21, NoFlag, eq);
1858*f5c631daSSadaf Ebrahimi   __ Mrs(x9, NZCV);
1859*f5c631daSSadaf Ebrahimi   END();
1860*f5c631daSSadaf Ebrahimi 
1861*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1862*f5c631daSSadaf Ebrahimi     RUN();
1863*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w0);
1864*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(VFlag, w1);
1865*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w2);
1866*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w3);
1867*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w4);
1868*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w5);
1869*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w6);
1870*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w7);
1871*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w8);
1872*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w9);
1873*f5c631daSSadaf Ebrahimi   }
1874*f5c631daSSadaf Ebrahimi }
1875*f5c631daSSadaf Ebrahimi 
1876*f5c631daSSadaf Ebrahimi 
1877*f5c631daSSadaf Ebrahimi TEST(fcmp) {
1878*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
1879*f5c631daSSadaf Ebrahimi 
1880*f5c631daSSadaf Ebrahimi   START();
1881*f5c631daSSadaf Ebrahimi 
1882*f5c631daSSadaf Ebrahimi   // Some of these tests require a floating-point scratch register assigned to
1883*f5c631daSSadaf Ebrahimi   // the macro assembler, but most do not.
1884*f5c631daSSadaf Ebrahimi   {
1885*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(&masm);
1886*f5c631daSSadaf Ebrahimi     temps.ExcludeAll();
1887*f5c631daSSadaf Ebrahimi     temps.Include(ip0, ip1);
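    // Only ip0 and ip1 are available as scratch registers here; d0 is added
    // back temporarily around the immediate comparisons below, presumably
    // because a non-zero literal needs an FP scratch register, while comparing
    // with 0.0 has a dedicated fcmp-with-zero form.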
1888*f5c631daSSadaf Ebrahimi 
1889*f5c631daSSadaf Ebrahimi     __ Fmov(s8, 0.0);
1890*f5c631daSSadaf Ebrahimi     __ Fmov(s9, 0.5);
1891*f5c631daSSadaf Ebrahimi     __ Mov(w18, 0x7f800001);  // Single precision NaN.
1892*f5c631daSSadaf Ebrahimi     __ Fmov(s18, w18);
1893*f5c631daSSadaf Ebrahimi 
1894*f5c631daSSadaf Ebrahimi     __ Fcmp(s8, s8);
1895*f5c631daSSadaf Ebrahimi     __ Mrs(x0, NZCV);
1896*f5c631daSSadaf Ebrahimi     __ Fcmp(s8, s9);
1897*f5c631daSSadaf Ebrahimi     __ Mrs(x1, NZCV);
1898*f5c631daSSadaf Ebrahimi     __ Fcmp(s9, s8);
1899*f5c631daSSadaf Ebrahimi     __ Mrs(x2, NZCV);
1900*f5c631daSSadaf Ebrahimi     __ Fcmp(s8, s18);
1901*f5c631daSSadaf Ebrahimi     __ Mrs(x3, NZCV);
1902*f5c631daSSadaf Ebrahimi     __ Fcmp(s18, s18);
1903*f5c631daSSadaf Ebrahimi     __ Mrs(x4, NZCV);
1904*f5c631daSSadaf Ebrahimi     __ Fcmp(s8, 0.0);
1905*f5c631daSSadaf Ebrahimi     __ Mrs(x5, NZCV);
1906*f5c631daSSadaf Ebrahimi     temps.Include(d0);
1907*f5c631daSSadaf Ebrahimi     __ Fcmp(s8, 255.0);
1908*f5c631daSSadaf Ebrahimi     temps.Exclude(d0);
1909*f5c631daSSadaf Ebrahimi     __ Mrs(x6, NZCV);
1910*f5c631daSSadaf Ebrahimi 
1911*f5c631daSSadaf Ebrahimi     __ Fmov(d19, 0.0);
1912*f5c631daSSadaf Ebrahimi     __ Fmov(d20, 0.5);
1913*f5c631daSSadaf Ebrahimi     __ Mov(x21, 0x7ff0000000000001);  // Double precision NaN.
1914*f5c631daSSadaf Ebrahimi     __ Fmov(d21, x21);
1915*f5c631daSSadaf Ebrahimi 
1916*f5c631daSSadaf Ebrahimi     __ Fcmp(d19, d19);
1917*f5c631daSSadaf Ebrahimi     __ Mrs(x10, NZCV);
1918*f5c631daSSadaf Ebrahimi     __ Fcmp(d19, d20);
1919*f5c631daSSadaf Ebrahimi     __ Mrs(x11, NZCV);
1920*f5c631daSSadaf Ebrahimi     __ Fcmp(d20, d19);
1921*f5c631daSSadaf Ebrahimi     __ Mrs(x12, NZCV);
1922*f5c631daSSadaf Ebrahimi     __ Fcmp(d19, d21);
1923*f5c631daSSadaf Ebrahimi     __ Mrs(x13, NZCV);
1924*f5c631daSSadaf Ebrahimi     __ Fcmp(d21, d21);
1925*f5c631daSSadaf Ebrahimi     __ Mrs(x14, NZCV);
1926*f5c631daSSadaf Ebrahimi     __ Fcmp(d19, 0.0);
1927*f5c631daSSadaf Ebrahimi     __ Mrs(x15, NZCV);
1928*f5c631daSSadaf Ebrahimi     temps.Include(d0);
1929*f5c631daSSadaf Ebrahimi     __ Fcmp(d19, 12.3456);
1930*f5c631daSSadaf Ebrahimi     temps.Exclude(d0);
1931*f5c631daSSadaf Ebrahimi     __ Mrs(x16, NZCV);
1932*f5c631daSSadaf Ebrahimi 
1933*f5c631daSSadaf Ebrahimi     __ Fcmpe(s8, s8);
1934*f5c631daSSadaf Ebrahimi     __ Mrs(x22, NZCV);
1935*f5c631daSSadaf Ebrahimi     __ Fcmpe(s8, 0.0);
1936*f5c631daSSadaf Ebrahimi     __ Mrs(x23, NZCV);
1937*f5c631daSSadaf Ebrahimi     __ Fcmpe(d19, d19);
1938*f5c631daSSadaf Ebrahimi     __ Mrs(x24, NZCV);
1939*f5c631daSSadaf Ebrahimi     __ Fcmpe(d19, 0.0);
1940*f5c631daSSadaf Ebrahimi     __ Mrs(x25, NZCV);
1941*f5c631daSSadaf Ebrahimi     __ Fcmpe(s18, s18);
1942*f5c631daSSadaf Ebrahimi     __ Mrs(x26, NZCV);
1943*f5c631daSSadaf Ebrahimi     __ Fcmpe(d21, d21);
1944*f5c631daSSadaf Ebrahimi     __ Mrs(x27, NZCV);
1945*f5c631daSSadaf Ebrahimi   }
1946*f5c631daSSadaf Ebrahimi 
1947*f5c631daSSadaf Ebrahimi   END();
1948*f5c631daSSadaf Ebrahimi 
1949*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
1950*f5c631daSSadaf Ebrahimi     RUN();
1951*f5c631daSSadaf Ebrahimi 
1952*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w0);
1953*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w1);
1954*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CFlag, w2);
1955*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w3);
1956*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w4);
1957*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w5);
1958*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w6);
1959*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w10);
1960*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w11);
1961*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CFlag, w12);
1962*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w13);
1963*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w14);
1964*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w15);
1965*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w16);
1966*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w22);
1967*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w23);
1968*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w24);
1969*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w25);
1970*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w26);
1971*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w27);
1972*f5c631daSSadaf Ebrahimi   }
1973*f5c631daSSadaf Ebrahimi }
1974*f5c631daSSadaf Ebrahimi 
1975*f5c631daSSadaf Ebrahimi 
1976*f5c631daSSadaf Ebrahimi TEST(fcmp_h) {
1977*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);
1978*f5c631daSSadaf Ebrahimi 
1979*f5c631daSSadaf Ebrahimi   START();
1980*f5c631daSSadaf Ebrahimi 
1981*f5c631daSSadaf Ebrahimi   // Some of these tests require a floating-point scratch register assigned to
1982*f5c631daSSadaf Ebrahimi   // the macro assembler, but most do not.
1983*f5c631daSSadaf Ebrahimi   {
1984*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(&masm);
1985*f5c631daSSadaf Ebrahimi     temps.ExcludeAll();
1986*f5c631daSSadaf Ebrahimi     temps.Include(ip0, ip1);
1987*f5c631daSSadaf Ebrahimi 
1988*f5c631daSSadaf Ebrahimi     __ Fmov(h8, Float16(0.0));
1989*f5c631daSSadaf Ebrahimi     __ Fmov(h9, Float16(0.5));
1990*f5c631daSSadaf Ebrahimi     __ Fmov(h18, kFP16DefaultNaN);
1991*f5c631daSSadaf Ebrahimi 
1992*f5c631daSSadaf Ebrahimi     __ Fcmp(h8, h8);
1993*f5c631daSSadaf Ebrahimi     __ Mrs(x0, NZCV);
1994*f5c631daSSadaf Ebrahimi     __ Fcmp(h8, h9);
1995*f5c631daSSadaf Ebrahimi     __ Mrs(x1, NZCV);
1996*f5c631daSSadaf Ebrahimi     __ Fcmp(h9, h8);
1997*f5c631daSSadaf Ebrahimi     __ Mrs(x2, NZCV);
1998*f5c631daSSadaf Ebrahimi     __ Fcmp(h8, h18);
1999*f5c631daSSadaf Ebrahimi     __ Mrs(x3, NZCV);
2000*f5c631daSSadaf Ebrahimi     __ Fcmp(h18, h18);
2001*f5c631daSSadaf Ebrahimi     __ Mrs(x4, NZCV);
2002*f5c631daSSadaf Ebrahimi     __ Fcmp(h8, 0.0);
2003*f5c631daSSadaf Ebrahimi     __ Mrs(x5, NZCV);
2004*f5c631daSSadaf Ebrahimi     temps.Include(d0);
2005*f5c631daSSadaf Ebrahimi     __ Fcmp(h8, 255.0);
2006*f5c631daSSadaf Ebrahimi     temps.Exclude(d0);
2007*f5c631daSSadaf Ebrahimi     __ Mrs(x6, NZCV);
2008*f5c631daSSadaf Ebrahimi 
2009*f5c631daSSadaf Ebrahimi     __ Fcmpe(h8, h8);
2010*f5c631daSSadaf Ebrahimi     __ Mrs(x22, NZCV);
2011*f5c631daSSadaf Ebrahimi     __ Fcmpe(h8, 0.0);
2012*f5c631daSSadaf Ebrahimi     __ Mrs(x23, NZCV);
2013*f5c631daSSadaf Ebrahimi     __ Fcmpe(h8, h18);
2014*f5c631daSSadaf Ebrahimi     __ Mrs(x24, NZCV);
2015*f5c631daSSadaf Ebrahimi     __ Fcmpe(h18, h8);
2016*f5c631daSSadaf Ebrahimi     __ Mrs(x25, NZCV);
2017*f5c631daSSadaf Ebrahimi     __ Fcmpe(h18, h18);
2018*f5c631daSSadaf Ebrahimi     __ Mrs(x26, NZCV);
2019*f5c631daSSadaf Ebrahimi   }
2020*f5c631daSSadaf Ebrahimi 
2021*f5c631daSSadaf Ebrahimi   END();
2022*f5c631daSSadaf Ebrahimi 
2023*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2024*f5c631daSSadaf Ebrahimi     RUN();
2025*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w0);
2026*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w1);
2027*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CFlag, w2);
2028*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w3);
2029*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w4);
2030*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w5);
2031*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(NFlag, w6);
2032*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w22);
2033*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(ZCFlag, w23);
2034*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w24);
2035*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w25);
2036*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(CVFlag, w26);
2037*f5c631daSSadaf Ebrahimi   }
2038*f5c631daSSadaf Ebrahimi }
2039*f5c631daSSadaf Ebrahimi 
2040*f5c631daSSadaf Ebrahimi 
2041*f5c631daSSadaf Ebrahimi TEST(fcsel) {
2042*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2043*f5c631daSSadaf Ebrahimi 
2044*f5c631daSSadaf Ebrahimi   START();
2045*f5c631daSSadaf Ebrahimi   __ Mov(x16, 0);
2046*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
2047*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 2.0);
2048*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 3.0);
2049*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 4.0);
2050*f5c631daSSadaf Ebrahimi 
2051*f5c631daSSadaf Ebrahimi   __ Cmp(x16, 0);
2052*f5c631daSSadaf Ebrahimi   __ Fcsel(s0, s16, s17, eq);
2053*f5c631daSSadaf Ebrahimi   __ Fcsel(s1, s16, s17, ne);
2054*f5c631daSSadaf Ebrahimi   __ Fcsel(d2, d18, d19, eq);
2055*f5c631daSSadaf Ebrahimi   __ Fcsel(d3, d18, d19, ne);
2056*f5c631daSSadaf Ebrahimi   // The Macro Assembler does not allow al or nv as a condition.
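  // Both al and nv encode "always" in AArch64, so these select the first
  // source register (checked against 1.0 and 3.0 below).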
2057*f5c631daSSadaf Ebrahimi   {
2058*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, 2 * kInstructionSize);
2059*f5c631daSSadaf Ebrahimi     __ fcsel(s4, s16, s17, al);
2060*f5c631daSSadaf Ebrahimi     __ fcsel(d5, d18, d19, nv);
2061*f5c631daSSadaf Ebrahimi   }
2062*f5c631daSSadaf Ebrahimi   END();
2063*f5c631daSSadaf Ebrahimi 
2064*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2065*f5c631daSSadaf Ebrahimi     RUN();
2066*f5c631daSSadaf Ebrahimi 
2067*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2068*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s1);
2069*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(3.0, d2);
2070*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(4.0, d3);
2071*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s4);
2072*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(3.0, d5);
2073*f5c631daSSadaf Ebrahimi   }
2074*f5c631daSSadaf Ebrahimi }
2075*f5c631daSSadaf Ebrahimi 
2076*f5c631daSSadaf Ebrahimi 
2077*f5c631daSSadaf Ebrahimi TEST(fcsel_h) {
2078*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);
2079*f5c631daSSadaf Ebrahimi 
2080*f5c631daSSadaf Ebrahimi   START();
2081*f5c631daSSadaf Ebrahimi   __ Mov(x16, 0);
2082*f5c631daSSadaf Ebrahimi   __ Fmov(h16, Float16(1.0));
2083*f5c631daSSadaf Ebrahimi   __ Fmov(h17, Float16(2.0));
2084*f5c631daSSadaf Ebrahimi 
2085*f5c631daSSadaf Ebrahimi   __ Cmp(x16, 0);
2086*f5c631daSSadaf Ebrahimi   __ Fcsel(h0, h16, h17, eq);
2087*f5c631daSSadaf Ebrahimi   __ Fcsel(h1, h16, h17, ne);
2088*f5c631daSSadaf Ebrahimi   // The Macro Assembler does not allow al or nv as a condition.
2089*f5c631daSSadaf Ebrahimi   {
2090*f5c631daSSadaf Ebrahimi     ExactAssemblyScope scope(&masm, 2 * kInstructionSize);
2091*f5c631daSSadaf Ebrahimi     __ fcsel(h4, h16, h17, al);
2092*f5c631daSSadaf Ebrahimi     __ fcsel(h5, h16, h17, nv);
2093*f5c631daSSadaf Ebrahimi   }
2094*f5c631daSSadaf Ebrahimi   END();
2095*f5c631daSSadaf Ebrahimi 
2096*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2097*f5c631daSSadaf Ebrahimi     RUN();
2098*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(1.0), h0);
2099*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(2.0), h1);
2100*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(1.0), h4);
2101*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(Float16(1.0), h5);
2102*f5c631daSSadaf Ebrahimi   }
2103*f5c631daSSadaf Ebrahimi }
2104*f5c631daSSadaf Ebrahimi 
2105*f5c631daSSadaf Ebrahimi 
2106*f5c631daSSadaf Ebrahimi TEST(fneg) {
2107*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2108*f5c631daSSadaf Ebrahimi 
2109*f5c631daSSadaf Ebrahimi   START();
2110*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
2111*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 0.0);
2112*f5c631daSSadaf Ebrahimi   __ Fmov(s18, kFP32PositiveInfinity);
2113*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.0);
2114*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 0.0);
2115*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64PositiveInfinity);
2116*f5c631daSSadaf Ebrahimi 
2117*f5c631daSSadaf Ebrahimi   __ Fneg(s0, s16);
2118*f5c631daSSadaf Ebrahimi   __ Fneg(s1, s0);
2119*f5c631daSSadaf Ebrahimi   __ Fneg(s2, s17);
2120*f5c631daSSadaf Ebrahimi   __ Fneg(s3, s2);
2121*f5c631daSSadaf Ebrahimi   __ Fneg(s4, s18);
2122*f5c631daSSadaf Ebrahimi   __ Fneg(s5, s4);
2123*f5c631daSSadaf Ebrahimi   __ Fneg(d6, d19);
2124*f5c631daSSadaf Ebrahimi   __ Fneg(d7, d6);
2125*f5c631daSSadaf Ebrahimi   __ Fneg(d8, d20);
2126*f5c631daSSadaf Ebrahimi   __ Fneg(d9, d8);
2127*f5c631daSSadaf Ebrahimi   __ Fneg(d10, d21);
2128*f5c631daSSadaf Ebrahimi   __ Fneg(d11, d10);
2129*f5c631daSSadaf Ebrahimi   END();
2130*f5c631daSSadaf Ebrahimi 
2131*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2132*f5c631daSSadaf Ebrahimi     RUN();
2133*f5c631daSSadaf Ebrahimi 
2134*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s0);
2135*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2136*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s2);
2137*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s3);
2138*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s4);
2139*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5);
2140*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d6);
2141*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d7);
2142*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d8);
2143*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d9);
2144*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10);
2145*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11);
2146*f5c631daSSadaf Ebrahimi   }
2147*f5c631daSSadaf Ebrahimi }
2148*f5c631daSSadaf Ebrahimi 
2149*f5c631daSSadaf Ebrahimi 
2150*f5c631daSSadaf Ebrahimi TEST(fabs) {
2151*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2152*f5c631daSSadaf Ebrahimi 
2153*f5c631daSSadaf Ebrahimi   START();
2154*f5c631daSSadaf Ebrahimi   __ Fmov(s16, -1.0);
2155*f5c631daSSadaf Ebrahimi   __ Fmov(s17, -0.0);
2156*f5c631daSSadaf Ebrahimi   __ Fmov(s18, kFP32NegativeInfinity);
2157*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -1.0);
2158*f5c631daSSadaf Ebrahimi   __ Fmov(d20, -0.0);
2159*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
2160*f5c631daSSadaf Ebrahimi 
2161*f5c631daSSadaf Ebrahimi   __ Fabs(s0, s16);
2162*f5c631daSSadaf Ebrahimi   __ Fabs(s1, s0);
2163*f5c631daSSadaf Ebrahimi   __ Fabs(s2, s17);
2164*f5c631daSSadaf Ebrahimi   __ Fabs(s3, s18);
2165*f5c631daSSadaf Ebrahimi   __ Fabs(d4, d19);
2166*f5c631daSSadaf Ebrahimi   __ Fabs(d5, d4);
2167*f5c631daSSadaf Ebrahimi   __ Fabs(d6, d20);
2168*f5c631daSSadaf Ebrahimi   __ Fabs(d7, d21);
2169*f5c631daSSadaf Ebrahimi   END();
2170*f5c631daSSadaf Ebrahimi 
2171*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2172*f5c631daSSadaf Ebrahimi     RUN();
2173*f5c631daSSadaf Ebrahimi 
2174*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2175*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2176*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s2);
2177*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s3);
2178*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d4);
2179*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d5);
2180*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d6);
2181*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d7);
2182*f5c631daSSadaf Ebrahimi   }
2183*f5c631daSSadaf Ebrahimi }
2184*f5c631daSSadaf Ebrahimi 
2185*f5c631daSSadaf Ebrahimi 
2186*f5c631daSSadaf Ebrahimi TEST(fsqrt) {
2187*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2188*f5c631daSSadaf Ebrahimi 
2189*f5c631daSSadaf Ebrahimi   START();
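  // Per IEEE 754, sqrt(-0.0) is -0.0 while the square root of any other
  // negative value is a NaN (the default NaN here); both cases are covered.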
2190*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 0.0);
2191*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.0);
2192*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 0.25);
2193*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 65536.0);
2194*f5c631daSSadaf Ebrahimi   __ Fmov(s20, -0.0);
2195*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32PositiveInfinity);
2196*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -1.0);
2197*f5c631daSSadaf Ebrahimi   __ Fmov(d23, 0.0);
2198*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.0);
2199*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.25);
2200*f5c631daSSadaf Ebrahimi   __ Fmov(d26, 4294967296.0);
2201*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.0);
2202*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64PositiveInfinity);
2203*f5c631daSSadaf Ebrahimi   __ Fmov(d29, -1.0);
2204*f5c631daSSadaf Ebrahimi 
2205*f5c631daSSadaf Ebrahimi   __ Fsqrt(s0, s16);
2206*f5c631daSSadaf Ebrahimi   __ Fsqrt(s1, s17);
2207*f5c631daSSadaf Ebrahimi   __ Fsqrt(s2, s18);
2208*f5c631daSSadaf Ebrahimi   __ Fsqrt(s3, s19);
2209*f5c631daSSadaf Ebrahimi   __ Fsqrt(s4, s20);
2210*f5c631daSSadaf Ebrahimi   __ Fsqrt(s5, s21);
2211*f5c631daSSadaf Ebrahimi   __ Fsqrt(s6, s22);
2212*f5c631daSSadaf Ebrahimi   __ Fsqrt(d7, d23);
2213*f5c631daSSadaf Ebrahimi   __ Fsqrt(d8, d24);
2214*f5c631daSSadaf Ebrahimi   __ Fsqrt(d9, d25);
2215*f5c631daSSadaf Ebrahimi   __ Fsqrt(d10, d26);
2216*f5c631daSSadaf Ebrahimi   __ Fsqrt(d11, d27);
2217*f5c631daSSadaf Ebrahimi   __ Fsqrt(d12, d28);
2218*f5c631daSSadaf Ebrahimi   __ Fsqrt(d13, d29);
2219*f5c631daSSadaf Ebrahimi   END();
2220*f5c631daSSadaf Ebrahimi 
2221*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2222*f5c631daSSadaf Ebrahimi     RUN();
2223*f5c631daSSadaf Ebrahimi 
2224*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s0);
2225*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2226*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.5, s2);
2227*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(256.0, s3);
2228*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s4);
2229*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5);
2230*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6);
2231*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d7);
2232*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d8);
2233*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.5, d9);
2234*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(65536.0, d10);
2235*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
2236*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d12);
2237*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
2238*f5c631daSSadaf Ebrahimi   }
2239*f5c631daSSadaf Ebrahimi }
2240*f5c631daSSadaf Ebrahimi 
2241*f5c631daSSadaf Ebrahimi TEST(frint32x_s) {
2242*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2243*f5c631daSSadaf Ebrahimi 
2244*f5c631daSSadaf Ebrahimi   START();
2245*f5c631daSSadaf Ebrahimi 
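  // Frint32x rounds to an integral value that fits in a 32-bit signed
  // integer, using the current (round-to-nearest) FPCR rounding mode; NaN,
  // infinite and out-of-range inputs all produce INT32_MIN.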
2246*f5c631daSSadaf Ebrahimi   __ Fmov(s13, 1.0);
2247*f5c631daSSadaf Ebrahimi   __ Fmov(s14, 1.1);
2248*f5c631daSSadaf Ebrahimi   __ Fmov(s15, 1.5);
2249*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.9);
2250*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 2.5);
2251*f5c631daSSadaf Ebrahimi   __ Fmov(s18, -1.5);
2252*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
2253*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
2254*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
2255*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0.0);
2256*f5c631daSSadaf Ebrahimi   __ Fmov(s23, -0.0);
2257*f5c631daSSadaf Ebrahimi   __ Fmov(s24, -0.2);
2258*f5c631daSSadaf Ebrahimi   __ Fmov(s25, kFP32DefaultNaN);
2259*f5c631daSSadaf Ebrahimi   __ Fmov(s26, INT32_MIN);
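  // Near 2^31 the FP32 spacing (24-bit significand) is 2^7, so the
  // neighbouring representable values used below are 0x80 apart.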
2260*f5c631daSSadaf Ebrahimi   __ Fmov(s27, INT32_MIN + 0x80);  // The next representable FP32.
2261*f5c631daSSadaf Ebrahimi   __ Fmov(s28, 0x80000000);
2262*f5c631daSSadaf Ebrahimi   __ Fmov(s29, 0x7fffff80);  // The largest int32_t representable as FP32.
2263*f5c631daSSadaf Ebrahimi   __ Fmov(s30, FLT_MIN);
2264*f5c631daSSadaf Ebrahimi   __ Fmov(s31, FLT_MAX);
2265*f5c631daSSadaf Ebrahimi 
2266*f5c631daSSadaf Ebrahimi   __ Frint32x(s0, s13);
2267*f5c631daSSadaf Ebrahimi   __ Frint32x(s1, s14);
2268*f5c631daSSadaf Ebrahimi   __ Frint32x(s2, s15);
2269*f5c631daSSadaf Ebrahimi   __ Frint32x(s3, s16);
2270*f5c631daSSadaf Ebrahimi   __ Frint32x(s4, s17);
2271*f5c631daSSadaf Ebrahimi   __ Frint32x(s5, s18);
2272*f5c631daSSadaf Ebrahimi   __ Frint32x(s6, s19);
2273*f5c631daSSadaf Ebrahimi   __ Frint32x(s7, s20);
2274*f5c631daSSadaf Ebrahimi   __ Frint32x(s8, s21);
2275*f5c631daSSadaf Ebrahimi   __ Frint32x(s9, s22);
2276*f5c631daSSadaf Ebrahimi   __ Frint32x(s10, s23);
2277*f5c631daSSadaf Ebrahimi   __ Frint32x(s11, s24);
2278*f5c631daSSadaf Ebrahimi   __ Frint32x(s12, s25);
2279*f5c631daSSadaf Ebrahimi   __ Frint32x(s13, s26);
2280*f5c631daSSadaf Ebrahimi   __ Frint32x(s14, s27);
2281*f5c631daSSadaf Ebrahimi   __ Frint32x(s15, s28);
2282*f5c631daSSadaf Ebrahimi   __ Frint32x(s16, s29);
2283*f5c631daSSadaf Ebrahimi   __ Frint32x(s17, s30);
2284*f5c631daSSadaf Ebrahimi   __ Frint32x(s18, s31);
2285*f5c631daSSadaf Ebrahimi 
2286*f5c631daSSadaf Ebrahimi   END();
2287*f5c631daSSadaf Ebrahimi 
2288*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2289*f5c631daSSadaf Ebrahimi     RUN();
2290*f5c631daSSadaf Ebrahimi 
2291*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2292*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2293*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
2294*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
2295*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
2296*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
2297*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
2298*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s7);
2299*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s8);
2300*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2301*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2302*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2303*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s12);  // NaN.
2304*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s13);
2305*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN + 0x80, s14);
2306*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s15);  // Out of range.
2307*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0x7fffff80, s16);
2308*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0, s17);
2309*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s18);
2310*f5c631daSSadaf Ebrahimi   }
2311*f5c631daSSadaf Ebrahimi }
2312*f5c631daSSadaf Ebrahimi 
2313*f5c631daSSadaf Ebrahimi TEST(frint32x_d) {
2314*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2315*f5c631daSSadaf Ebrahimi 
2316*f5c631daSSadaf Ebrahimi   START();
2317*f5c631daSSadaf Ebrahimi 
2318*f5c631daSSadaf Ebrahimi   __ Fmov(d13, 1.0);
2319*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 1.1);
2320*f5c631daSSadaf Ebrahimi   __ Fmov(d15, 1.5);
2321*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.9);
2322*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 2.5);
2323*f5c631daSSadaf Ebrahimi   __ Fmov(d18, -1.5);
2324*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -2.5);
2325*f5c631daSSadaf Ebrahimi   __ Fmov(d20, kFP64PositiveInfinity);
2326*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
2327*f5c631daSSadaf Ebrahimi   __ Fmov(d22, 0.0);
2328*f5c631daSSadaf Ebrahimi   __ Fmov(d23, -0.0);
2329*f5c631daSSadaf Ebrahimi   __ Fmov(d24, -0.2);
2330*f5c631daSSadaf Ebrahimi   __ Fmov(d25, kFP64DefaultNaN);
2331*f5c631daSSadaf Ebrahimi   __ Fmov(d26, INT32_MIN);
2332*f5c631daSSadaf Ebrahimi   __ Fmov(d27, INT32_MIN + 1);
2333*f5c631daSSadaf Ebrahimi   __ Fmov(d28, INT32_MAX);
2334*f5c631daSSadaf Ebrahimi   __ Fmov(d29, INT32_MAX - 1);
2335*f5c631daSSadaf Ebrahimi   __ Fmov(d30, FLT_MIN);
2336*f5c631daSSadaf Ebrahimi   __ Fmov(d31, FLT_MAX);
2337*f5c631daSSadaf Ebrahimi 
2338*f5c631daSSadaf Ebrahimi   __ Frint32x(d0, d13);
2339*f5c631daSSadaf Ebrahimi   __ Frint32x(d1, d14);
2340*f5c631daSSadaf Ebrahimi   __ Frint32x(d2, d15);
2341*f5c631daSSadaf Ebrahimi   __ Frint32x(d3, d16);
2342*f5c631daSSadaf Ebrahimi   __ Frint32x(d4, d17);
2343*f5c631daSSadaf Ebrahimi   __ Frint32x(d5, d18);
2344*f5c631daSSadaf Ebrahimi   __ Frint32x(d6, d19);
2345*f5c631daSSadaf Ebrahimi   __ Frint32x(d7, d20);
2346*f5c631daSSadaf Ebrahimi   __ Frint32x(d8, d21);
2347*f5c631daSSadaf Ebrahimi   __ Frint32x(d9, d22);
2348*f5c631daSSadaf Ebrahimi   __ Frint32x(d10, d23);
2349*f5c631daSSadaf Ebrahimi   __ Frint32x(d11, d24);
2350*f5c631daSSadaf Ebrahimi   __ Frint32x(d12, d25);
2351*f5c631daSSadaf Ebrahimi   __ Frint32x(d13, d26);
2352*f5c631daSSadaf Ebrahimi   __ Frint32x(d14, d27);
2353*f5c631daSSadaf Ebrahimi   __ Frint32x(d15, d28);
2354*f5c631daSSadaf Ebrahimi   __ Frint32x(d16, d29);
2355*f5c631daSSadaf Ebrahimi   __ Frint32x(d17, d30);
2356*f5c631daSSadaf Ebrahimi   __ Frint32x(d18, d31);
2357*f5c631daSSadaf Ebrahimi 
2358*f5c631daSSadaf Ebrahimi   END();
2359*f5c631daSSadaf Ebrahimi 
2360*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2361*f5c631daSSadaf Ebrahimi     RUN();
2362*f5c631daSSadaf Ebrahimi 
2363*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d0);
2364*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d1);
2365*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d2);
2366*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d3);
2367*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d4);
2368*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d5);
2369*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d6);
2370*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d7);
2371*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d8);
2372*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d9);
2373*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d10);
2374*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
2375*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d12);
2376*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d13);
2377*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN + 1, d14);
2378*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MAX, d15);
2379*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MAX - 1, d16);
2380*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0, d17);
2381*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d18);
2382*f5c631daSSadaf Ebrahimi   }
2383*f5c631daSSadaf Ebrahimi }
2384*f5c631daSSadaf Ebrahimi 
2385*f5c631daSSadaf Ebrahimi TEST(frint32z_s) {
2386*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2387*f5c631daSSadaf Ebrahimi 
2388*f5c631daSSadaf Ebrahimi   START();
2389*f5c631daSSadaf Ebrahimi 
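  // Frint32z is the round-towards-zero variant, so 1.5 and 1.9 round to 1.0
  // and -1.5 rounds to -1.0; out-of-range and NaN inputs still give INT32_MIN.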
2390*f5c631daSSadaf Ebrahimi   __ Fmov(s13, 1.0);
2391*f5c631daSSadaf Ebrahimi   __ Fmov(s14, 1.1);
2392*f5c631daSSadaf Ebrahimi   __ Fmov(s15, 1.5);
2393*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.9);
2394*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 2.5);
2395*f5c631daSSadaf Ebrahimi   __ Fmov(s18, -1.5);
2396*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
2397*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
2398*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
2399*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0.0);
2400*f5c631daSSadaf Ebrahimi   __ Fmov(s23, -0.0);
2401*f5c631daSSadaf Ebrahimi   __ Fmov(s24, -0.2);
2402*f5c631daSSadaf Ebrahimi   __ Fmov(s25, kFP32DefaultNaN);
2403*f5c631daSSadaf Ebrahimi   __ Fmov(s26, INT32_MIN);
2404*f5c631daSSadaf Ebrahimi   __ Fmov(s27, INT32_MIN + 0x80);  // The next representable FP32.
2405*f5c631daSSadaf Ebrahimi   __ Fmov(s28, 0x80000000);
2406*f5c631daSSadaf Ebrahimi   __ Fmov(s29, 0x7fffff80);  // The largest int32_t representable as FP32.
2407*f5c631daSSadaf Ebrahimi   __ Fmov(s30, FLT_MIN);
2408*f5c631daSSadaf Ebrahimi   __ Fmov(s31, FLT_MAX);
2409*f5c631daSSadaf Ebrahimi 
2410*f5c631daSSadaf Ebrahimi   __ Frint32z(s0, s13);
2411*f5c631daSSadaf Ebrahimi   __ Frint32z(s1, s14);
2412*f5c631daSSadaf Ebrahimi   __ Frint32z(s2, s15);
2413*f5c631daSSadaf Ebrahimi   __ Frint32z(s3, s16);
2414*f5c631daSSadaf Ebrahimi   __ Frint32z(s4, s17);
2415*f5c631daSSadaf Ebrahimi   __ Frint32z(s5, s18);
2416*f5c631daSSadaf Ebrahimi   __ Frint32z(s6, s19);
2417*f5c631daSSadaf Ebrahimi   __ Frint32z(s7, s20);
2418*f5c631daSSadaf Ebrahimi   __ Frint32z(s8, s21);
2419*f5c631daSSadaf Ebrahimi   __ Frint32z(s9, s22);
2420*f5c631daSSadaf Ebrahimi   __ Frint32z(s10, s23);
2421*f5c631daSSadaf Ebrahimi   __ Frint32z(s11, s24);
2422*f5c631daSSadaf Ebrahimi   __ Frint32z(s12, s25);
2423*f5c631daSSadaf Ebrahimi   __ Frint32z(s13, s26);
2424*f5c631daSSadaf Ebrahimi   __ Frint32z(s14, s27);
2425*f5c631daSSadaf Ebrahimi   __ Frint32z(s15, s28);
2426*f5c631daSSadaf Ebrahimi   __ Frint32z(s16, s29);
2427*f5c631daSSadaf Ebrahimi   __ Frint32z(s17, s30);
2428*f5c631daSSadaf Ebrahimi   __ Frint32z(s18, s31);
2429*f5c631daSSadaf Ebrahimi 
2430*f5c631daSSadaf Ebrahimi   END();
2431*f5c631daSSadaf Ebrahimi 
2432*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2433*f5c631daSSadaf Ebrahimi     RUN();
2434*f5c631daSSadaf Ebrahimi 
2435*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2436*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2437*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s2);
2438*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s3);
2439*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
2440*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s5);
2441*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
2442*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s7);
2443*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s8);
2444*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2445*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2446*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2447*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s12);  // NaN.
2448*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s13);
2449*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN + 0x80, s14);
2450*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s15);  // Out of range.
2451*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0x7fffff80, s16);
2452*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0, s17);
2453*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT32_MIN, s18);
2454*f5c631daSSadaf Ebrahimi   }
2455*f5c631daSSadaf Ebrahimi }
2456*f5c631daSSadaf Ebrahimi 
2457*f5c631daSSadaf Ebrahimi TEST(frint32z_d) {
2458*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2459*f5c631daSSadaf Ebrahimi 
2460*f5c631daSSadaf Ebrahimi   START();
2461*f5c631daSSadaf Ebrahimi 
2462*f5c631daSSadaf Ebrahimi   __ Fmov(d13, 1.0);
2463*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 1.1);
2464*f5c631daSSadaf Ebrahimi   __ Fmov(d15, 1.5);
2465*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.9);
2466*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 2.5);
2467*f5c631daSSadaf Ebrahimi   __ Fmov(d18, -1.5);
2468*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -2.5);
2469*f5c631daSSadaf Ebrahimi   __ Fmov(d20, kFP64PositiveInfinity);
2470*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
2471*f5c631daSSadaf Ebrahimi   __ Fmov(d22, 0.0);
2472*f5c631daSSadaf Ebrahimi   __ Fmov(d23, -0.0);
2473*f5c631daSSadaf Ebrahimi   __ Fmov(d24, -0.2);
2474*f5c631daSSadaf Ebrahimi   __ Fmov(d25, kFP64DefaultNaN);
2475*f5c631daSSadaf Ebrahimi   __ Fmov(d26, INT32_MIN);
2476*f5c631daSSadaf Ebrahimi   __ Fmov(d27, INT32_MIN + 1);
2477*f5c631daSSadaf Ebrahimi   __ Fmov(d28, INT32_MAX);
2478*f5c631daSSadaf Ebrahimi   __ Fmov(d29, INT32_MAX - 1);
2479*f5c631daSSadaf Ebrahimi   __ Fmov(d30, FLT_MIN);
2480*f5c631daSSadaf Ebrahimi   __ Fmov(d31, FLT_MAX);
2481*f5c631daSSadaf Ebrahimi 
2482*f5c631daSSadaf Ebrahimi   __ Frint32z(d0, d13);
2483*f5c631daSSadaf Ebrahimi   __ Frint32z(d1, d14);
2484*f5c631daSSadaf Ebrahimi   __ Frint32z(d2, d15);
2485*f5c631daSSadaf Ebrahimi   __ Frint32z(d3, d16);
2486*f5c631daSSadaf Ebrahimi   __ Frint32z(d4, d17);
2487*f5c631daSSadaf Ebrahimi   __ Frint32z(d5, d18);
2488*f5c631daSSadaf Ebrahimi   __ Frint32z(d6, d19);
2489*f5c631daSSadaf Ebrahimi   __ Frint32z(d7, d20);
2490*f5c631daSSadaf Ebrahimi   __ Frint32z(d8, d21);
2491*f5c631daSSadaf Ebrahimi   __ Frint32z(d9, d22);
2492*f5c631daSSadaf Ebrahimi   __ Frint32z(d10, d23);
2493*f5c631daSSadaf Ebrahimi   __ Frint32z(d11, d24);
2494*f5c631daSSadaf Ebrahimi   __ Frint32z(d12, d25);
2495*f5c631daSSadaf Ebrahimi   __ Frint32z(d13, d26);
2496*f5c631daSSadaf Ebrahimi   __ Frint32z(d14, d27);
2497*f5c631daSSadaf Ebrahimi   __ Frint32z(d15, d28);
2498*f5c631daSSadaf Ebrahimi   __ Frint32z(d16, d29);
2499*f5c631daSSadaf Ebrahimi   __ Frint32z(d17, d30);
2500*f5c631daSSadaf Ebrahimi   __ Frint32z(d18, d31);
2501*f5c631daSSadaf Ebrahimi 
2502*f5c631daSSadaf Ebrahimi   END();
2503*f5c631daSSadaf Ebrahimi 
2504*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2505*f5c631daSSadaf Ebrahimi     RUN();
2506*f5c631daSSadaf Ebrahimi 
2507*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d0);
2508*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d1);
2509*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d2);
2510*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d3);
2511*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d4);
2512*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d5);
2513*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d6);
2514*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d7);
2515*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d8);
2516*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d9);
2517*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d10);
2518*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
2519*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d12);
2520*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d13);
2521*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN + 1, d14);
2522*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MAX, d15);
2523*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MAX - 1, d16);
2524*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0, d17);
2525*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT32_MIN, d18);
2526*f5c631daSSadaf Ebrahimi   }
2527*f5c631daSSadaf Ebrahimi }
2528*f5c631daSSadaf Ebrahimi 
2529*f5c631daSSadaf Ebrahimi TEST(frint64x_s) {
2530*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2531*f5c631daSSadaf Ebrahimi 
2532*f5c631daSSadaf Ebrahimi   START();
2533*f5c631daSSadaf Ebrahimi 
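  // As for Frint32x above, but limited to the int64_t range: NaN, infinite
  // and out-of-range inputs produce INT64_MIN.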
2534*f5c631daSSadaf Ebrahimi   __ Fmov(s13, 1.0);
2535*f5c631daSSadaf Ebrahimi   __ Fmov(s14, 1.1);
2536*f5c631daSSadaf Ebrahimi   __ Fmov(s15, 1.5);
2537*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.9);
2538*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 2.5);
2539*f5c631daSSadaf Ebrahimi   __ Fmov(s18, -1.5);
2540*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
2541*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP64PositiveInfinity);
2542*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP64NegativeInfinity);
2543*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0.0);
2544*f5c631daSSadaf Ebrahimi   __ Fmov(s23, -0.0);
2545*f5c631daSSadaf Ebrahimi   __ Fmov(s24, -0.2);
2546*f5c631daSSadaf Ebrahimi   __ Fmov(s25, kFP64DefaultNaN);
2547*f5c631daSSadaf Ebrahimi   __ Fmov(s26, INT64_MIN);
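  // Near 2^63 the FP32 spacing is 2^39, hence the 0x80'00000000 offsets below.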
2548*f5c631daSSadaf Ebrahimi   __ Fmov(s27, INT64_MIN + 0x80'00000000);  // The next representable FP32.
2549*f5c631daSSadaf Ebrahimi   __ Fmov(s28, 0x80000000'00000000);
2550*f5c631daSSadaf Ebrahimi   // The largest int64_t representable as FP32.
2551*f5c631daSSadaf Ebrahimi   __ Fmov(s29, 0x7fffff80'00000000);
2552*f5c631daSSadaf Ebrahimi   __ Fmov(s30, FLT_MIN);
2553*f5c631daSSadaf Ebrahimi   __ Fmov(s31, FLT_MAX);
2554*f5c631daSSadaf Ebrahimi 
2555*f5c631daSSadaf Ebrahimi   __ Frint64x(s0, s13);
2556*f5c631daSSadaf Ebrahimi   __ Frint64x(s1, s14);
2557*f5c631daSSadaf Ebrahimi   __ Frint64x(s2, s15);
2558*f5c631daSSadaf Ebrahimi   __ Frint64x(s3, s16);
2559*f5c631daSSadaf Ebrahimi   __ Frint64x(s4, s17);
2560*f5c631daSSadaf Ebrahimi   __ Frint64x(s5, s18);
2561*f5c631daSSadaf Ebrahimi   __ Frint64x(s6, s19);
2562*f5c631daSSadaf Ebrahimi   __ Frint64x(s7, s20);
2563*f5c631daSSadaf Ebrahimi   __ Frint64x(s8, s21);
2564*f5c631daSSadaf Ebrahimi   __ Frint64x(s9, s22);
2565*f5c631daSSadaf Ebrahimi   __ Frint64x(s10, s23);
2566*f5c631daSSadaf Ebrahimi   __ Frint64x(s11, s24);
2567*f5c631daSSadaf Ebrahimi   __ Frint64x(s12, s25);
2568*f5c631daSSadaf Ebrahimi   __ Frint64x(s13, s26);
2569*f5c631daSSadaf Ebrahimi   __ Frint64x(s14, s27);
2570*f5c631daSSadaf Ebrahimi   __ Frint64x(s15, s28);
2571*f5c631daSSadaf Ebrahimi   __ Frint64x(s16, s29);
2572*f5c631daSSadaf Ebrahimi   __ Frint64x(s17, s30);
2573*f5c631daSSadaf Ebrahimi   __ Frint64x(s18, s31);
2574*f5c631daSSadaf Ebrahimi 
2575*f5c631daSSadaf Ebrahimi   END();
2576*f5c631daSSadaf Ebrahimi 
2577*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2578*f5c631daSSadaf Ebrahimi     RUN();
2579*f5c631daSSadaf Ebrahimi 
2580*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2581*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2582*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
2583*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
2584*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
2585*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
2586*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
2587*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s7);
2588*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s8);
2589*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2590*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2591*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2592*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s12);  // NaN.
2593*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s13);
2594*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN + 0x80'00000000, s14);
2595*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s15);  // Out of range.
2596*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0x7fffff80'00000000, s16);
2597*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0, s17);
2598*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s18);
2599*f5c631daSSadaf Ebrahimi   }
2600*f5c631daSSadaf Ebrahimi }
2601*f5c631daSSadaf Ebrahimi 
2602*f5c631daSSadaf Ebrahimi TEST(frint64x_d) {
2603*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2604*f5c631daSSadaf Ebrahimi 
2605*f5c631daSSadaf Ebrahimi   START();
2606*f5c631daSSadaf Ebrahimi 
2607*f5c631daSSadaf Ebrahimi   __ Fmov(d13, 1.0);
2608*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 1.1);
2609*f5c631daSSadaf Ebrahimi   __ Fmov(d15, 1.5);
2610*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.9);
2611*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 2.5);
2612*f5c631daSSadaf Ebrahimi   __ Fmov(d18, -1.5);
2613*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -2.5);
2614*f5c631daSSadaf Ebrahimi   __ Fmov(d20, kFP64PositiveInfinity);
2615*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
2616*f5c631daSSadaf Ebrahimi   __ Fmov(d22, 0.0);
2617*f5c631daSSadaf Ebrahimi   __ Fmov(d23, -0.0);
2618*f5c631daSSadaf Ebrahimi   __ Fmov(d24, -0.2);
2619*f5c631daSSadaf Ebrahimi   __ Fmov(d25, kFP64DefaultNaN);
2620*f5c631daSSadaf Ebrahimi   __ Fmov(d26, INT64_MIN);
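  // Near 2^63 the FP64 spacing (53-bit significand) is 2^10, hence the 0x400
  // offsets below.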
2621*f5c631daSSadaf Ebrahimi   __ Fmov(d27, INT64_MIN + 0x400);  // The next representable FP64.
2622*f5c631daSSadaf Ebrahimi   __ Fmov(d28, 0x80000000'00000000);
2623*f5c631daSSadaf Ebrahimi   // The largest int64_t representable as FP64.
2624*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7fffffff'fffffc00);
2625*f5c631daSSadaf Ebrahimi   __ Fmov(d30, FLT_MIN);
2626*f5c631daSSadaf Ebrahimi   __ Fmov(d31, FLT_MAX);
2627*f5c631daSSadaf Ebrahimi 
2628*f5c631daSSadaf Ebrahimi   __ Frint64x(d0, d13);
2629*f5c631daSSadaf Ebrahimi   __ Frint64x(d1, d14);
2630*f5c631daSSadaf Ebrahimi   __ Frint64x(d2, d15);
2631*f5c631daSSadaf Ebrahimi   __ Frint64x(d3, d16);
2632*f5c631daSSadaf Ebrahimi   __ Frint64x(d4, d17);
2633*f5c631daSSadaf Ebrahimi   __ Frint64x(d5, d18);
2634*f5c631daSSadaf Ebrahimi   __ Frint64x(d6, d19);
2635*f5c631daSSadaf Ebrahimi   __ Frint64x(d7, d20);
2636*f5c631daSSadaf Ebrahimi   __ Frint64x(d8, d21);
2637*f5c631daSSadaf Ebrahimi   __ Frint64x(d9, d22);
2638*f5c631daSSadaf Ebrahimi   __ Frint64x(d10, d23);
2639*f5c631daSSadaf Ebrahimi   __ Frint64x(d11, d24);
2640*f5c631daSSadaf Ebrahimi   __ Frint64x(d12, d25);
2641*f5c631daSSadaf Ebrahimi   __ Frint64x(d13, d26);
2642*f5c631daSSadaf Ebrahimi   __ Frint64x(d14, d27);
2643*f5c631daSSadaf Ebrahimi   __ Frint64x(d15, d28);
2644*f5c631daSSadaf Ebrahimi   __ Frint64x(d16, d29);
2645*f5c631daSSadaf Ebrahimi   __ Frint64x(d17, d30);
2646*f5c631daSSadaf Ebrahimi   __ Frint64x(d18, d31);
2647*f5c631daSSadaf Ebrahimi 
2648*f5c631daSSadaf Ebrahimi   END();
2649*f5c631daSSadaf Ebrahimi 
2650*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2651*f5c631daSSadaf Ebrahimi     RUN();
2652*f5c631daSSadaf Ebrahimi 
2653*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d0);
2654*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d1);
2655*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d2);
2656*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d3);
2657*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d4);
2658*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d5);
2659*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d6);
2660*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d7);
2661*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d8);
2662*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d9);
2663*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d10);
2664*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
2665*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d12);  // NaN.
2666*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d13);
2667*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN + 0x400, d14);
2668*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d15);  // Out of range.
2669*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0x7fffffff'fffffc00, d16);
2670*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0, d17);
2671*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d18);
2672*f5c631daSSadaf Ebrahimi   }
2673*f5c631daSSadaf Ebrahimi }
2674*f5c631daSSadaf Ebrahimi 
2675*f5c631daSSadaf Ebrahimi TEST(frint64z_s) {
2676*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2677*f5c631daSSadaf Ebrahimi 
2678*f5c631daSSadaf Ebrahimi   START();
2679*f5c631daSSadaf Ebrahimi 
2680*f5c631daSSadaf Ebrahimi   __ Fmov(s13, 1.0);
2681*f5c631daSSadaf Ebrahimi   __ Fmov(s14, 1.1);
2682*f5c631daSSadaf Ebrahimi   __ Fmov(s15, 1.5);
2683*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.9);
2684*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 2.5);
2685*f5c631daSSadaf Ebrahimi   __ Fmov(s18, -1.5);
2686*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
2687*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP64PositiveInfinity);
2688*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP64NegativeInfinity);
2689*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0.0);
2690*f5c631daSSadaf Ebrahimi   __ Fmov(s23, -0.0);
2691*f5c631daSSadaf Ebrahimi   __ Fmov(s24, -0.2);
2692*f5c631daSSadaf Ebrahimi   __ Fmov(s25, kFP64DefaultNaN);
2693*f5c631daSSadaf Ebrahimi   __ Fmov(s26, INT64_MIN);
2694*f5c631daSSadaf Ebrahimi   __ Fmov(s27, INT64_MIN + 0x80'00000000);  // The next representable FP32.
2695*f5c631daSSadaf Ebrahimi   __ Fmov(s28, 0x80000000'00000000);
2696*f5c631daSSadaf Ebrahimi   // The largest int64_t representable as FP32.
2697*f5c631daSSadaf Ebrahimi   __ Fmov(s29, 0x7fffff80'00000000);
2698*f5c631daSSadaf Ebrahimi   __ Fmov(s30, FLT_MIN);
2699*f5c631daSSadaf Ebrahimi   __ Fmov(s31, FLT_MAX);
2700*f5c631daSSadaf Ebrahimi 
2701*f5c631daSSadaf Ebrahimi   __ Frint64z(s0, s13);
2702*f5c631daSSadaf Ebrahimi   __ Frint64z(s1, s14);
2703*f5c631daSSadaf Ebrahimi   __ Frint64z(s2, s15);
2704*f5c631daSSadaf Ebrahimi   __ Frint64z(s3, s16);
2705*f5c631daSSadaf Ebrahimi   __ Frint64z(s4, s17);
2706*f5c631daSSadaf Ebrahimi   __ Frint64z(s5, s18);
2707*f5c631daSSadaf Ebrahimi   __ Frint64z(s6, s19);
2708*f5c631daSSadaf Ebrahimi   __ Frint64z(s7, s20);
2709*f5c631daSSadaf Ebrahimi   __ Frint64z(s8, s21);
2710*f5c631daSSadaf Ebrahimi   __ Frint64z(s9, s22);
2711*f5c631daSSadaf Ebrahimi   __ Frint64z(s10, s23);
2712*f5c631daSSadaf Ebrahimi   __ Frint64z(s11, s24);
2713*f5c631daSSadaf Ebrahimi   __ Frint64z(s12, s25);
2714*f5c631daSSadaf Ebrahimi   __ Frint64z(s13, s26);
2715*f5c631daSSadaf Ebrahimi   __ Frint64z(s14, s27);
2716*f5c631daSSadaf Ebrahimi   __ Frint64z(s15, s28);
2717*f5c631daSSadaf Ebrahimi   __ Frint64z(s16, s29);
2718*f5c631daSSadaf Ebrahimi   __ Frint64z(s17, s30);
2719*f5c631daSSadaf Ebrahimi   __ Frint64z(s18, s31);
2720*f5c631daSSadaf Ebrahimi 
2721*f5c631daSSadaf Ebrahimi   END();
2722*f5c631daSSadaf Ebrahimi 
2723*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2724*f5c631daSSadaf Ebrahimi     RUN();
2725*f5c631daSSadaf Ebrahimi 
2726*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2727*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2728*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s2);
2729*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s3);
2730*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
2731*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s5);
2732*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
2733*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s7);
2734*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s8);
2735*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2736*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2737*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2738*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s12);  // NaN.
2739*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s13);
2740*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN + 0x80'00000000, s14);
2741*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s15);  // Out of range.
2742*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0x7fffff80'00000000, s16);
2743*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0, s17);
2744*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(INT64_MIN, s18);
2745*f5c631daSSadaf Ebrahimi   }
2746*f5c631daSSadaf Ebrahimi }
2747*f5c631daSSadaf Ebrahimi 
2748*f5c631daSSadaf Ebrahimi TEST(frint64z_d) {
2749*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFrintToFixedSizedInt);
2750*f5c631daSSadaf Ebrahimi 
2751*f5c631daSSadaf Ebrahimi   START();
2752*f5c631daSSadaf Ebrahimi 
2753*f5c631daSSadaf Ebrahimi   __ Fmov(d13, 1.0);
2754*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 1.1);
2755*f5c631daSSadaf Ebrahimi   __ Fmov(d15, 1.5);
2756*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.9);
2757*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 2.5);
2758*f5c631daSSadaf Ebrahimi   __ Fmov(d18, -1.5);
2759*f5c631daSSadaf Ebrahimi   __ Fmov(d19, -2.5);
2760*f5c631daSSadaf Ebrahimi   __ Fmov(d20, kFP64PositiveInfinity);
2761*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
2762*f5c631daSSadaf Ebrahimi   __ Fmov(d22, 0.0);
2763*f5c631daSSadaf Ebrahimi   __ Fmov(d23, -0.0);
2764*f5c631daSSadaf Ebrahimi   __ Fmov(d24, -0.2);
2765*f5c631daSSadaf Ebrahimi   __ Fmov(d25, kFP64DefaultNaN);
2766*f5c631daSSadaf Ebrahimi   __ Fmov(d26, INT64_MIN);
2767*f5c631daSSadaf Ebrahimi   __ Fmov(d27, INT64_MIN + 0x400);  // The next representable FP64.
2768*f5c631daSSadaf Ebrahimi   __ Fmov(d28, 0x80000000'00000000);
2769*f5c631daSSadaf Ebrahimi   // The largest int64_t representable as FP64.
2770*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7fffffff'fffffc00);
2771*f5c631daSSadaf Ebrahimi   __ Fmov(d30, FLT_MIN);
2772*f5c631daSSadaf Ebrahimi   __ Fmov(d31, FLT_MAX);
2773*f5c631daSSadaf Ebrahimi 
2774*f5c631daSSadaf Ebrahimi   __ Frint64z(d0, d13);
2775*f5c631daSSadaf Ebrahimi   __ Frint64z(d1, d14);
2776*f5c631daSSadaf Ebrahimi   __ Frint64z(d2, d15);
2777*f5c631daSSadaf Ebrahimi   __ Frint64z(d3, d16);
2778*f5c631daSSadaf Ebrahimi   __ Frint64z(d4, d17);
2779*f5c631daSSadaf Ebrahimi   __ Frint64z(d5, d18);
2780*f5c631daSSadaf Ebrahimi   __ Frint64z(d6, d19);
2781*f5c631daSSadaf Ebrahimi   __ Frint64z(d7, d20);
2782*f5c631daSSadaf Ebrahimi   __ Frint64z(d8, d21);
2783*f5c631daSSadaf Ebrahimi   __ Frint64z(d9, d22);
2784*f5c631daSSadaf Ebrahimi   __ Frint64z(d10, d23);
2785*f5c631daSSadaf Ebrahimi   __ Frint64z(d11, d24);
2786*f5c631daSSadaf Ebrahimi   __ Frint64z(d12, d25);
2787*f5c631daSSadaf Ebrahimi   __ Frint64z(d13, d26);
2788*f5c631daSSadaf Ebrahimi   __ Frint64z(d14, d27);
2789*f5c631daSSadaf Ebrahimi   __ Frint64z(d15, d28);
2790*f5c631daSSadaf Ebrahimi   __ Frint64z(d16, d29);
2791*f5c631daSSadaf Ebrahimi   __ Frint64z(d17, d30);
2792*f5c631daSSadaf Ebrahimi   __ Frint64z(d18, d31);
2793*f5c631daSSadaf Ebrahimi 
2794*f5c631daSSadaf Ebrahimi   END();
2795*f5c631daSSadaf Ebrahimi 
2796*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2797*f5c631daSSadaf Ebrahimi     RUN();
2798*f5c631daSSadaf Ebrahimi 
2799*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d0);
2800*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d1);
2801*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d2);
2802*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d3);
2803*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d4);
2804*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d5);
2805*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d6);
2806*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d7);
2807*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d8);
2808*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d9);
2809*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d10);
2810*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d11);
2811*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d12);  // NaN.
2812*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d13);
2813*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN + 0x400, d14);
2814*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d15);  // Out of range.
2815*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0x7fffffff'fffffc00, d16);
2816*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0, d17);
2817*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(INT64_MIN, d18);
2818*f5c631daSSadaf Ebrahimi   }
2819*f5c631daSSadaf Ebrahimi }
2820*f5c631daSSadaf Ebrahimi 
2821*f5c631daSSadaf Ebrahimi TEST(frinta) {
2822*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2823*f5c631daSSadaf Ebrahimi 
2824*f5c631daSSadaf Ebrahimi   START();
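  // Frinta rounds to the nearest integral value with ties away from zero,
  // so 2.5 becomes 3.0 and -2.5 becomes -3.0.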
2825*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
2826*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
2827*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
2828*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
2829*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
2830*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
2831*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
2832*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
2833*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
2834*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
2835*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
2836*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
2837*f5c631daSSadaf Ebrahimi 
2838*f5c631daSSadaf Ebrahimi   __ Frinta(s0, s16);
2839*f5c631daSSadaf Ebrahimi   __ Frinta(s1, s17);
2840*f5c631daSSadaf Ebrahimi   __ Frinta(s2, s18);
2841*f5c631daSSadaf Ebrahimi   __ Frinta(s3, s19);
2842*f5c631daSSadaf Ebrahimi   __ Frinta(s4, s20);
2843*f5c631daSSadaf Ebrahimi   __ Frinta(s5, s21);
2844*f5c631daSSadaf Ebrahimi   __ Frinta(s6, s22);
2845*f5c631daSSadaf Ebrahimi   __ Frinta(s7, s23);
2846*f5c631daSSadaf Ebrahimi   __ Frinta(s8, s24);
2847*f5c631daSSadaf Ebrahimi   __ Frinta(s9, s25);
2848*f5c631daSSadaf Ebrahimi   __ Frinta(s10, s26);
2849*f5c631daSSadaf Ebrahimi   __ Frinta(s11, s27);
2850*f5c631daSSadaf Ebrahimi 
2851*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
2852*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
2853*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
2854*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
2855*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
2856*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
2857*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
2858*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
2859*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
2860*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
2861*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
2862*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
2863*f5c631daSSadaf Ebrahimi 
2864*f5c631daSSadaf Ebrahimi   __ Frinta(d12, d16);
2865*f5c631daSSadaf Ebrahimi   __ Frinta(d13, d17);
2866*f5c631daSSadaf Ebrahimi   __ Frinta(d14, d18);
2867*f5c631daSSadaf Ebrahimi   __ Frinta(d15, d19);
2868*f5c631daSSadaf Ebrahimi   __ Frinta(d16, d20);
2869*f5c631daSSadaf Ebrahimi   __ Frinta(d17, d21);
2870*f5c631daSSadaf Ebrahimi   __ Frinta(d18, d22);
2871*f5c631daSSadaf Ebrahimi   __ Frinta(d19, d23);
2872*f5c631daSSadaf Ebrahimi   __ Frinta(d20, d24);
2873*f5c631daSSadaf Ebrahimi   __ Frinta(d21, d25);
2874*f5c631daSSadaf Ebrahimi   __ Frinta(d22, d26);
2875*f5c631daSSadaf Ebrahimi   __ Frinta(d23, d27);
2876*f5c631daSSadaf Ebrahimi   END();
2877*f5c631daSSadaf Ebrahimi 
2878*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2879*f5c631daSSadaf Ebrahimi     RUN();
2880*f5c631daSSadaf Ebrahimi 
2881*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2882*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2883*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
2884*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
2885*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(3.0, s4);
2886*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
2887*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-3.0, s6);
2888*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
2889*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
2890*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2891*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2892*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2893*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
2894*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
2895*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d14);
2896*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
2897*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(3.0, d16);
2898*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
2899*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-3.0, d18);
2900*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
2901*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
2902*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
2903*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
2904*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d23);
2905*f5c631daSSadaf Ebrahimi   }
2906*f5c631daSSadaf Ebrahimi }
2907*f5c631daSSadaf Ebrahimi 
2908*f5c631daSSadaf Ebrahimi 
2909*f5c631daSSadaf Ebrahimi TEST(frinti) {
2910*f5c631daSSadaf Ebrahimi   // VIXL only supports the round-to-nearest FPCR mode, so this test has the
2911*f5c631daSSadaf Ebrahimi   // same results as frintn.
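  // (Round-to-nearest ties to even, so 2.5 and -2.5 round to 2.0 and -2.0.)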
2912*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
2913*f5c631daSSadaf Ebrahimi 
2914*f5c631daSSadaf Ebrahimi   START();
2915*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
2916*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
2917*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
2918*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
2919*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
2920*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
2921*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
2922*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
2923*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
2924*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
2925*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
2926*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
2927*f5c631daSSadaf Ebrahimi 
2928*f5c631daSSadaf Ebrahimi   __ Frinti(s0, s16);
2929*f5c631daSSadaf Ebrahimi   __ Frinti(s1, s17);
2930*f5c631daSSadaf Ebrahimi   __ Frinti(s2, s18);
2931*f5c631daSSadaf Ebrahimi   __ Frinti(s3, s19);
2932*f5c631daSSadaf Ebrahimi   __ Frinti(s4, s20);
2933*f5c631daSSadaf Ebrahimi   __ Frinti(s5, s21);
2934*f5c631daSSadaf Ebrahimi   __ Frinti(s6, s22);
2935*f5c631daSSadaf Ebrahimi   __ Frinti(s7, s23);
2936*f5c631daSSadaf Ebrahimi   __ Frinti(s8, s24);
2937*f5c631daSSadaf Ebrahimi   __ Frinti(s9, s25);
2938*f5c631daSSadaf Ebrahimi   __ Frinti(s10, s26);
2939*f5c631daSSadaf Ebrahimi   __ Frinti(s11, s27);
2940*f5c631daSSadaf Ebrahimi 
2941*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
2942*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
2943*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
2944*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
2945*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
2946*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
2947*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
2948*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
2949*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
2950*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
2951*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
2952*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
2953*f5c631daSSadaf Ebrahimi 
2954*f5c631daSSadaf Ebrahimi   __ Frinti(d12, d16);
2955*f5c631daSSadaf Ebrahimi   __ Frinti(d13, d17);
2956*f5c631daSSadaf Ebrahimi   __ Frinti(d14, d18);
2957*f5c631daSSadaf Ebrahimi   __ Frinti(d15, d19);
2958*f5c631daSSadaf Ebrahimi   __ Frinti(d16, d20);
2959*f5c631daSSadaf Ebrahimi   __ Frinti(d17, d21);
2960*f5c631daSSadaf Ebrahimi   __ Frinti(d18, d22);
2961*f5c631daSSadaf Ebrahimi   __ Frinti(d19, d23);
2962*f5c631daSSadaf Ebrahimi   __ Frinti(d20, d24);
2963*f5c631daSSadaf Ebrahimi   __ Frinti(d21, d25);
2964*f5c631daSSadaf Ebrahimi   __ Frinti(d22, d26);
2965*f5c631daSSadaf Ebrahimi   __ Frinti(d23, d27);
2966*f5c631daSSadaf Ebrahimi   END();
2967*f5c631daSSadaf Ebrahimi 
2968*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
2969*f5c631daSSadaf Ebrahimi     RUN();
2970*f5c631daSSadaf Ebrahimi 
2971*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
2972*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
2973*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
2974*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
2975*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
2976*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
2977*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
2978*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
2979*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
2980*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
2981*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
2982*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
2983*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
2984*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
2985*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d14);
2986*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
2987*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d16);
2988*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
2989*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d18);
2990*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
2991*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
2992*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
2993*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
2994*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d23);
2995*f5c631daSSadaf Ebrahimi   }
2996*f5c631daSSadaf Ebrahimi }
2997*f5c631daSSadaf Ebrahimi 
2998*f5c631daSSadaf Ebrahimi 
2999*f5c631daSSadaf Ebrahimi TEST(frintm) {
3000*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3001*f5c631daSSadaf Ebrahimi 
3002*f5c631daSSadaf Ebrahimi   START();
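  // Frintm rounds towards minus infinity, so 1.9 becomes 1.0 and -0.2
  // becomes -1.0.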
3003*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3004*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3005*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3006*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3007*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3008*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3009*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3010*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3011*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3012*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3013*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3014*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
3015*f5c631daSSadaf Ebrahimi 
3016*f5c631daSSadaf Ebrahimi   __ Frintm(s0, s16);
3017*f5c631daSSadaf Ebrahimi   __ Frintm(s1, s17);
3018*f5c631daSSadaf Ebrahimi   __ Frintm(s2, s18);
3019*f5c631daSSadaf Ebrahimi   __ Frintm(s3, s19);
3020*f5c631daSSadaf Ebrahimi   __ Frintm(s4, s20);
3021*f5c631daSSadaf Ebrahimi   __ Frintm(s5, s21);
3022*f5c631daSSadaf Ebrahimi   __ Frintm(s6, s22);
3023*f5c631daSSadaf Ebrahimi   __ Frintm(s7, s23);
3024*f5c631daSSadaf Ebrahimi   __ Frintm(s8, s24);
3025*f5c631daSSadaf Ebrahimi   __ Frintm(s9, s25);
3026*f5c631daSSadaf Ebrahimi   __ Frintm(s10, s26);
3027*f5c631daSSadaf Ebrahimi   __ Frintm(s11, s27);
3028*f5c631daSSadaf Ebrahimi 
3029*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3030*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3031*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3032*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3033*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3034*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3035*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3036*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3037*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3038*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3039*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3040*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
3041*f5c631daSSadaf Ebrahimi 
3042*f5c631daSSadaf Ebrahimi   __ Frintm(d12, d16);
3043*f5c631daSSadaf Ebrahimi   __ Frintm(d13, d17);
3044*f5c631daSSadaf Ebrahimi   __ Frintm(d14, d18);
3045*f5c631daSSadaf Ebrahimi   __ Frintm(d15, d19);
3046*f5c631daSSadaf Ebrahimi   __ Frintm(d16, d20);
3047*f5c631daSSadaf Ebrahimi   __ Frintm(d17, d21);
3048*f5c631daSSadaf Ebrahimi   __ Frintm(d18, d22);
3049*f5c631daSSadaf Ebrahimi   __ Frintm(d19, d23);
3050*f5c631daSSadaf Ebrahimi   __ Frintm(d20, d24);
3051*f5c631daSSadaf Ebrahimi   __ Frintm(d21, d25);
3052*f5c631daSSadaf Ebrahimi   __ Frintm(d22, d26);
3053*f5c631daSSadaf Ebrahimi   __ Frintm(d23, d27);
3054*f5c631daSSadaf Ebrahimi   END();
3055*f5c631daSSadaf Ebrahimi 
3056*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3057*f5c631daSSadaf Ebrahimi     RUN();
3058*f5c631daSSadaf Ebrahimi 
3059*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
3060*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
3061*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s2);
3062*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s3);
3063*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
3064*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
3065*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-3.0, s6);
3066*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3067*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3068*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
3069*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
3070*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s11);
3071*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
3072*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
3073*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d14);
3074*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d15);
3075*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d16);
3076*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
3077*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-3.0, d18);
3078*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
3079*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
3080*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
3081*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
3082*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d23);
3083*f5c631daSSadaf Ebrahimi   }
3084*f5c631daSSadaf Ebrahimi }
3085*f5c631daSSadaf Ebrahimi 
3086*f5c631daSSadaf Ebrahimi 
3087*f5c631daSSadaf Ebrahimi TEST(frintn) {
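  // Frintn rounds to the nearest integral value with ties to even: 1.5 and
  // 2.5 both round to 2.0, and -2.5 rounds to -2.0 below.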
3088*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3089*f5c631daSSadaf Ebrahimi 
3090*f5c631daSSadaf Ebrahimi   START();
3091*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3092*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3093*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3094*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3095*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3096*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3097*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3098*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3099*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3100*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3101*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3102*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
3103*f5c631daSSadaf Ebrahimi 
3104*f5c631daSSadaf Ebrahimi   __ Frintn(s0, s16);
3105*f5c631daSSadaf Ebrahimi   __ Frintn(s1, s17);
3106*f5c631daSSadaf Ebrahimi   __ Frintn(s2, s18);
3107*f5c631daSSadaf Ebrahimi   __ Frintn(s3, s19);
3108*f5c631daSSadaf Ebrahimi   __ Frintn(s4, s20);
3109*f5c631daSSadaf Ebrahimi   __ Frintn(s5, s21);
3110*f5c631daSSadaf Ebrahimi   __ Frintn(s6, s22);
3111*f5c631daSSadaf Ebrahimi   __ Frintn(s7, s23);
3112*f5c631daSSadaf Ebrahimi   __ Frintn(s8, s24);
3113*f5c631daSSadaf Ebrahimi   __ Frintn(s9, s25);
3114*f5c631daSSadaf Ebrahimi   __ Frintn(s10, s26);
3115*f5c631daSSadaf Ebrahimi   __ Frintn(s11, s27);
3116*f5c631daSSadaf Ebrahimi 
3117*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3118*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3119*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3120*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3121*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3122*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3123*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3124*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3125*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3126*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3127*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3128*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
3129*f5c631daSSadaf Ebrahimi 
3130*f5c631daSSadaf Ebrahimi   __ Frintn(d12, d16);
3131*f5c631daSSadaf Ebrahimi   __ Frintn(d13, d17);
3132*f5c631daSSadaf Ebrahimi   __ Frintn(d14, d18);
3133*f5c631daSSadaf Ebrahimi   __ Frintn(d15, d19);
3134*f5c631daSSadaf Ebrahimi   __ Frintn(d16, d20);
3135*f5c631daSSadaf Ebrahimi   __ Frintn(d17, d21);
3136*f5c631daSSadaf Ebrahimi   __ Frintn(d18, d22);
3137*f5c631daSSadaf Ebrahimi   __ Frintn(d19, d23);
3138*f5c631daSSadaf Ebrahimi   __ Frintn(d20, d24);
3139*f5c631daSSadaf Ebrahimi   __ Frintn(d21, d25);
3140*f5c631daSSadaf Ebrahimi   __ Frintn(d22, d26);
3141*f5c631daSSadaf Ebrahimi   __ Frintn(d23, d27);
3142*f5c631daSSadaf Ebrahimi   END();
3143*f5c631daSSadaf Ebrahimi 
3144*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3145*f5c631daSSadaf Ebrahimi     RUN();
3146*f5c631daSSadaf Ebrahimi 
3147*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
3148*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
3149*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
3150*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
3151*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
3152*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
3153*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
3154*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3155*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3156*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
3157*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
3158*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
3159*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
3160*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
3161*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d14);
3162*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
3163*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d16);
3164*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
3165*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d18);
3166*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
3167*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
3168*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
3169*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
3170*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d23);
3171*f5c631daSSadaf Ebrahimi   }
3172*f5c631daSSadaf Ebrahimi }
3173*f5c631daSSadaf Ebrahimi 
3174*f5c631daSSadaf Ebrahimi 
3175*f5c631daSSadaf Ebrahimi TEST(frintp) {
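  // Frintp rounds toward plus infinity (ceiling): for example, 1.1 -> 2.0
  // and -1.5 -> -1.0 below.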
3176*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3177*f5c631daSSadaf Ebrahimi 
3178*f5c631daSSadaf Ebrahimi   START();
3179*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3180*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3181*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3182*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3183*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3184*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3185*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3186*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3187*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3188*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3189*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3190*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
3191*f5c631daSSadaf Ebrahimi 
3192*f5c631daSSadaf Ebrahimi   __ Frintp(s0, s16);
3193*f5c631daSSadaf Ebrahimi   __ Frintp(s1, s17);
3194*f5c631daSSadaf Ebrahimi   __ Frintp(s2, s18);
3195*f5c631daSSadaf Ebrahimi   __ Frintp(s3, s19);
3196*f5c631daSSadaf Ebrahimi   __ Frintp(s4, s20);
3197*f5c631daSSadaf Ebrahimi   __ Frintp(s5, s21);
3198*f5c631daSSadaf Ebrahimi   __ Frintp(s6, s22);
3199*f5c631daSSadaf Ebrahimi   __ Frintp(s7, s23);
3200*f5c631daSSadaf Ebrahimi   __ Frintp(s8, s24);
3201*f5c631daSSadaf Ebrahimi   __ Frintp(s9, s25);
3202*f5c631daSSadaf Ebrahimi   __ Frintp(s10, s26);
3203*f5c631daSSadaf Ebrahimi   __ Frintp(s11, s27);
3204*f5c631daSSadaf Ebrahimi 
3205*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3206*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3207*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3208*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3209*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3210*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3211*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3212*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3213*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3214*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3215*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3216*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
3217*f5c631daSSadaf Ebrahimi 
3218*f5c631daSSadaf Ebrahimi   __ Frintp(d12, d16);
3219*f5c631daSSadaf Ebrahimi   __ Frintp(d13, d17);
3220*f5c631daSSadaf Ebrahimi   __ Frintp(d14, d18);
3221*f5c631daSSadaf Ebrahimi   __ Frintp(d15, d19);
3222*f5c631daSSadaf Ebrahimi   __ Frintp(d16, d20);
3223*f5c631daSSadaf Ebrahimi   __ Frintp(d17, d21);
3224*f5c631daSSadaf Ebrahimi   __ Frintp(d18, d22);
3225*f5c631daSSadaf Ebrahimi   __ Frintp(d19, d23);
3226*f5c631daSSadaf Ebrahimi   __ Frintp(d20, d24);
3227*f5c631daSSadaf Ebrahimi   __ Frintp(d21, d25);
3228*f5c631daSSadaf Ebrahimi   __ Frintp(d22, d26);
3229*f5c631daSSadaf Ebrahimi   __ Frintp(d23, d27);
3230*f5c631daSSadaf Ebrahimi   END();
3231*f5c631daSSadaf Ebrahimi 
3232*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3233*f5c631daSSadaf Ebrahimi     RUN();
3234*f5c631daSSadaf Ebrahimi 
3235*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
3236*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s1);
3237*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
3238*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
3239*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(3.0, s4);
3240*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s5);
3241*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
3242*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3243*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3244*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
3245*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
3246*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
3247*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
3248*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d13);
3249*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d14);
3250*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
3251*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(3.0, d16);
3252*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d17);
3253*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d18);
3254*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
3255*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
3256*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
3257*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
3258*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d23);
3259*f5c631daSSadaf Ebrahimi   }
3260*f5c631daSSadaf Ebrahimi }
3261*f5c631daSSadaf Ebrahimi 
3262*f5c631daSSadaf Ebrahimi 
3263*f5c631daSSadaf Ebrahimi TEST(frintx) {
3264*f5c631daSSadaf Ebrahimi   // VIXL only supports the round-to-nearest FPCR mode, and it doesn't support
3265*f5c631daSSadaf Ebrahimi   // FP exceptions, so this test has the same results as frintn (and frinti).
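  // In particular, the tie cases follow ties-to-even: 1.5 and 2.5 both round
  // to 2.0, and -1.5 rounds to -2.0.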
3266*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3267*f5c631daSSadaf Ebrahimi 
3268*f5c631daSSadaf Ebrahimi   START();
3269*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3270*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3271*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3272*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3273*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3274*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3275*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3276*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3277*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3278*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3279*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3280*f5c631daSSadaf Ebrahimi   __ Fmov(s27, -0.2);
3281*f5c631daSSadaf Ebrahimi 
3282*f5c631daSSadaf Ebrahimi   __ Frintx(s0, s16);
3283*f5c631daSSadaf Ebrahimi   __ Frintx(s1, s17);
3284*f5c631daSSadaf Ebrahimi   __ Frintx(s2, s18);
3285*f5c631daSSadaf Ebrahimi   __ Frintx(s3, s19);
3286*f5c631daSSadaf Ebrahimi   __ Frintx(s4, s20);
3287*f5c631daSSadaf Ebrahimi   __ Frintx(s5, s21);
3288*f5c631daSSadaf Ebrahimi   __ Frintx(s6, s22);
3289*f5c631daSSadaf Ebrahimi   __ Frintx(s7, s23);
3290*f5c631daSSadaf Ebrahimi   __ Frintx(s8, s24);
3291*f5c631daSSadaf Ebrahimi   __ Frintx(s9, s25);
3292*f5c631daSSadaf Ebrahimi   __ Frintx(s10, s26);
3293*f5c631daSSadaf Ebrahimi   __ Frintx(s11, s27);
3294*f5c631daSSadaf Ebrahimi 
3295*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3296*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3297*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3298*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3299*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3300*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3301*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3302*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3303*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3304*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3305*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3306*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.2);
3307*f5c631daSSadaf Ebrahimi 
3308*f5c631daSSadaf Ebrahimi   __ Frintx(d12, d16);
3309*f5c631daSSadaf Ebrahimi   __ Frintx(d13, d17);
3310*f5c631daSSadaf Ebrahimi   __ Frintx(d14, d18);
3311*f5c631daSSadaf Ebrahimi   __ Frintx(d15, d19);
3312*f5c631daSSadaf Ebrahimi   __ Frintx(d16, d20);
3313*f5c631daSSadaf Ebrahimi   __ Frintx(d17, d21);
3314*f5c631daSSadaf Ebrahimi   __ Frintx(d18, d22);
3315*f5c631daSSadaf Ebrahimi   __ Frintx(d19, d23);
3316*f5c631daSSadaf Ebrahimi   __ Frintx(d20, d24);
3317*f5c631daSSadaf Ebrahimi   __ Frintx(d21, d25);
3318*f5c631daSSadaf Ebrahimi   __ Frintx(d22, d26);
3319*f5c631daSSadaf Ebrahimi   __ Frintx(d23, d27);
3320*f5c631daSSadaf Ebrahimi   END();
3321*f5c631daSSadaf Ebrahimi 
3322*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3323*f5c631daSSadaf Ebrahimi     RUN();
3324*f5c631daSSadaf Ebrahimi 
3325*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
3326*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
3327*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s2);
3328*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s3);
3329*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
3330*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s5);
3331*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
3332*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3333*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3334*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
3335*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
3336*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s11);
3337*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
3338*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
3339*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d14);
3340*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
3341*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d16);
3342*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
3343*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d18);
3344*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19);
3345*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20);
3346*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d21);
3347*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d22);
3348*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d23);
3349*f5c631daSSadaf Ebrahimi   }
3350*f5c631daSSadaf Ebrahimi }
3351*f5c631daSSadaf Ebrahimi 
3352*f5c631daSSadaf Ebrahimi 
3353*f5c631daSSadaf Ebrahimi TEST(frintz) {
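  // Frintz rounds toward zero (truncation): for example, 1.9 -> 1.0 and
  // -1.5 -> -1.0 below.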
3354*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3355*f5c631daSSadaf Ebrahimi 
3356*f5c631daSSadaf Ebrahimi   START();
3357*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3358*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3359*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3360*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3361*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3362*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3363*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3364*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3365*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3366*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3367*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3368*f5c631daSSadaf Ebrahimi 
3369*f5c631daSSadaf Ebrahimi   __ Frintz(s0, s16);
3370*f5c631daSSadaf Ebrahimi   __ Frintz(s1, s17);
3371*f5c631daSSadaf Ebrahimi   __ Frintz(s2, s18);
3372*f5c631daSSadaf Ebrahimi   __ Frintz(s3, s19);
3373*f5c631daSSadaf Ebrahimi   __ Frintz(s4, s20);
3374*f5c631daSSadaf Ebrahimi   __ Frintz(s5, s21);
3375*f5c631daSSadaf Ebrahimi   __ Frintz(s6, s22);
3376*f5c631daSSadaf Ebrahimi   __ Frintz(s7, s23);
3377*f5c631daSSadaf Ebrahimi   __ Frintz(s8, s24);
3378*f5c631daSSadaf Ebrahimi   __ Frintz(s9, s25);
3379*f5c631daSSadaf Ebrahimi   __ Frintz(s10, s26);
3380*f5c631daSSadaf Ebrahimi 
3381*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3382*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3383*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3384*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3385*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3386*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3387*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3388*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3389*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3390*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3391*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3392*f5c631daSSadaf Ebrahimi 
3393*f5c631daSSadaf Ebrahimi   __ Frintz(d11, d16);
3394*f5c631daSSadaf Ebrahimi   __ Frintz(d12, d17);
3395*f5c631daSSadaf Ebrahimi   __ Frintz(d13, d18);
3396*f5c631daSSadaf Ebrahimi   __ Frintz(d14, d19);
3397*f5c631daSSadaf Ebrahimi   __ Frintz(d15, d20);
3398*f5c631daSSadaf Ebrahimi   __ Frintz(d16, d21);
3399*f5c631daSSadaf Ebrahimi   __ Frintz(d17, d22);
3400*f5c631daSSadaf Ebrahimi   __ Frintz(d18, d23);
3401*f5c631daSSadaf Ebrahimi   __ Frintz(d19, d24);
3402*f5c631daSSadaf Ebrahimi   __ Frintz(d20, d25);
3403*f5c631daSSadaf Ebrahimi   __ Frintz(d21, d26);
3404*f5c631daSSadaf Ebrahimi   END();
3405*f5c631daSSadaf Ebrahimi 
3406*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3407*f5c631daSSadaf Ebrahimi     RUN();
3408*f5c631daSSadaf Ebrahimi 
3409*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s0);
3410*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s1);
3411*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s2);
3412*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0, s3);
3413*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.0, s4);
3414*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.0, s5);
3415*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.0, s6);
3416*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3417*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3418*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0, s9);
3419*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0, s10);
3420*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d11);
3421*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d12);
3422*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d13);
3423*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0, d14);
3424*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.0, d15);
3425*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.0, d16);
3426*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.0, d17);
3427*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d18);
3428*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d19);
3429*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0, d20);
3430*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0, d21);
3431*f5c631daSSadaf Ebrahimi   }
3432*f5c631daSSadaf Ebrahimi }
3433*f5c631daSSadaf Ebrahimi 
3434*f5c631daSSadaf Ebrahimi 
3435*f5c631daSSadaf Ebrahimi TEST(fcvt_ds) {
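  // Fcvt from an S register to a D register widens to double precision. Every
  // single-precision value is exactly representable as a double, so each
  // result equals its input (hence the float literals in the FP64 assertions
  // below).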
3436*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3437*f5c631daSSadaf Ebrahimi 
3438*f5c631daSSadaf Ebrahimi   START();
3439*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3440*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3441*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3442*f5c631daSSadaf Ebrahimi   __ Fmov(s19, 1.9);
3443*f5c631daSSadaf Ebrahimi   __ Fmov(s20, 2.5);
3444*f5c631daSSadaf Ebrahimi   __ Fmov(s21, -1.5);
3445*f5c631daSSadaf Ebrahimi   __ Fmov(s22, -2.5);
3446*f5c631daSSadaf Ebrahimi   __ Fmov(s23, kFP32PositiveInfinity);
3447*f5c631daSSadaf Ebrahimi   __ Fmov(s24, kFP32NegativeInfinity);
3448*f5c631daSSadaf Ebrahimi   __ Fmov(s25, 0.0);
3449*f5c631daSSadaf Ebrahimi   __ Fmov(s26, -0.0);
3450*f5c631daSSadaf Ebrahimi   __ Fmov(s27, FLT_MAX);
3451*f5c631daSSadaf Ebrahimi   __ Fmov(s28, FLT_MIN);
3452*f5c631daSSadaf Ebrahimi   __ Fmov(s29, RawbitsToFloat(0x7fc12345));  // Quiet NaN.
3453*f5c631daSSadaf Ebrahimi   __ Fmov(s30, RawbitsToFloat(0x7f812345));  // Signalling NaN.
3454*f5c631daSSadaf Ebrahimi 
3455*f5c631daSSadaf Ebrahimi   __ Fcvt(d0, s16);
3456*f5c631daSSadaf Ebrahimi   __ Fcvt(d1, s17);
3457*f5c631daSSadaf Ebrahimi   __ Fcvt(d2, s18);
3458*f5c631daSSadaf Ebrahimi   __ Fcvt(d3, s19);
3459*f5c631daSSadaf Ebrahimi   __ Fcvt(d4, s20);
3460*f5c631daSSadaf Ebrahimi   __ Fcvt(d5, s21);
3461*f5c631daSSadaf Ebrahimi   __ Fcvt(d6, s22);
3462*f5c631daSSadaf Ebrahimi   __ Fcvt(d7, s23);
3463*f5c631daSSadaf Ebrahimi   __ Fcvt(d8, s24);
3464*f5c631daSSadaf Ebrahimi   __ Fcvt(d9, s25);
3465*f5c631daSSadaf Ebrahimi   __ Fcvt(d10, s26);
3466*f5c631daSSadaf Ebrahimi   __ Fcvt(d11, s27);
3467*f5c631daSSadaf Ebrahimi   __ Fcvt(d12, s28);
3468*f5c631daSSadaf Ebrahimi   __ Fcvt(d13, s29);
3469*f5c631daSSadaf Ebrahimi   __ Fcvt(d14, s30);
3470*f5c631daSSadaf Ebrahimi   END();
3471*f5c631daSSadaf Ebrahimi 
3472*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3473*f5c631daSSadaf Ebrahimi     RUN();
3474*f5c631daSSadaf Ebrahimi 
3475*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.0f, d0);
3476*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.1f, d1);
3477*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.5f, d2);
3478*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(1.9f, d3);
3479*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(2.5f, d4);
3480*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-1.5f, d5);
3481*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-2.5f, d6);
3482*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d7);
3483*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d8);
3484*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(0.0f, d9);
3485*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(-0.0f, d10);
3486*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(FLT_MAX, d11);
3487*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(FLT_MIN, d12);
3488*f5c631daSSadaf Ebrahimi 
3489*f5c631daSSadaf Ebrahimi     // Check that the NaN payload is preserved according to AArch64 conversion
3490*f5c631daSSadaf Ebrahimi     // rules:
3491*f5c631daSSadaf Ebrahimi     //  - The sign bit is preserved.
3492*f5c631daSSadaf Ebrahimi     //  - The top bit of the mantissa is forced to 1 (making it a quiet NaN).
3493*f5c631daSSadaf Ebrahimi     //  - The remaining mantissa bits are copied until they run out.
3494*f5c631daSSadaf Ebrahimi     //  - The low-order bits that haven't already been assigned are set to 0.
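    // For example, s30 above holds 0x7f812345: sign 0, exponent 0xff and
    // fraction 0x012345 with the quiet bit clear. Widening forces the quiet
    // bit and shifts the fraction into the top of the 52-bit double fraction:
    //   (0x7ffULL << 52) | (1ULL << 51) | (0x012345ULL << 29)
    //     = 0x7ff82468a0000000.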
3495*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(0x7ff82468a0000000), d13);
3496*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(0x7ff82468a0000000), d14);
3497*f5c631daSSadaf Ebrahimi   }
3498*f5c631daSSadaf Ebrahimi }
3499*f5c631daSSadaf Ebrahimi 
3500*f5c631daSSadaf Ebrahimi 
3501*f5c631daSSadaf Ebrahimi TEST(fcvt_sd) {
3502*f5c631daSSadaf Ebrahimi   // Test simple conversions here. Complex behaviour (such as rounding
3503*f5c631daSSadaf Ebrahimi   // specifics) is tested in the simulator tests.
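  // Fcvt from a D register to an S register narrows to single precision,
  // rounding to nearest (the only FPCR rounding mode VIXL supports, as noted
  // in the frintx test above).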
3504*f5c631daSSadaf Ebrahimi 
3505*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3506*f5c631daSSadaf Ebrahimi 
3507*f5c631daSSadaf Ebrahimi   START();
3508*f5c631daSSadaf Ebrahimi   __ Fmov(d16, 1.0);
3509*f5c631daSSadaf Ebrahimi   __ Fmov(d17, 1.1);
3510*f5c631daSSadaf Ebrahimi   __ Fmov(d18, 1.5);
3511*f5c631daSSadaf Ebrahimi   __ Fmov(d19, 1.9);
3512*f5c631daSSadaf Ebrahimi   __ Fmov(d20, 2.5);
3513*f5c631daSSadaf Ebrahimi   __ Fmov(d21, -1.5);
3514*f5c631daSSadaf Ebrahimi   __ Fmov(d22, -2.5);
3515*f5c631daSSadaf Ebrahimi   __ Fmov(d23, kFP32PositiveInfinity);
3516*f5c631daSSadaf Ebrahimi   __ Fmov(d24, kFP32NegativeInfinity);
3517*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 0.0);
3518*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -0.0);
3519*f5c631daSSadaf Ebrahimi   __ Fmov(d27, FLT_MAX);
3520*f5c631daSSadaf Ebrahimi   __ Fmov(d28, FLT_MIN);
3521*f5c631daSSadaf Ebrahimi   __ Fmov(d29, RawbitsToDouble(0x7ff82468a0000000));  // Quiet NaN.
3522*f5c631daSSadaf Ebrahimi   __ Fmov(d30, RawbitsToDouble(0x7ff02468a0000000));  // Signalling NaN.
3523*f5c631daSSadaf Ebrahimi 
3524*f5c631daSSadaf Ebrahimi   __ Fcvt(s0, d16);
3525*f5c631daSSadaf Ebrahimi   __ Fcvt(s1, d17);
3526*f5c631daSSadaf Ebrahimi   __ Fcvt(s2, d18);
3527*f5c631daSSadaf Ebrahimi   __ Fcvt(s3, d19);
3528*f5c631daSSadaf Ebrahimi   __ Fcvt(s4, d20);
3529*f5c631daSSadaf Ebrahimi   __ Fcvt(s5, d21);
3530*f5c631daSSadaf Ebrahimi   __ Fcvt(s6, d22);
3531*f5c631daSSadaf Ebrahimi   __ Fcvt(s7, d23);
3532*f5c631daSSadaf Ebrahimi   __ Fcvt(s8, d24);
3533*f5c631daSSadaf Ebrahimi   __ Fcvt(s9, d25);
3534*f5c631daSSadaf Ebrahimi   __ Fcvt(s10, d26);
3535*f5c631daSSadaf Ebrahimi   __ Fcvt(s11, d27);
3536*f5c631daSSadaf Ebrahimi   __ Fcvt(s12, d28);
3537*f5c631daSSadaf Ebrahimi   __ Fcvt(s13, d29);
3538*f5c631daSSadaf Ebrahimi   __ Fcvt(s14, d30);
3539*f5c631daSSadaf Ebrahimi   END();
3540*f5c631daSSadaf Ebrahimi 
3541*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3542*f5c631daSSadaf Ebrahimi     RUN();
3543*f5c631daSSadaf Ebrahimi 
3544*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.0f, s0);
3545*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.1f, s1);
3546*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.5f, s2);
3547*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(1.9f, s3);
3548*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(2.5f, s4);
3549*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-1.5f, s5);
3550*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-2.5f, s6);
3551*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7);
3552*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8);
3553*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(0.0f, s9);
3554*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(-0.0f, s10);
3555*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(FLT_MAX, s11);
3556*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(FLT_MIN, s12);
3557*f5c631daSSadaf Ebrahimi 
3558*f5c631daSSadaf Ebrahimi     // Check that the NaN payload is preserved according to AArch64 conversion
3559*f5c631daSSadaf Ebrahimi     // rules:
3560*f5c631daSSadaf Ebrahimi     //  - The sign bit is preserved.
3561*f5c631daSSadaf Ebrahimi     //  - The top bit of the mantissa is forced to 1 (making it a quiet NaN).
3562*f5c631daSSadaf Ebrahimi     //  - The remaining mantissa bits are copied until they run out.
3563*f5c631daSSadaf Ebrahimi     //  - The low-order bits that haven't already been assigned are set to 0.
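    // For example, d30 above holds 0x7ff02468a0000000, whose 52-bit fraction
    // is 0x2468a0000000 with the quiet bit clear. Narrowing forces the quiet
    // bit and keeps the remaining high-order fraction bits
    // (0x2468a0000000 >> 29 = 0x12345), giving 0x7fc12345.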
3564*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(0x7fc12345), s13);
3565*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(0x7fc12345), s14);
3566*f5c631daSSadaf Ebrahimi   }
3567*f5c631daSSadaf Ebrahimi }
3568*f5c631daSSadaf Ebrahimi 
3569*f5c631daSSadaf Ebrahimi 
3570*f5c631daSSadaf Ebrahimi TEST(fcvt_half) {
3571*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3572*f5c631daSSadaf Ebrahimi 
3573*f5c631daSSadaf Ebrahimi   START();
3574*f5c631daSSadaf Ebrahimi   Label done;
3575*f5c631daSSadaf Ebrahimi   {
3576*f5c631daSSadaf Ebrahimi     // Check all exact conversions from half to float and back.
3577*f5c631daSSadaf Ebrahimi     Label ok, fail;
3578*f5c631daSSadaf Ebrahimi     __ Mov(w0, 0);
3579*f5c631daSSadaf Ebrahimi     for (int i = 0; i < 0xffff; i += 3) {
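      // Patterns where (i & 0x7c00) == 0x7c00 have an all-ones exponent and
      // encode an infinity or NaN, so they are skipped below.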
3580*f5c631daSSadaf Ebrahimi       if ((i & 0x7c00) == 0x7c00) continue;
3581*f5c631daSSadaf Ebrahimi       __ Mov(w1, i);
3582*f5c631daSSadaf Ebrahimi       __ Fmov(s1, w1);
3583*f5c631daSSadaf Ebrahimi       __ Fcvt(s2, h1);
3584*f5c631daSSadaf Ebrahimi       __ Fcvt(h2, s2);
3585*f5c631daSSadaf Ebrahimi       __ Fmov(w2, s2);
3586*f5c631daSSadaf Ebrahimi       __ Cmp(w1, w2);
3587*f5c631daSSadaf Ebrahimi       __ B(&fail, ne);
3588*f5c631daSSadaf Ebrahimi     }
3589*f5c631daSSadaf Ebrahimi     __ B(&ok);
3590*f5c631daSSadaf Ebrahimi     __ Bind(&fail);
3591*f5c631daSSadaf Ebrahimi     __ Mov(w0, 1);
3592*f5c631daSSadaf Ebrahimi     __ B(&done);
3593*f5c631daSSadaf Ebrahimi     __ Bind(&ok);
3594*f5c631daSSadaf Ebrahimi   }
3595*f5c631daSSadaf Ebrahimi   {
3596*f5c631daSSadaf Ebrahimi     // Check all exact conversions from half to double and back.
3597*f5c631daSSadaf Ebrahimi     Label ok, fail;
3598*f5c631daSSadaf Ebrahimi     for (int i = 0; i < 0xffff; i += 3) {
3599*f5c631daSSadaf Ebrahimi       if ((i & 0x7c00) == 0x7c00) continue;
3600*f5c631daSSadaf Ebrahimi       __ Mov(w1, i);
3601*f5c631daSSadaf Ebrahimi       __ Fmov(s1, w1);
3602*f5c631daSSadaf Ebrahimi       __ Fcvt(d2, h1);
3603*f5c631daSSadaf Ebrahimi       __ Fcvt(h2, d2);
3604*f5c631daSSadaf Ebrahimi       __ Fmov(w2, s2);
3605*f5c631daSSadaf Ebrahimi       __ Cmp(w1, w2);
3606*f5c631daSSadaf Ebrahimi       __ B(&fail, ne);
3607*f5c631daSSadaf Ebrahimi     }
3608*f5c631daSSadaf Ebrahimi     __ B(&ok);
3609*f5c631daSSadaf Ebrahimi     __ Bind(&fail);
3610*f5c631daSSadaf Ebrahimi     __ Mov(w0, 2);
3611*f5c631daSSadaf Ebrahimi     __ Bind(&ok);
3612*f5c631daSSadaf Ebrahimi   }
3613*f5c631daSSadaf Ebrahimi   __ Bind(&done);
3614*f5c631daSSadaf Ebrahimi 
3615*f5c631daSSadaf Ebrahimi   // Check some other interesting values.
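  // In half precision, 65504 = (2 - 2^-10) * 2^15 is the largest finite value
  // (0x7bff), 2^-14 is the smallest positive normal (0x0400) and 2^-24 is the
  // smallest positive subnormal (0x0001), matching the raw bits expected
  // below.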
3616*f5c631daSSadaf Ebrahimi   __ Fmov(s0, kFP32PositiveInfinity);
3617*f5c631daSSadaf Ebrahimi   __ Fmov(s1, kFP32NegativeInfinity);
3618*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 65504);       // Max half precision.
3619*f5c631daSSadaf Ebrahimi   __ Fmov(s3, 6.10352e-5);  // Min positive normal.
3620*f5c631daSSadaf Ebrahimi   __ Fmov(s4, 6.09756e-5);  // Max subnormal.
3621*f5c631daSSadaf Ebrahimi   __ Fmov(s5, 5.96046e-8);  // Min positive subnormal.
3622*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 5e-9);        // Not representable -> zero.
3623*f5c631daSSadaf Ebrahimi   __ Fmov(s7, -0.0);
3624*f5c631daSSadaf Ebrahimi   __ Fcvt(h0, s0);
3625*f5c631daSSadaf Ebrahimi   __ Fcvt(h1, s1);
3626*f5c631daSSadaf Ebrahimi   __ Fcvt(h2, s2);
3627*f5c631daSSadaf Ebrahimi   __ Fcvt(h3, s3);
3628*f5c631daSSadaf Ebrahimi   __ Fcvt(h4, s4);
3629*f5c631daSSadaf Ebrahimi   __ Fcvt(h5, s5);
3630*f5c631daSSadaf Ebrahimi   __ Fcvt(h6, s6);
3631*f5c631daSSadaf Ebrahimi   __ Fcvt(h7, s7);
3632*f5c631daSSadaf Ebrahimi 
3633*f5c631daSSadaf Ebrahimi   __ Fmov(d20, kFP64PositiveInfinity);
3634*f5c631daSSadaf Ebrahimi   __ Fmov(d21, kFP64NegativeInfinity);
3635*f5c631daSSadaf Ebrahimi   __ Fmov(d22, 65504);       // Max half precision.
3636*f5c631daSSadaf Ebrahimi   __ Fmov(d23, 6.10352e-5);  // Min positive normal.
3637*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 6.09756e-5);  // Max subnormal.
3638*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 5.96046e-8);  // Min positive subnormal.
3639*f5c631daSSadaf Ebrahimi   __ Fmov(d26, 5e-9);        // Not representable -> zero.
3640*f5c631daSSadaf Ebrahimi   __ Fmov(d27, -0.0);
3641*f5c631daSSadaf Ebrahimi   __ Fcvt(h20, d20);
3642*f5c631daSSadaf Ebrahimi   __ Fcvt(h21, d21);
3643*f5c631daSSadaf Ebrahimi   __ Fcvt(h22, d22);
3644*f5c631daSSadaf Ebrahimi   __ Fcvt(h23, d23);
3645*f5c631daSSadaf Ebrahimi   __ Fcvt(h24, d24);
3646*f5c631daSSadaf Ebrahimi   __ Fcvt(h25, d25);
3647*f5c631daSSadaf Ebrahimi   __ Fcvt(h26, d26);
3648*f5c631daSSadaf Ebrahimi   __ Fcvt(h27, d27);
3649*f5c631daSSadaf Ebrahimi   END();
3650*f5c631daSSadaf Ebrahimi 
3651*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3652*f5c631daSSadaf Ebrahimi     RUN();
3653*f5c631daSSadaf Ebrahimi 
3654*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(0, w0);  // 1 => float failed, 2 => double failed.
3655*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, Float16ToRawbits(kFP16PositiveInfinity), q0);
3656*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, Float16ToRawbits(kFP16NegativeInfinity), q1);
3657*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x7bff, q2);
3658*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x0400, q3);
3659*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x03ff, q4);
3660*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x0001, q5);
3661*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0, q6);
3662*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x8000, q7);
3663*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, Float16ToRawbits(kFP16PositiveInfinity), q20);
3664*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, Float16ToRawbits(kFP16NegativeInfinity), q21);
3665*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x7bff, q22);
3666*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x0400, q23);
3667*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x03ff, q24);
3668*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x0001, q25);
3669*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0, q26);
3670*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_128(0, 0x8000, q27);
3671*f5c631daSSadaf Ebrahimi   }
3672*f5c631daSSadaf Ebrahimi }
3673*f5c631daSSadaf Ebrahimi 
3674*f5c631daSSadaf Ebrahimi 
3675*f5c631daSSadaf Ebrahimi TEST(fcvtas) {
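  // Fcvtas converts to a signed integer, rounding to nearest with ties away
  // from zero and saturating on overflow: 2.5 -> 3, -2.5 -> -3, and the
  // infinities map to the extreme representable values below.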
3676*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3677*f5c631daSSadaf Ebrahimi 
3678*f5c631daSSadaf Ebrahimi   START();
3679*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
3680*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
3681*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 2.5);
3682*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -2.5);
3683*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
3684*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
3685*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
3686*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
3687*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
3688*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
3689*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 2.5);
3690*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -2.5);
3691*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
3692*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
3693*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
3694*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
3695*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3696*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 2.5);
3697*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
3698*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
3699*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
3700*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
3701*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
3702*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
3703*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 2.5);
3704*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -2.5);
3705*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
3706*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
3707*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
3708*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
3709*f5c631daSSadaf Ebrahimi 
3710*f5c631daSSadaf Ebrahimi   __ Fcvtas(w0, s0);
3711*f5c631daSSadaf Ebrahimi   __ Fcvtas(w1, s1);
3712*f5c631daSSadaf Ebrahimi   __ Fcvtas(w2, s2);
3713*f5c631daSSadaf Ebrahimi   __ Fcvtas(w3, s3);
3714*f5c631daSSadaf Ebrahimi   __ Fcvtas(w4, s4);
3715*f5c631daSSadaf Ebrahimi   __ Fcvtas(w5, s5);
3716*f5c631daSSadaf Ebrahimi   __ Fcvtas(w6, s6);
3717*f5c631daSSadaf Ebrahimi   __ Fcvtas(w7, s7);
3718*f5c631daSSadaf Ebrahimi   __ Fcvtas(w8, d8);
3719*f5c631daSSadaf Ebrahimi   __ Fcvtas(w9, d9);
3720*f5c631daSSadaf Ebrahimi   __ Fcvtas(w10, d10);
3721*f5c631daSSadaf Ebrahimi   __ Fcvtas(w11, d11);
3722*f5c631daSSadaf Ebrahimi   __ Fcvtas(w12, d12);
3723*f5c631daSSadaf Ebrahimi   __ Fcvtas(w13, d13);
3724*f5c631daSSadaf Ebrahimi   __ Fcvtas(w14, d14);
3725*f5c631daSSadaf Ebrahimi   __ Fcvtas(w15, d15);
3726*f5c631daSSadaf Ebrahimi   __ Fcvtas(x17, s17);
3727*f5c631daSSadaf Ebrahimi   __ Fcvtas(x18, s18);
3728*f5c631daSSadaf Ebrahimi   __ Fcvtas(x19, s19);
3729*f5c631daSSadaf Ebrahimi   __ Fcvtas(x20, s20);
3730*f5c631daSSadaf Ebrahimi   __ Fcvtas(x21, s21);
3731*f5c631daSSadaf Ebrahimi   __ Fcvtas(x22, s22);
3732*f5c631daSSadaf Ebrahimi   __ Fcvtas(x23, s23);
3733*f5c631daSSadaf Ebrahimi   __ Fcvtas(x24, d24);
3734*f5c631daSSadaf Ebrahimi   __ Fcvtas(x25, d25);
3735*f5c631daSSadaf Ebrahimi   __ Fcvtas(x26, d26);
3736*f5c631daSSadaf Ebrahimi   __ Fcvtas(x27, d27);
3737*f5c631daSSadaf Ebrahimi   __ Fcvtas(x28, d28);
3738*f5c631daSSadaf Ebrahimi   __ Fcvtas(x29, d29);
3739*f5c631daSSadaf Ebrahimi   __ Fcvtas(x30, d30);
3740*f5c631daSSadaf Ebrahimi   END();
3741*f5c631daSSadaf Ebrahimi 
3742*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3743*f5c631daSSadaf Ebrahimi     RUN();
3744*f5c631daSSadaf Ebrahimi 
3745*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
3746*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
3747*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x2);
3748*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffd, x3);
3749*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x4);
3750*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x5);
3751*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
3752*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000080, x7);
3753*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
3754*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
3755*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x10);
3756*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffd, x11);
3757*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x12);
3758*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x13);
3759*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
3760*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000001, x15);
3761*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
3762*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x18);
3763*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffd, x19);
3764*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x20);
3765*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x21);
3766*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
3767*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000008000000000, x23);
3768*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
3769*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x25);
3770*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffd, x26);
3771*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x27);
3772*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x28);
3773*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
3774*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000400, x30);
3775*f5c631daSSadaf Ebrahimi   }
3776*f5c631daSSadaf Ebrahimi }
3777*f5c631daSSadaf Ebrahimi 
3778*f5c631daSSadaf Ebrahimi 
3779*f5c631daSSadaf Ebrahimi TEST(fcvtau) {
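  // Fcvtau converts to an unsigned integer with the same ties-away-from-zero
  // rounding; out-of-range negative inputs and -Inf convert to 0, and +Inf
  // saturates to the all-ones value below.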
3780*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3781*f5c631daSSadaf Ebrahimi 
3782*f5c631daSSadaf Ebrahimi   START();
3783*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
3784*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
3785*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 2.5);
3786*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -2.5);
3787*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
3788*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
3789*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0xffffff00);  // Largest float < UINT32_MAX.
3790*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
3791*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
3792*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 2.5);
3793*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -2.5);
3794*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
3795*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
3796*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 0xfffffffe);
3797*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
3798*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3799*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 2.5);
3800*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -2.5);
3801*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
3802*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
3803*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0xffffff0000000000);  // Largest float < UINT64_MAX.
3804*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
3805*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 2.5);
3806*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -2.5);
3807*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
3808*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
3809*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0xfffffffffffff800);  // Largest double < UINT64_MAX.
3810*f5c631daSSadaf Ebrahimi   __ Fmov(s30, 0x100000000);
3811*f5c631daSSadaf Ebrahimi 
3812*f5c631daSSadaf Ebrahimi   __ Fcvtau(w0, s0);
3813*f5c631daSSadaf Ebrahimi   __ Fcvtau(w1, s1);
3814*f5c631daSSadaf Ebrahimi   __ Fcvtau(w2, s2);
3815*f5c631daSSadaf Ebrahimi   __ Fcvtau(w3, s3);
3816*f5c631daSSadaf Ebrahimi   __ Fcvtau(w4, s4);
3817*f5c631daSSadaf Ebrahimi   __ Fcvtau(w5, s5);
3818*f5c631daSSadaf Ebrahimi   __ Fcvtau(w6, s6);
3819*f5c631daSSadaf Ebrahimi   __ Fcvtau(w8, d8);
3820*f5c631daSSadaf Ebrahimi   __ Fcvtau(w9, d9);
3821*f5c631daSSadaf Ebrahimi   __ Fcvtau(w10, d10);
3822*f5c631daSSadaf Ebrahimi   __ Fcvtau(w11, d11);
3823*f5c631daSSadaf Ebrahimi   __ Fcvtau(w12, d12);
3824*f5c631daSSadaf Ebrahimi   __ Fcvtau(w13, d13);
3825*f5c631daSSadaf Ebrahimi   __ Fcvtau(w14, d14);
3826*f5c631daSSadaf Ebrahimi   __ Fcvtau(w15, d15);
3827*f5c631daSSadaf Ebrahimi   __ Fcvtau(x16, s16);
3828*f5c631daSSadaf Ebrahimi   __ Fcvtau(x17, s17);
3829*f5c631daSSadaf Ebrahimi   __ Fcvtau(x18, s18);
3830*f5c631daSSadaf Ebrahimi   __ Fcvtau(x19, s19);
3831*f5c631daSSadaf Ebrahimi   __ Fcvtau(x20, s20);
3832*f5c631daSSadaf Ebrahimi   __ Fcvtau(x21, s21);
3833*f5c631daSSadaf Ebrahimi   __ Fcvtau(x22, s22);
3834*f5c631daSSadaf Ebrahimi   __ Fcvtau(x24, d24);
3835*f5c631daSSadaf Ebrahimi   __ Fcvtau(x25, d25);
3836*f5c631daSSadaf Ebrahimi   __ Fcvtau(x26, d26);
3837*f5c631daSSadaf Ebrahimi   __ Fcvtau(x27, d27);
3838*f5c631daSSadaf Ebrahimi   __ Fcvtau(x28, d28);
3839*f5c631daSSadaf Ebrahimi   __ Fcvtau(x29, d29);
3840*f5c631daSSadaf Ebrahimi   __ Fcvtau(w30, s30);
3841*f5c631daSSadaf Ebrahimi   END();
3842*f5c631daSSadaf Ebrahimi 
3843*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3844*f5c631daSSadaf Ebrahimi     RUN();
3845*f5c631daSSadaf Ebrahimi 
3846*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
3847*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
3848*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x2);
3849*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x3);
3850*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x4);
3851*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x5);
3852*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffff00, x6);
3853*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
3854*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
3855*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x10);
3856*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x11);
3857*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x12);
3858*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x13);
3859*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x14);
3860*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x16);
3861*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
3862*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x18);
3863*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x19);
3864*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x20);
3865*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x21);
3866*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffff0000000000, x22);
3867*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
3868*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(3, x25);
3869*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x26);
3870*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x27);
3871*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x28);
3872*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffff800, x29);
3873*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x30);
3874*f5c631daSSadaf Ebrahimi   }
3875*f5c631daSSadaf Ebrahimi }
3876*f5c631daSSadaf Ebrahimi 
3877*f5c631daSSadaf Ebrahimi 
3878*f5c631daSSadaf Ebrahimi TEST(fcvtms) {
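  // Fcvtms converts to a signed integer, rounding toward minus infinity
  // (floor): 1.5 -> 1 and -1.5 -> -2, with infinities saturating below.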
3879*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3880*f5c631daSSadaf Ebrahimi 
3881*f5c631daSSadaf Ebrahimi   START();
3882*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
3883*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
3884*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
3885*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
3886*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
3887*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
3888*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
3889*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
3890*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
3891*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
3892*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
3893*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
3894*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
3895*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
3896*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
3897*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
3898*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
3899*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
3900*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
3901*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
3902*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
3903*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
3904*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
3905*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
3906*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
3907*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
3908*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
3909*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
3910*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
3911*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
3912*f5c631daSSadaf Ebrahimi 
3913*f5c631daSSadaf Ebrahimi   __ Fcvtms(w0, s0);
3914*f5c631daSSadaf Ebrahimi   __ Fcvtms(w1, s1);
3915*f5c631daSSadaf Ebrahimi   __ Fcvtms(w2, s2);
3916*f5c631daSSadaf Ebrahimi   __ Fcvtms(w3, s3);
3917*f5c631daSSadaf Ebrahimi   __ Fcvtms(w4, s4);
3918*f5c631daSSadaf Ebrahimi   __ Fcvtms(w5, s5);
3919*f5c631daSSadaf Ebrahimi   __ Fcvtms(w6, s6);
3920*f5c631daSSadaf Ebrahimi   __ Fcvtms(w7, s7);
3921*f5c631daSSadaf Ebrahimi   __ Fcvtms(w8, d8);
3922*f5c631daSSadaf Ebrahimi   __ Fcvtms(w9, d9);
3923*f5c631daSSadaf Ebrahimi   __ Fcvtms(w10, d10);
3924*f5c631daSSadaf Ebrahimi   __ Fcvtms(w11, d11);
3925*f5c631daSSadaf Ebrahimi   __ Fcvtms(w12, d12);
3926*f5c631daSSadaf Ebrahimi   __ Fcvtms(w13, d13);
3927*f5c631daSSadaf Ebrahimi   __ Fcvtms(w14, d14);
3928*f5c631daSSadaf Ebrahimi   __ Fcvtms(w15, d15);
3929*f5c631daSSadaf Ebrahimi   __ Fcvtms(x17, s17);
3930*f5c631daSSadaf Ebrahimi   __ Fcvtms(x18, s18);
3931*f5c631daSSadaf Ebrahimi   __ Fcvtms(x19, s19);
3932*f5c631daSSadaf Ebrahimi   __ Fcvtms(x20, s20);
3933*f5c631daSSadaf Ebrahimi   __ Fcvtms(x21, s21);
3934*f5c631daSSadaf Ebrahimi   __ Fcvtms(x22, s22);
3935*f5c631daSSadaf Ebrahimi   __ Fcvtms(x23, s23);
3936*f5c631daSSadaf Ebrahimi   __ Fcvtms(x24, d24);
3937*f5c631daSSadaf Ebrahimi   __ Fcvtms(x25, d25);
3938*f5c631daSSadaf Ebrahimi   __ Fcvtms(x26, d26);
3939*f5c631daSSadaf Ebrahimi   __ Fcvtms(x27, d27);
3940*f5c631daSSadaf Ebrahimi   __ Fcvtms(x28, d28);
3941*f5c631daSSadaf Ebrahimi   __ Fcvtms(x29, d29);
3942*f5c631daSSadaf Ebrahimi   __ Fcvtms(x30, d30);
3943*f5c631daSSadaf Ebrahimi   END();
3944*f5c631daSSadaf Ebrahimi 
3945*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
3946*f5c631daSSadaf Ebrahimi     RUN();
3947*f5c631daSSadaf Ebrahimi 
3948*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
3949*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
3950*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x2);
3951*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x3);
3952*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x4);
3953*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x5);
3954*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
3955*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000080, x7);
3956*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
3957*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
3958*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x10);
3959*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x11);
3960*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x12);
3961*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x13);
3962*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
3963*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000001, x15);
3964*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
3965*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x18);
3966*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffe, x19);
3967*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x20);
3968*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x21);
3969*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
3970*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000008000000000, x23);
3971*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
3972*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x25);
3973*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffe, x26);
3974*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x27);
3975*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x28);
3976*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
3977*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000400, x30);
3978*f5c631daSSadaf Ebrahimi   }
3979*f5c631daSSadaf Ebrahimi }
3980*f5c631daSSadaf Ebrahimi 
3981*f5c631daSSadaf Ebrahimi 
3982*f5c631daSSadaf Ebrahimi TEST(fcvtmu) {
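  // Fcvtmu converts to an unsigned integer, rounding toward minus infinity;
  // negative inputs such as -1.5 and -Inf convert to 0 below.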
3983*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
3984*f5c631daSSadaf Ebrahimi 
3985*f5c631daSSadaf Ebrahimi   START();
3986*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
3987*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
3988*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
3989*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
3990*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
3991*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
3992*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
3993*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
3994*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
3995*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
3996*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
3997*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
3998*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
3999*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
4000*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
4001*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
4002*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
4003*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
4004*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
4005*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
4006*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
4007*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
4008*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
4009*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
4010*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
4011*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
4012*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
4013*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
4014*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
4015*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
4016*f5c631daSSadaf Ebrahimi 
4017*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w0, s0);
4018*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w1, s1);
4019*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w2, s2);
4020*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w3, s3);
4021*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w4, s4);
4022*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w5, s5);
4023*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w6, s6);
4024*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w7, s7);
4025*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w8, d8);
4026*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w9, d9);
4027*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w10, d10);
4028*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w11, d11);
4029*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w12, d12);
4030*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w13, d13);
4031*f5c631daSSadaf Ebrahimi   __ Fcvtmu(w14, d14);
4032*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x17, s17);
4033*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x18, s18);
4034*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x19, s19);
4035*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x20, s20);
4036*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x21, s21);
4037*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x22, s22);
4038*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x23, s23);
4039*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x24, d24);
4040*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x25, d25);
4041*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x26, d26);
4042*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x27, d27);
4043*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x28, d28);
4044*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x29, d29);
4045*f5c631daSSadaf Ebrahimi   __ Fcvtmu(x30, d30);
4046*f5c631daSSadaf Ebrahimi   END();
4047*f5c631daSSadaf Ebrahimi 
4048*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4049*f5c631daSSadaf Ebrahimi     RUN();
4050*f5c631daSSadaf Ebrahimi 
4051*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
4052*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
4053*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x2);
4054*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x3);
4055*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x4);
4056*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x5);
4057*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
4058*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x7);
4059*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
4060*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
4061*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x10);
4062*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x11);
4063*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x12);
4064*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x13);
4065*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
4066*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
4067*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x18);
4068*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x19);
4069*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x20);
4070*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x21);
4071*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
4072*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x23);
4073*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
4074*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x25);
4075*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x26);
4076*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x27);
4077*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x28);
4078*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
4079*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x30);
4080*f5c631daSSadaf Ebrahimi   }
4081*f5c631daSSadaf Ebrahimi }
4082*f5c631daSSadaf Ebrahimi 
4083*f5c631daSSadaf Ebrahimi 
4084*f5c631daSSadaf Ebrahimi TEST(fcvtns) {
4085*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4086*f5c631daSSadaf Ebrahimi 
4087*f5c631daSSadaf Ebrahimi   START();
4088*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
4089*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
4090*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
4091*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
4092*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
4093*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
4094*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
4095*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
4096*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
4097*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
4098*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
4099*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
4100*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
4101*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
4102*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
4103*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
4104*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
4105*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
4106*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
4107*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
4108*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
4109*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
4110*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
4111*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
4112*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
4113*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
4114*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
4115*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
4116*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
4117*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
4118*f5c631daSSadaf Ebrahimi 
4119*f5c631daSSadaf Ebrahimi   __ Fcvtns(w0, s0);
4120*f5c631daSSadaf Ebrahimi   __ Fcvtns(w1, s1);
4121*f5c631daSSadaf Ebrahimi   __ Fcvtns(w2, s2);
4122*f5c631daSSadaf Ebrahimi   __ Fcvtns(w3, s3);
4123*f5c631daSSadaf Ebrahimi   __ Fcvtns(w4, s4);
4124*f5c631daSSadaf Ebrahimi   __ Fcvtns(w5, s5);
4125*f5c631daSSadaf Ebrahimi   __ Fcvtns(w6, s6);
4126*f5c631daSSadaf Ebrahimi   __ Fcvtns(w7, s7);
4127*f5c631daSSadaf Ebrahimi   __ Fcvtns(w8, d8);
4128*f5c631daSSadaf Ebrahimi   __ Fcvtns(w9, d9);
4129*f5c631daSSadaf Ebrahimi   __ Fcvtns(w10, d10);
4130*f5c631daSSadaf Ebrahimi   __ Fcvtns(w11, d11);
4131*f5c631daSSadaf Ebrahimi   __ Fcvtns(w12, d12);
4132*f5c631daSSadaf Ebrahimi   __ Fcvtns(w13, d13);
4133*f5c631daSSadaf Ebrahimi   __ Fcvtns(w14, d14);
4134*f5c631daSSadaf Ebrahimi   __ Fcvtns(w15, d15);
4135*f5c631daSSadaf Ebrahimi   __ Fcvtns(x17, s17);
4136*f5c631daSSadaf Ebrahimi   __ Fcvtns(x18, s18);
4137*f5c631daSSadaf Ebrahimi   __ Fcvtns(x19, s19);
4138*f5c631daSSadaf Ebrahimi   __ Fcvtns(x20, s20);
4139*f5c631daSSadaf Ebrahimi   __ Fcvtns(x21, s21);
4140*f5c631daSSadaf Ebrahimi   __ Fcvtns(x22, s22);
4141*f5c631daSSadaf Ebrahimi   __ Fcvtns(x23, s23);
4142*f5c631daSSadaf Ebrahimi   __ Fcvtns(x24, d24);
4143*f5c631daSSadaf Ebrahimi   __ Fcvtns(x25, d25);
4144*f5c631daSSadaf Ebrahimi   __ Fcvtns(x26, d26);
4145*f5c631daSSadaf Ebrahimi   __ Fcvtns(x27, d27);
4146*f5c631daSSadaf Ebrahimi   __ Fcvtns(x28, d28);
4147*f5c631daSSadaf Ebrahimi   __ Fcvtns(x29, d29);
4148*f5c631daSSadaf Ebrahimi   __ Fcvtns(x30, d30);
4149*f5c631daSSadaf Ebrahimi   END();
4150*f5c631daSSadaf Ebrahimi 
4151*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4152*f5c631daSSadaf Ebrahimi     RUN();
4153*f5c631daSSadaf Ebrahimi 
4154*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
4155*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
4156*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x2);
4157*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x3);
4158*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x4);
4159*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x5);
4160*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
4161*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000080, x7);
4162*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
4163*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
4164*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x10);
4165*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x11);
4166*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x12);
4167*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x13);
4168*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
4169*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000001, x15);
4170*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
4171*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x18);
4172*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffe, x19);
4173*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x20);
4174*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x21);
4175*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
4176*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000008000000000, x23);
4177*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
4178*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x25);
4179*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffffffe, x26);
4180*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x27);
4181*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x28);
4182*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
4183*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000400, x30);
4184*f5c631daSSadaf Ebrahimi   }
4185*f5c631daSSadaf Ebrahimi }
4186*f5c631daSSadaf Ebrahimi 
4187*f5c631daSSadaf Ebrahimi 
4188*f5c631daSSadaf Ebrahimi TEST(fcvtnu) {
4189*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4190*f5c631daSSadaf Ebrahimi 
4191*f5c631daSSadaf Ebrahimi   START();
4192*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
4193*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
4194*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
4195*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
4196*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
4197*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
4198*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0xffffff00);  // Largest float < UINT32_MAX.
4199*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
4200*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
4201*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
4202*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
4203*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
4204*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
4205*f5c631daSSadaf Ebrahimi   __ Fmov(d14, 0xfffffffe);
4206*f5c631daSSadaf Ebrahimi   __ Fmov(s16, 1.0);
4207*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
4208*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
4209*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
4210*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
4211*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
4212*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0xffffff0000000000);  // Largest float < UINT64_MAX.
4213*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
4214*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
4215*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
4216*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
4217*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
4218*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0xfffffffffffff800);  // Largest double < UINT64_MAX.
4219*f5c631daSSadaf Ebrahimi   __ Fmov(s30, 0x100000000);
4220*f5c631daSSadaf Ebrahimi 
4221*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w0, s0);
4222*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w1, s1);
4223*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w2, s2);
4224*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w3, s3);
4225*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w4, s4);
4226*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w5, s5);
4227*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w6, s6);
4228*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w8, d8);
4229*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w9, d9);
4230*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w10, d10);
4231*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w11, d11);
4232*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w12, d12);
4233*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w13, d13);
4234*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w14, d14);
4235*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w15, d15);  // d15 is not initialised; x15 is unchecked.
4236*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x16, s16);
4237*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x17, s17);
4238*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x18, s18);
4239*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x19, s19);
4240*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x20, s20);
4241*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x21, s21);
4242*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x22, s22);
4243*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x24, d24);
4244*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x25, d25);
4245*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x26, d26);
4246*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x27, d27);
4247*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x28, d28);
4248*f5c631daSSadaf Ebrahimi   __ Fcvtnu(x29, d29);
4249*f5c631daSSadaf Ebrahimi   __ Fcvtnu(w30, s30);
4250*f5c631daSSadaf Ebrahimi   END();
4251*f5c631daSSadaf Ebrahimi 
4252*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4253*f5c631daSSadaf Ebrahimi     RUN();
4254*f5c631daSSadaf Ebrahimi 
4255*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
4256*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
4257*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x2);
4258*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x3);
4259*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x4);
4260*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x5);
4261*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffff00, x6);
4262*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
4263*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
4264*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x10);
4265*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x11);
4266*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x12);
4267*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x13);
4268*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffe, x14);
4269*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x16);
4270*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
4271*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x18);
4272*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x19);
4273*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x20);
4274*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x21);
4275*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffff0000000000, x22);
4276*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
4277*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(2, x25);
4278*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x26);
4279*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x27);
4280*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x28);
4281*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xfffffffffffff800, x29);
4282*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x30);
4283*f5c631daSSadaf Ebrahimi   }
4284*f5c631daSSadaf Ebrahimi }
4285*f5c631daSSadaf Ebrahimi 
4286*f5c631daSSadaf Ebrahimi 
4287*f5c631daSSadaf Ebrahimi TEST(fcvtzs) {
4288*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4289*f5c631daSSadaf Ebrahimi 
4290*f5c631daSSadaf Ebrahimi   START();
4291*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
4292*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
4293*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
4294*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
4295*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
4296*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
4297*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
4298*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
4299*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
4300*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
4301*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
4302*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
4303*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
4304*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
4305*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
4306*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
4307*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
4308*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
4309*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
4310*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
4311*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
4312*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
4313*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
4314*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
4315*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
4316*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
4317*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
4318*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
4319*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
4320*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
4321*f5c631daSSadaf Ebrahimi 
4322*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w0, s0);
4323*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w1, s1);
4324*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w2, s2);
4325*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w3, s3);
4326*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w4, s4);
4327*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w5, s5);
4328*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w6, s6);
4329*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w7, s7);
4330*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w8, d8);
4331*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w9, d9);
4332*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w10, d10);
4333*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w11, d11);
4334*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w12, d12);
4335*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w13, d13);
4336*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w14, d14);
4337*f5c631daSSadaf Ebrahimi   __ Fcvtzs(w15, d15);
4338*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x17, s17);
4339*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x18, s18);
4340*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x19, s19);
4341*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x20, s20);
4342*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x21, s21);
4343*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x22, s22);
4344*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x23, s23);
4345*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x24, d24);
4346*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x25, d25);
4347*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x26, d26);
4348*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x27, d27);
4349*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x28, d28);
4350*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x29, d29);
4351*f5c631daSSadaf Ebrahimi   __ Fcvtzs(x30, d30);
4352*f5c631daSSadaf Ebrahimi   END();
4353*f5c631daSSadaf Ebrahimi 
4354*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4355*f5c631daSSadaf Ebrahimi     RUN();
4356*f5c631daSSadaf Ebrahimi 
4357*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
4358*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
4359*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x2);
4360*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x3);
4361*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x4);
4362*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x5);
4363*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
4364*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000080, x7);
4365*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
4366*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
4367*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x10);
4368*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x11);
4369*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffff, x12);
4370*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000000, x13);
4371*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
4372*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x80000001, x15);
4373*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
4374*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x18);
4375*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x19);
4376*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x20);
4377*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x21);
4378*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
4379*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000008000000000, x23);
4380*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
4381*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x25);
4382*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x26);
4383*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffffffffffffff, x27);
4384*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000000, x28);
4385*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
4386*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x8000000000000400, x30);
4387*f5c631daSSadaf Ebrahimi   }
4388*f5c631daSSadaf Ebrahimi }
4389*f5c631daSSadaf Ebrahimi 
4390*f5c631daSSadaf Ebrahimi void FjcvtzsHelper(uint64_t value, uint64_t expected, uint32_t expected_z) {
4391*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kJSCVT);
4392*f5c631daSSadaf Ebrahimi   START();
4393*f5c631daSSadaf Ebrahimi   __ Fmov(d0, RawbitsToDouble(value));
4394*f5c631daSSadaf Ebrahimi   __ Fjcvtzs(w0, d0);
4395*f5c631daSSadaf Ebrahimi   __ Mrs(x1, NZCV);
4396*f5c631daSSadaf Ebrahimi   END();
4397*f5c631daSSadaf Ebrahimi 
4398*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4399*f5c631daSSadaf Ebrahimi     RUN();
4400*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(expected, x0);
4401*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_32(expected_z, w1);
4402*f5c631daSSadaf Ebrahimi   }
4403*f5c631daSSadaf Ebrahimi }
4404*f5c631daSSadaf Ebrahimi 
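// Informal summary of FJCVTZS for the checks below (see the Arm ARM for the
// authoritative definition): it rounds towards zero and behaves much like the
// ECMAScript ToInt32 operation, writing the low 32 bits of the converted
// integer (NaNs and infinities produce zero), and it sets the Z flag only
// when the conversion is exact and in range, so expected_z is ZFlag for exact
// conversions and NoFlag otherwise. Note from the cases below that -0.0 does
// not count as exact.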
4405*f5c631daSSadaf Ebrahimi TEST(fjcvtzs) {
4406*f5c631daSSadaf Ebrahimi   /* Simple values. */
4407*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x0000000000000000, 0, ZFlag);   // 0.0
4408*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x0010000000000000, 0, NoFlag);  // The smallest normal value.
4409*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3fdfffffffffffff, 0, NoFlag);  // The value just below 0.5.
4410*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3fe0000000000000, 0, NoFlag);  // 0.5
4411*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3fe0000000000001, 0, NoFlag);  // The value just above 0.5.
4412*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3fefffffffffffff, 0, NoFlag);  // The value just below 1.0.
4413*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3ff0000000000000, 1, ZFlag);   // 1.0
4414*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3ff0000000000001, 1, NoFlag);  // The value just above 1.0.
4415*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x3ff8000000000000, 1, NoFlag);  // 1.5
4416*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x4024000000000000, 10, ZFlag);  // 10
4417*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7fefffffffffffff, 0, NoFlag);  // The largest finite value.
4418*f5c631daSSadaf Ebrahimi 
4419*f5c631daSSadaf Ebrahimi   /* Infinity. */
4420*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7ff0000000000000, 0, NoFlag);
4421*f5c631daSSadaf Ebrahimi 
4422*f5c631daSSadaf Ebrahimi   /* NaNs. */
4423*f5c631daSSadaf Ebrahimi   /*  - Quiet NaNs */
4424*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7ff923456789abcd, 0, NoFlag);
4425*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7ff8000000000000, 0, NoFlag);
4426*f5c631daSSadaf Ebrahimi   /*  - Signalling NaNs */
4427*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7ff123456789abcd, 0, NoFlag);
4428*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x7ff0000000000001, 0, NoFlag);
4429*f5c631daSSadaf Ebrahimi 
4430*f5c631daSSadaf Ebrahimi   /* Subnormals. */
4431*f5c631daSSadaf Ebrahimi   /*  - A recognisable bit pattern. */
4432*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x000123456789abcd, 0, NoFlag);
4433*f5c631daSSadaf Ebrahimi   /*  - The largest subnormal value. */
4434*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x000fffffffffffff, 0, NoFlag);
4435*f5c631daSSadaf Ebrahimi   /*  - The smallest subnormal value. */
4436*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x0000000000000001, 0, NoFlag);
4437*f5c631daSSadaf Ebrahimi 
4438*f5c631daSSadaf Ebrahimi   /* The same values again, but negated. */
4439*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x8000000000000000, 0, NoFlag);
4440*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x8010000000000000, 0, NoFlag);
4441*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbfdfffffffffffff, 0, NoFlag);
4442*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbfe0000000000000, 0, NoFlag);
4443*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbfe0000000000001, 0, NoFlag);
4444*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbfefffffffffffff, 0, NoFlag);
4445*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbff0000000000000, 0xffffffff, ZFlag);
4446*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbff0000000000001, 0xffffffff, NoFlag);
4447*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xbff8000000000000, 0xffffffff, NoFlag);
4448*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xc024000000000000, 0xfffffff6, ZFlag);
4449*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xffefffffffffffff, 0, NoFlag);
4450*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xfff0000000000000, 0, NoFlag);
4451*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xfff923456789abcd, 0, NoFlag);
4452*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xfff8000000000000, 0, NoFlag);
4453*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xfff123456789abcd, 0, NoFlag);
4454*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0xfff0000000000001, 0, NoFlag);
4455*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x800123456789abcd, 0, NoFlag);
4456*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x800fffffffffffff, 0, NoFlag);
4457*f5c631daSSadaf Ebrahimi   FjcvtzsHelper(0x8000000000000001, 0, NoFlag);
4458*f5c631daSSadaf Ebrahimi 
4459*f5c631daSSadaf Ebrahimi   // Test floating-point numbers of every possible exponent. Most of the
4460*f5c631daSSadaf Ebrahimi   // expected values are zero, but there is a range of exponents where the
4461*f5c631daSSadaf Ebrahimi   // results are shifted parts of this mantissa.
4462*f5c631daSSadaf Ebrahimi   uint64_t mantissa = 0x0001234567890abc;
4463*f5c631daSSadaf Ebrahimi 
4464*f5c631daSSadaf Ebrahimi   // For exponents between 0 and 52, only some of the top bits of the
4465*f5c631daSSadaf Ebrahimi   // mantissa lie above the binary point of the double, so the mantissa is
4466*f5c631daSSadaf Ebrahimi   // shifted right to leave just those top bits. Above 52, all bits of the
4467*f5c631daSSadaf Ebrahimi   // mantissa are shifted left above the binary point, until at an exponent
4468*f5c631daSSadaf Ebrahimi   // of 52 + 64 every bit has been shifted out of the range of 64-bit
4469*f5c631daSSadaf Ebrahimi   // integers.
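  // Two illustrative cases, hand-derived from the description above: for an
  // exponent field of 1024 (e = 1), expected is
  // (1 << 1) | (mantissa >> 51) = 2; for an exponent field of 1080 (e = 57),
  // expected is (mantissa << 5) & 0xffffffff = 0xf1215780.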
4470*f5c631daSSadaf Ebrahimi   int first_exp_boundary = 52;
4471*f5c631daSSadaf Ebrahimi   int second_exp_boundary = first_exp_boundary + 64;
4472*f5c631daSSadaf Ebrahimi   for (int exponent = 0; exponent < 2048; exponent += 8) {
4473*f5c631daSSadaf Ebrahimi     int e = exponent - 1023;
4474*f5c631daSSadaf Ebrahimi 
4475*f5c631daSSadaf Ebrahimi     uint64_t expected = 0;
4476*f5c631daSSadaf Ebrahimi     if (e < 0) {
4477*f5c631daSSadaf Ebrahimi       expected = 0;
4478*f5c631daSSadaf Ebrahimi     } else if (e <= first_exp_boundary) {
4479*f5c631daSSadaf Ebrahimi       expected = (UINT64_C(1) << e) | (mantissa >> (52 - e));
4480*f5c631daSSadaf Ebrahimi       expected &= 0xffffffff;
4481*f5c631daSSadaf Ebrahimi     } else if (e < second_exp_boundary) {
4482*f5c631daSSadaf Ebrahimi       expected = (mantissa << (e - 52)) & 0xffffffff;
4483*f5c631daSSadaf Ebrahimi     } else {
4484*f5c631daSSadaf Ebrahimi       expected = 0;
4485*f5c631daSSadaf Ebrahimi     }
4486*f5c631daSSadaf Ebrahimi 
4487*f5c631daSSadaf Ebrahimi     uint64_t value = (static_cast<uint64_t>(exponent) << 52) | mantissa;
4488*f5c631daSSadaf Ebrahimi     FjcvtzsHelper(value, expected, NoFlag);
4489*f5c631daSSadaf Ebrahimi     FjcvtzsHelper(value | kDSignMask, (-expected) & 0xffffffff, NoFlag);
4490*f5c631daSSadaf Ebrahimi   }
4491*f5c631daSSadaf Ebrahimi }
4492*f5c631daSSadaf Ebrahimi 
4493*f5c631daSSadaf Ebrahimi TEST(fcvtzu) {
4494*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4495*f5c631daSSadaf Ebrahimi 
4496*f5c631daSSadaf Ebrahimi   START();
4497*f5c631daSSadaf Ebrahimi   __ Fmov(s0, 1.0);
4498*f5c631daSSadaf Ebrahimi   __ Fmov(s1, 1.1);
4499*f5c631daSSadaf Ebrahimi   __ Fmov(s2, 1.5);
4500*f5c631daSSadaf Ebrahimi   __ Fmov(s3, -1.5);
4501*f5c631daSSadaf Ebrahimi   __ Fmov(s4, kFP32PositiveInfinity);
4502*f5c631daSSadaf Ebrahimi   __ Fmov(s5, kFP32NegativeInfinity);
4503*f5c631daSSadaf Ebrahimi   __ Fmov(s6, 0x7fffff80);  // Largest float < INT32_MAX.
4504*f5c631daSSadaf Ebrahimi   __ Fneg(s7, s6);          // Smallest float > INT32_MIN.
4505*f5c631daSSadaf Ebrahimi   __ Fmov(d8, 1.0);
4506*f5c631daSSadaf Ebrahimi   __ Fmov(d9, 1.1);
4507*f5c631daSSadaf Ebrahimi   __ Fmov(d10, 1.5);
4508*f5c631daSSadaf Ebrahimi   __ Fmov(d11, -1.5);
4509*f5c631daSSadaf Ebrahimi   __ Fmov(d12, kFP64PositiveInfinity);
4510*f5c631daSSadaf Ebrahimi   __ Fmov(d13, kFP64NegativeInfinity);
4511*f5c631daSSadaf Ebrahimi   __ Fmov(d14, kWMaxInt - 1);
4512*f5c631daSSadaf Ebrahimi   __ Fmov(d15, kWMinInt + 1);
4513*f5c631daSSadaf Ebrahimi   __ Fmov(s17, 1.1);
4514*f5c631daSSadaf Ebrahimi   __ Fmov(s18, 1.5);
4515*f5c631daSSadaf Ebrahimi   __ Fmov(s19, -1.5);
4516*f5c631daSSadaf Ebrahimi   __ Fmov(s20, kFP32PositiveInfinity);
4517*f5c631daSSadaf Ebrahimi   __ Fmov(s21, kFP32NegativeInfinity);
4518*f5c631daSSadaf Ebrahimi   __ Fmov(s22, 0x7fffff8000000000);  // Largest float < INT64_MAX.
4519*f5c631daSSadaf Ebrahimi   __ Fneg(s23, s22);                 // Smallest float > INT64_MIN.
4520*f5c631daSSadaf Ebrahimi   __ Fmov(d24, 1.1);
4521*f5c631daSSadaf Ebrahimi   __ Fmov(d25, 1.5);
4522*f5c631daSSadaf Ebrahimi   __ Fmov(d26, -1.5);
4523*f5c631daSSadaf Ebrahimi   __ Fmov(d27, kFP64PositiveInfinity);
4524*f5c631daSSadaf Ebrahimi   __ Fmov(d28, kFP64NegativeInfinity);
4525*f5c631daSSadaf Ebrahimi   __ Fmov(d29, 0x7ffffffffffffc00);  // Largest double < INT64_MAX.
4526*f5c631daSSadaf Ebrahimi   __ Fneg(d30, d29);                 // Smallest double > INT64_MIN.
4527*f5c631daSSadaf Ebrahimi 
4528*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w0, s0);
4529*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w1, s1);
4530*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w2, s2);
4531*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w3, s3);
4532*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w4, s4);
4533*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w5, s5);
4534*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w6, s6);
4535*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w7, s7);
4536*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w8, d8);
4537*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w9, d9);
4538*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w10, d10);
4539*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w11, d11);
4540*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w12, d12);
4541*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w13, d13);
4542*f5c631daSSadaf Ebrahimi   __ Fcvtzu(w14, d14);
4543*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x17, s17);
4544*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x18, s18);
4545*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x19, s19);
4546*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x20, s20);
4547*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x21, s21);
4548*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x22, s22);
4549*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x23, s23);
4550*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x24, d24);
4551*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x25, d25);
4552*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x26, d26);
4553*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x27, d27);
4554*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x28, d28);
4555*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x29, d29);
4556*f5c631daSSadaf Ebrahimi   __ Fcvtzu(x30, d30);
4557*f5c631daSSadaf Ebrahimi   END();
4558*f5c631daSSadaf Ebrahimi 
4559*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4560*f5c631daSSadaf Ebrahimi     RUN();
4561*f5c631daSSadaf Ebrahimi 
4562*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x0);
4563*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x1);
4564*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x2);
4565*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x3);
4566*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x4);
4567*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x5);
4568*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff80, x6);
4569*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x7);
4570*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x8);
4571*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x9);
4572*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x10);
4573*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x11);
4574*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffff, x12);
4575*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x13);
4576*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffe, x14);
4577*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x17);
4578*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x18);
4579*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x19);
4580*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x20);
4581*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x21);
4582*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7fffff8000000000, x22);
4583*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x23);
4584*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x24);
4585*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(1, x25);
4586*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x26);
4587*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0xffffffffffffffff, x27);
4588*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x28);
4589*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0x7ffffffffffffc00, x29);
4590*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_64(0, x30);
4591*f5c631daSSadaf Ebrahimi   }
4592*f5c631daSSadaf Ebrahimi }
4593*f5c631daSSadaf Ebrahimi 
4594*f5c631daSSadaf Ebrahimi // Test that scvtf and ucvtf can convert the 64-bit input into the expected
4595*f5c631daSSadaf Ebrahimi // value. All possible values of 'fbits' are tested. The expected value is
4596*f5c631daSSadaf Ebrahimi // modified accordingly in each case.
4597*f5c631daSSadaf Ebrahimi //
4598*f5c631daSSadaf Ebrahimi // The expected value is specified as the bit encoding of the expected double
4599*f5c631daSSadaf Ebrahimi // produced by scvtf (expected_scvtf_bits) as well as ucvtf
4600*f5c631daSSadaf Ebrahimi // (expected_ucvtf_bits).
4601*f5c631daSSadaf Ebrahimi //
4602*f5c631daSSadaf Ebrahimi // Where the input value is representable by int32_t or uint32_t, conversions
4603*f5c631daSSadaf Ebrahimi // from W registers will also be tested.
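// For illustration (not an input used below): with in = 16, fbits = 0 should
// produce 16.0, while fbits = 4 treats the input as a fixed-point value with
// four fractional bits and should produce 16 / 2^4 = 1.0. The checks below
// apply the same scaling, dividing the expected value by 2^fbits.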
4604*f5c631daSSadaf Ebrahimi static void TestUScvtfHelper(uint64_t in,
4605*f5c631daSSadaf Ebrahimi                              uint64_t expected_scvtf_bits,
4606*f5c631daSSadaf Ebrahimi                              uint64_t expected_ucvtf_bits) {
4607*f5c631daSSadaf Ebrahimi   uint64_t u64 = in;
4608*f5c631daSSadaf Ebrahimi   uint32_t u32 = u64 & 0xffffffff;
4609*f5c631daSSadaf Ebrahimi   int64_t s64 = static_cast<int64_t>(in);
4610*f5c631daSSadaf Ebrahimi   int32_t s32 = s64 & 0x7fffffff;
4611*f5c631daSSadaf Ebrahimi 
4612*f5c631daSSadaf Ebrahimi   bool cvtf_s32 = (s64 == s32);
4613*f5c631daSSadaf Ebrahimi   bool cvtf_u32 = (u64 == u32);
4614*f5c631daSSadaf Ebrahimi 
4615*f5c631daSSadaf Ebrahimi   double results_scvtf_x[65];
4616*f5c631daSSadaf Ebrahimi   double results_ucvtf_x[65];
4617*f5c631daSSadaf Ebrahimi   double results_scvtf_w[33];
4618*f5c631daSSadaf Ebrahimi   double results_ucvtf_w[33];
4619*f5c631daSSadaf Ebrahimi 
4620*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4621*f5c631daSSadaf Ebrahimi 
4622*f5c631daSSadaf Ebrahimi   START();
4623*f5c631daSSadaf Ebrahimi 
4624*f5c631daSSadaf Ebrahimi   __ Mov(x0, reinterpret_cast<uintptr_t>(results_scvtf_x));
4625*f5c631daSSadaf Ebrahimi   __ Mov(x1, reinterpret_cast<uintptr_t>(results_ucvtf_x));
4626*f5c631daSSadaf Ebrahimi   __ Mov(x2, reinterpret_cast<uintptr_t>(results_scvtf_w));
4627*f5c631daSSadaf Ebrahimi   __ Mov(x3, reinterpret_cast<uintptr_t>(results_ucvtf_w));
4628*f5c631daSSadaf Ebrahimi 
4629*f5c631daSSadaf Ebrahimi   __ Mov(x10, s64);
4630*f5c631daSSadaf Ebrahimi 
4631*f5c631daSSadaf Ebrahimi   // Corrupt the top word, in case it is accidentally used during W-register
4632*f5c631daSSadaf Ebrahimi   // conversions.
4633*f5c631daSSadaf Ebrahimi   __ Mov(x11, 0x5555555555555555);
4634*f5c631daSSadaf Ebrahimi   __ Bfi(x11, x10, 0, kWRegSize);
4635*f5c631daSSadaf Ebrahimi 
4636*f5c631daSSadaf Ebrahimi   // Test integer conversions.
4637*f5c631daSSadaf Ebrahimi   __ Scvtf(d0, x10);
4638*f5c631daSSadaf Ebrahimi   __ Ucvtf(d1, x10);
4639*f5c631daSSadaf Ebrahimi   __ Scvtf(d2, w11);
4640*f5c631daSSadaf Ebrahimi   __ Ucvtf(d3, w11);
4641*f5c631daSSadaf Ebrahimi   __ Str(d0, MemOperand(x0));
4642*f5c631daSSadaf Ebrahimi   __ Str(d1, MemOperand(x1));
4643*f5c631daSSadaf Ebrahimi   __ Str(d2, MemOperand(x2));
4644*f5c631daSSadaf Ebrahimi   __ Str(d3, MemOperand(x3));
4645*f5c631daSSadaf Ebrahimi 
4646*f5c631daSSadaf Ebrahimi   // Test all possible values of fbits.
4647*f5c631daSSadaf Ebrahimi   for (int fbits = 1; fbits <= 32; fbits++) {
4648*f5c631daSSadaf Ebrahimi     __ Scvtf(d0, x10, fbits);
4649*f5c631daSSadaf Ebrahimi     __ Ucvtf(d1, x10, fbits);
4650*f5c631daSSadaf Ebrahimi     __ Scvtf(d2, w11, fbits);
4651*f5c631daSSadaf Ebrahimi     __ Ucvtf(d3, w11, fbits);
4652*f5c631daSSadaf Ebrahimi     __ Str(d0, MemOperand(x0, fbits * kDRegSizeInBytes));
4653*f5c631daSSadaf Ebrahimi     __ Str(d1, MemOperand(x1, fbits * kDRegSizeInBytes));
4654*f5c631daSSadaf Ebrahimi     __ Str(d2, MemOperand(x2, fbits * kDRegSizeInBytes));
4655*f5c631daSSadaf Ebrahimi     __ Str(d3, MemOperand(x3, fbits * kDRegSizeInBytes));
4656*f5c631daSSadaf Ebrahimi   }
4657*f5c631daSSadaf Ebrahimi 
4658*f5c631daSSadaf Ebrahimi   // Conversions from W registers can only handle fbits values <= 32, so just
4659*f5c631daSSadaf Ebrahimi   // test conversions from X registers for 32 < fbits <= 64.
4660*f5c631daSSadaf Ebrahimi   for (int fbits = 33; fbits <= 64; fbits++) {
4661*f5c631daSSadaf Ebrahimi     __ Scvtf(d0, x10, fbits);
4662*f5c631daSSadaf Ebrahimi     __ Ucvtf(d1, x10, fbits);
4663*f5c631daSSadaf Ebrahimi     __ Str(d0, MemOperand(x0, fbits * kDRegSizeInBytes));
4664*f5c631daSSadaf Ebrahimi     __ Str(d1, MemOperand(x1, fbits * kDRegSizeInBytes));
4665*f5c631daSSadaf Ebrahimi   }
4666*f5c631daSSadaf Ebrahimi 
4667*f5c631daSSadaf Ebrahimi   END();
4668*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4669*f5c631daSSadaf Ebrahimi     RUN();
4670*f5c631daSSadaf Ebrahimi 
4671*f5c631daSSadaf Ebrahimi     // Check the results.
4672*f5c631daSSadaf Ebrahimi     double expected_scvtf_base = RawbitsToDouble(expected_scvtf_bits);
4673*f5c631daSSadaf Ebrahimi     double expected_ucvtf_base = RawbitsToDouble(expected_ucvtf_bits);
4674*f5c631daSSadaf Ebrahimi 
4675*f5c631daSSadaf Ebrahimi     for (int fbits = 0; fbits <= 32; fbits++) {
4676*f5c631daSSadaf Ebrahimi       double expected_scvtf = expected_scvtf_base / std::pow(2, fbits);
4677*f5c631daSSadaf Ebrahimi       double expected_ucvtf = expected_ucvtf_base / std::pow(2, fbits);
4678*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]);
4679*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]);
4680*f5c631daSSadaf Ebrahimi       if (cvtf_s32) ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_w[fbits]);
4681*f5c631daSSadaf Ebrahimi       if (cvtf_u32) ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_w[fbits]);
4682*f5c631daSSadaf Ebrahimi     }
4683*f5c631daSSadaf Ebrahimi     for (int fbits = 33; fbits <= 64; fbits++) {
4684*f5c631daSSadaf Ebrahimi       double expected_scvtf = expected_scvtf_base / std::pow(2, fbits);
4685*f5c631daSSadaf Ebrahimi       double expected_ucvtf = expected_ucvtf_base / std::pow(2, fbits);
4686*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]);
4687*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]);
4688*f5c631daSSadaf Ebrahimi     }
4689*f5c631daSSadaf Ebrahimi   }
4690*f5c631daSSadaf Ebrahimi }
4691*f5c631daSSadaf Ebrahimi 
4692*f5c631daSSadaf Ebrahimi 
4693*f5c631daSSadaf Ebrahimi TEST(scvtf_ucvtf_double) {
4694*f5c631daSSadaf Ebrahimi   // Simple conversions of positive numbers which require no rounding; the
4695*f5c631daSSadaf Ebrahimi   // results should not depend on the rounding mode, and ucvtf and scvtf should
4696*f5c631daSSadaf Ebrahimi   // produce the same result.
4697*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000000000000, 0x0000000000000000, 0x0000000000000000);
4698*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000000000001, 0x3ff0000000000000, 0x3ff0000000000000);
4699*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000040000000, 0x41d0000000000000, 0x41d0000000000000);
4700*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000100000000, 0x41f0000000000000, 0x41f0000000000000);
4701*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x4000000000000000, 0x43d0000000000000, 0x43d0000000000000);
4702*f5c631daSSadaf Ebrahimi   // Test mantissa extremities.
4703*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x4000000000000400, 0x43d0000000000001, 0x43d0000000000001);
4704*f5c631daSSadaf Ebrahimi   // The largest int32_t that fits in a double.
4705*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x000000007fffffff, 0x41dfffffffc00000, 0x41dfffffffc00000);
4706*f5c631daSSadaf Ebrahimi   // Values that would be negative if treated as an int32_t.
4707*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x00000000ffffffff, 0x41efffffffe00000, 0x41efffffffe00000);
4708*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000080000000, 0x41e0000000000000, 0x41e0000000000000);
4709*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000080000001, 0x41e0000000200000, 0x41e0000000200000);
4710*f5c631daSSadaf Ebrahimi   // The largest int64_t that fits in a double.
4711*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x7ffffffffffffc00, 0x43dfffffffffffff, 0x43dfffffffffffff);
4712*f5c631daSSadaf Ebrahimi   // Check for bit pattern reproduction.
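  // For example, 0x0123456789abcde0 has its leading set bit at bit 56 and its
  // low four bits clear, so it is exactly representable as a double: the
  // expected encoding 0x43723456789abcde has a biased exponent of 0x437
  // (1023 + 56) and a fraction field of 0x23456789abcde.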
4713*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0123456789abcde0, 0x43723456789abcde, 0x43723456789abcde);
4714*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x0000000012345678, 0x41b2345678000000, 0x41b2345678000000);
4715*f5c631daSSadaf Ebrahimi 
4716*f5c631daSSadaf Ebrahimi   // Simple conversions of negative int64_t values. These require no rounding,
4717*f5c631daSSadaf Ebrahimi   // and the results should not depend on the rounding mode.
4718*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0xffffffffc0000000, 0xc1d0000000000000, 0x43effffffff80000);
4719*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0xffffffff00000000, 0xc1f0000000000000, 0x43efffffffe00000);
4720*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0xc000000000000000, 0xc3d0000000000000, 0x43e8000000000000);
4721*f5c631daSSadaf Ebrahimi 
4722*f5c631daSSadaf Ebrahimi   // Conversions which require rounding.
4723*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000000, 0x43b0000000000000, 0x43b0000000000000);
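  // For illustration, assuming the default round-to-nearest-even mode:
  // 0x1000000000000000 is 2^60, so the low eight bits of these inputs fall
  // below double precision. An offset of 0x80 is exactly halfway and ties to
  // the even mantissa (...000), 0x81 rounds up to ...001, and 0x180 (halfway
  // between ...001 and ...002) again ties to even, giving ...002.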
4724*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000001, 0x43b0000000000000, 0x43b0000000000000);
4725*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000080, 0x43b0000000000000, 0x43b0000000000000);
4726*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000081, 0x43b0000000000001, 0x43b0000000000001);
4727*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000100, 0x43b0000000000001, 0x43b0000000000001);
4728*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000101, 0x43b0000000000001, 0x43b0000000000001);
4729*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000180, 0x43b0000000000002, 0x43b0000000000002);
4730*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000181, 0x43b0000000000002, 0x43b0000000000002);
4731*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000200, 0x43b0000000000002, 0x43b0000000000002);
4732*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000201, 0x43b0000000000002, 0x43b0000000000002);
4733*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000280, 0x43b0000000000002, 0x43b0000000000002);
4734*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000281, 0x43b0000000000003, 0x43b0000000000003);
4735*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x1000000000000300, 0x43b0000000000003, 0x43b0000000000003);
4736*f5c631daSSadaf Ebrahimi   // Check rounding of negative int64_t values (and large uint64_t values).
4737*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000000, 0xc3e0000000000000, 0x43e0000000000000);
4738*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000001, 0xc3e0000000000000, 0x43e0000000000000);
4739*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000200, 0xc3e0000000000000, 0x43e0000000000000);
4740*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000201, 0xc3dfffffffffffff, 0x43e0000000000000);
4741*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000400, 0xc3dfffffffffffff, 0x43e0000000000000);
4742*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000401, 0xc3dfffffffffffff, 0x43e0000000000001);
4743*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000600, 0xc3dffffffffffffe, 0x43e0000000000001);
4744*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000601, 0xc3dffffffffffffe, 0x43e0000000000001);
4745*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000800, 0xc3dffffffffffffe, 0x43e0000000000001);
4746*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000801, 0xc3dffffffffffffe, 0x43e0000000000001);
4747*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000a00, 0xc3dffffffffffffe, 0x43e0000000000001);
4748*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000a01, 0xc3dffffffffffffd, 0x43e0000000000001);
4749*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x8000000000000c00, 0xc3dffffffffffffd, 0x43e0000000000002);
4750*f5c631daSSadaf Ebrahimi   // Round up to produce a result that's too big for the input to represent.
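  // For example, 0x7fffffffffffffff (INT64_MAX) rounds up to 2^63, encoded as
  // 0x43e0000000000000, which is not representable as an int64_t.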
4751*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x7ffffffffffffe00, 0x43e0000000000000, 0x43e0000000000000);
4752*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0x7fffffffffffffff, 0x43e0000000000000, 0x43e0000000000000);
4753*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0xfffffffffffffc00, 0xc090000000000000, 0x43f0000000000000);
4754*f5c631daSSadaf Ebrahimi   TestUScvtfHelper(0xffffffffffffffff, 0xbff0000000000000, 0x43f0000000000000);
4755*f5c631daSSadaf Ebrahimi }
4756*f5c631daSSadaf Ebrahimi 
4757*f5c631daSSadaf Ebrahimi 
4758*f5c631daSSadaf Ebrahimi // The same as TestUScvtfHelper, but converting to floats.
4759*f5c631daSSadaf Ebrahimi static void TestUScvtf32Helper(uint64_t in,
4760*f5c631daSSadaf Ebrahimi                                uint32_t expected_scvtf_bits,
4761*f5c631daSSadaf Ebrahimi                                uint32_t expected_ucvtf_bits) {
4762*f5c631daSSadaf Ebrahimi   uint64_t u64 = in;
4763*f5c631daSSadaf Ebrahimi   uint32_t u32 = u64 & 0xffffffff;
4764*f5c631daSSadaf Ebrahimi   int64_t s64 = static_cast<int64_t>(in);
4765*f5c631daSSadaf Ebrahimi   int32_t s32 = s64 & 0x7fffffff;
4766*f5c631daSSadaf Ebrahimi 
4767*f5c631daSSadaf Ebrahimi   bool cvtf_s32 = (s64 == s32);
4768*f5c631daSSadaf Ebrahimi   bool cvtf_u32 = (u64 == u32);
4769*f5c631daSSadaf Ebrahimi 
4770*f5c631daSSadaf Ebrahimi   float results_scvtf_x[65];
4771*f5c631daSSadaf Ebrahimi   float results_ucvtf_x[65];
4772*f5c631daSSadaf Ebrahimi   float results_scvtf_w[33];
4773*f5c631daSSadaf Ebrahimi   float results_ucvtf_w[33];
4774*f5c631daSSadaf Ebrahimi 
4775*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4776*f5c631daSSadaf Ebrahimi 
4777*f5c631daSSadaf Ebrahimi   START();
4778*f5c631daSSadaf Ebrahimi 
4779*f5c631daSSadaf Ebrahimi   __ Mov(x0, reinterpret_cast<uintptr_t>(results_scvtf_x));
4780*f5c631daSSadaf Ebrahimi   __ Mov(x1, reinterpret_cast<uintptr_t>(results_ucvtf_x));
4781*f5c631daSSadaf Ebrahimi   __ Mov(x2, reinterpret_cast<uintptr_t>(results_scvtf_w));
4782*f5c631daSSadaf Ebrahimi   __ Mov(x3, reinterpret_cast<uintptr_t>(results_ucvtf_w));
4783*f5c631daSSadaf Ebrahimi 
4784*f5c631daSSadaf Ebrahimi   __ Mov(x10, s64);
4785*f5c631daSSadaf Ebrahimi 
4786*f5c631daSSadaf Ebrahimi   // Corrupt the top word, in case it is accidentally used during W-register
4787*f5c631daSSadaf Ebrahimi   // conversions.
4788*f5c631daSSadaf Ebrahimi   __ Mov(x11, 0x5555555555555555);
4789*f5c631daSSadaf Ebrahimi   __ Bfi(x11, x10, 0, kWRegSize);
4790*f5c631daSSadaf Ebrahimi 
4791*f5c631daSSadaf Ebrahimi   // Test integer conversions.
4792*f5c631daSSadaf Ebrahimi   __ Scvtf(s0, x10);
4793*f5c631daSSadaf Ebrahimi   __ Ucvtf(s1, x10);
4794*f5c631daSSadaf Ebrahimi   __ Scvtf(s2, w11);
4795*f5c631daSSadaf Ebrahimi   __ Ucvtf(s3, w11);
4796*f5c631daSSadaf Ebrahimi   __ Str(s0, MemOperand(x0));
4797*f5c631daSSadaf Ebrahimi   __ Str(s1, MemOperand(x1));
4798*f5c631daSSadaf Ebrahimi   __ Str(s2, MemOperand(x2));
4799*f5c631daSSadaf Ebrahimi   __ Str(s3, MemOperand(x3));
4800*f5c631daSSadaf Ebrahimi 
4801*f5c631daSSadaf Ebrahimi   // Test all possible values of fbits.
4802*f5c631daSSadaf Ebrahimi   for (int fbits = 1; fbits <= 32; fbits++) {
4803*f5c631daSSadaf Ebrahimi     __ Scvtf(s0, x10, fbits);
4804*f5c631daSSadaf Ebrahimi     __ Ucvtf(s1, x10, fbits);
4805*f5c631daSSadaf Ebrahimi     __ Scvtf(s2, w11, fbits);
4806*f5c631daSSadaf Ebrahimi     __ Ucvtf(s3, w11, fbits);
4807*f5c631daSSadaf Ebrahimi     __ Str(s0, MemOperand(x0, fbits * kSRegSizeInBytes));
4808*f5c631daSSadaf Ebrahimi     __ Str(s1, MemOperand(x1, fbits * kSRegSizeInBytes));
4809*f5c631daSSadaf Ebrahimi     __ Str(s2, MemOperand(x2, fbits * kSRegSizeInBytes));
4810*f5c631daSSadaf Ebrahimi     __ Str(s3, MemOperand(x3, fbits * kSRegSizeInBytes));
4811*f5c631daSSadaf Ebrahimi   }
4812*f5c631daSSadaf Ebrahimi 
4813*f5c631daSSadaf Ebrahimi   // Conversions from W registers can only handle fbits values <= 32, so just
4814*f5c631daSSadaf Ebrahimi   // test conversions from X registers for 32 < fbits <= 64.
4815*f5c631daSSadaf Ebrahimi   for (int fbits = 33; fbits <= 64; fbits++) {
4816*f5c631daSSadaf Ebrahimi     __ Scvtf(s0, x10, fbits);
4817*f5c631daSSadaf Ebrahimi     __ Ucvtf(s1, x10, fbits);
4818*f5c631daSSadaf Ebrahimi     __ Str(s0, MemOperand(x0, fbits * kSRegSizeInBytes));
4819*f5c631daSSadaf Ebrahimi     __ Str(s1, MemOperand(x1, fbits * kSRegSizeInBytes));
4820*f5c631daSSadaf Ebrahimi   }
4821*f5c631daSSadaf Ebrahimi 
4822*f5c631daSSadaf Ebrahimi   END();
4823*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4824*f5c631daSSadaf Ebrahimi     RUN();
4825*f5c631daSSadaf Ebrahimi 
4826*f5c631daSSadaf Ebrahimi     // Check the results.
4827*f5c631daSSadaf Ebrahimi     float expected_scvtf_base = RawbitsToFloat(expected_scvtf_bits);
4828*f5c631daSSadaf Ebrahimi     float expected_ucvtf_base = RawbitsToFloat(expected_ucvtf_bits);
4829*f5c631daSSadaf Ebrahimi 
4830*f5c631daSSadaf Ebrahimi     for (int fbits = 0; fbits <= 32; fbits++) {
4831*f5c631daSSadaf Ebrahimi       float expected_scvtf = expected_scvtf_base / std::pow(2.0f, fbits);
4832*f5c631daSSadaf Ebrahimi       float expected_ucvtf = expected_ucvtf_base / std::pow(2.0f, fbits);
4833*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]);
4834*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]);
4835*f5c631daSSadaf Ebrahimi       if (cvtf_s32) ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_w[fbits]);
4836*f5c631daSSadaf Ebrahimi       if (cvtf_u32) ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_w[fbits]);
4837*f5c631daSSadaf Ebrahimi     }
4838*f5c631daSSadaf Ebrahimi     for (int fbits = 33; fbits <= 64; fbits++) {
4839*f5c631daSSadaf Ebrahimi       float expected_scvtf = expected_scvtf_base / std::pow(2.0f, fbits);
4840*f5c631daSSadaf Ebrahimi       float expected_ucvtf = expected_ucvtf_base / std::pow(2.0f, fbits);
4841*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]);
4842*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]);
4843*f5c631daSSadaf Ebrahimi     }
4844*f5c631daSSadaf Ebrahimi   }
4845*f5c631daSSadaf Ebrahimi }
4846*f5c631daSSadaf Ebrahimi 
4847*f5c631daSSadaf Ebrahimi 
4848*f5c631daSSadaf Ebrahimi TEST(scvtf_ucvtf_float) {
4849*f5c631daSSadaf Ebrahimi   // Simple conversions of positive numbers which require no rounding; the
4850*f5c631daSSadaf Ebrahimi   // results should not depend on the rounding mode, and ucvtf and scvtf should
4851*f5c631daSSadaf Ebrahimi   // produce the same result.
4852*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000000000000, 0x00000000, 0x00000000);
4853*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000000000001, 0x3f800000, 0x3f800000);
4854*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000040000000, 0x4e800000, 0x4e800000);
4855*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000100000000, 0x4f800000, 0x4f800000);
4856*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x4000000000000000, 0x5e800000, 0x5e800000);
4857*f5c631daSSadaf Ebrahimi   // Test mantissa extremities.
4858*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000000800001, 0x4b000001, 0x4b000001);
4859*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x4000008000000000, 0x5e800001, 0x5e800001);
4860*f5c631daSSadaf Ebrahimi   // The largest int32_t that fits in a float.
4861*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x000000007fffff80, 0x4effffff, 0x4effffff);
4862*f5c631daSSadaf Ebrahimi   // Values that would be negative if treated as an int32_t.
4863*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x00000000ffffff00, 0x4f7fffff, 0x4f7fffff);
4864*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000080000000, 0x4f000000, 0x4f000000);
4865*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000080000100, 0x4f000001, 0x4f000001);
4866*f5c631daSSadaf Ebrahimi   // The largest int64_t that fits in a float.
4867*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x7fffff8000000000, 0x5effffff, 0x5effffff);
4868*f5c631daSSadaf Ebrahimi   // Check for bit pattern reproduction.
4869*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000000000876543, 0x4b076543, 0x4b076543);
4870*f5c631daSSadaf Ebrahimi 
4871*f5c631daSSadaf Ebrahimi   // Simple conversions of negative int64_t values. These require no rounding,
4872*f5c631daSSadaf Ebrahimi   // and the results should not depend on the rounding mode.
4873*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0xfffffc0000000000, 0xd4800000, 0x5f7ffffc);
4874*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0xc000000000000000, 0xde800000, 0x5f400000);
4875*f5c631daSSadaf Ebrahimi 
4876*f5c631daSSadaf Ebrahimi   // Conversions which require rounding.
4877*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800000000000, 0x57000000, 0x57000000);
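  // As in the double test above, assuming round-to-nearest-even: a float has
  // a 24-bit significand, so for inputs near 0x0000800000000000 (2^47) the
  // low 24 bits are dropped, with halfway offsets such as 0x800000 tying to
  // the even mantissa.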
4878*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800000000001, 0x57000000, 0x57000000);
4879*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800000800000, 0x57000000, 0x57000000);
4880*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800000800001, 0x57000001, 0x57000001);
4881*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800001000000, 0x57000001, 0x57000001);
4882*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800001000001, 0x57000001, 0x57000001);
4883*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800001800000, 0x57000002, 0x57000002);
4884*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800001800001, 0x57000002, 0x57000002);
4885*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800002000000, 0x57000002, 0x57000002);
4886*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800002000001, 0x57000002, 0x57000002);
4887*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800002800000, 0x57000002, 0x57000002);
4888*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800002800001, 0x57000003, 0x57000003);
4889*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x0000800003000000, 0x57000003, 0x57000003);
4890*f5c631daSSadaf Ebrahimi   // Check rounding of negative int64_t values (and large uint64_t values).
4891*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000000000000000, 0xdf000000, 0x5f000000);
4892*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000000000000001, 0xdf000000, 0x5f000000);
4893*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000004000000000, 0xdf000000, 0x5f000000);
4894*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000004000000001, 0xdeffffff, 0x5f000000);
4895*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000008000000000, 0xdeffffff, 0x5f000000);
4896*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000008000000001, 0xdeffffff, 0x5f000001);
4897*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x800000c000000000, 0xdefffffe, 0x5f000001);
4898*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x800000c000000001, 0xdefffffe, 0x5f000001);
4899*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000010000000000, 0xdefffffe, 0x5f000001);
4900*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000010000000001, 0xdefffffe, 0x5f000001);
4901*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000014000000000, 0xdefffffe, 0x5f000001);
4902*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000014000000001, 0xdefffffd, 0x5f000001);
4903*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x8000018000000000, 0xdefffffd, 0x5f000002);
4904*f5c631daSSadaf Ebrahimi   // Round up to produce a result that's too big for the input to represent.
4905*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x000000007fffffc0, 0x4f000000, 0x4f000000);
4906*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x000000007fffffff, 0x4f000000, 0x4f000000);
4907*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x00000000ffffff80, 0x4f800000, 0x4f800000);
4908*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x00000000ffffffff, 0x4f800000, 0x4f800000);
4909*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x7fffffc000000000, 0x5f000000, 0x5f000000);
4910*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0x7fffffffffffffff, 0x5f000000, 0x5f000000);
4911*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0xffffff8000000000, 0xd3000000, 0x5f800000);
4912*f5c631daSSadaf Ebrahimi   TestUScvtf32Helper(0xffffffffffffffff, 0xbf800000, 0x5f800000);
4913*f5c631daSSadaf Ebrahimi }
4914*f5c631daSSadaf Ebrahimi 
4915*f5c631daSSadaf Ebrahimi TEST(process_nan_double) {
4916*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
4917*f5c631daSSadaf Ebrahimi   double sn = RawbitsToDouble(0x7ff5555511111111);
4918*f5c631daSSadaf Ebrahimi   double qn = RawbitsToDouble(0x7ffaaaaa11111111);
4919*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
4920*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
4921*f5c631daSSadaf Ebrahimi 
4922*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
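  // (Quieting simply sets the most significant fraction bit, bit 51:
  // sn_proc == RawbitsToDouble(DoubleToRawbits(sn) | (UINT64_C(1) << 51)).)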
4923*f5c631daSSadaf Ebrahimi   double sn_proc = RawbitsToDouble(0x7ffd555511111111);
4924*f5c631daSSadaf Ebrahimi   double qn_proc = qn;
4925*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sn_proc));
4926*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn_proc));
4927*f5c631daSSadaf Ebrahimi 
4928*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
4929*f5c631daSSadaf Ebrahimi 
4930*f5c631daSSadaf Ebrahimi   START();
4931*f5c631daSSadaf Ebrahimi 
4932*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaN, and check that
4933*f5c631daSSadaf Ebrahimi   // they all handle the NaN correctly.
4934*f5c631daSSadaf Ebrahimi   __ Fmov(d0, sn);
4935*f5c631daSSadaf Ebrahimi   __ Fmov(d10, qn);
4936*f5c631daSSadaf Ebrahimi 
4937*f5c631daSSadaf Ebrahimi   // Operations that always propagate NaNs unchanged, even signalling NaNs.
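  // (Fmov, Fabs and Fneg only copy or modify the sign bit; they are not
  // arithmetic operations, so even the signalling bit is left untouched.)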
4938*f5c631daSSadaf Ebrahimi   //   - Signalling NaN
4939*f5c631daSSadaf Ebrahimi   __ Fmov(d1, d0);
4940*f5c631daSSadaf Ebrahimi   __ Fabs(d2, d0);
4941*f5c631daSSadaf Ebrahimi   __ Fneg(d3, d0);
4942*f5c631daSSadaf Ebrahimi   //   - Quiet NaN
4943*f5c631daSSadaf Ebrahimi   __ Fmov(d11, d10);
4944*f5c631daSSadaf Ebrahimi   __ Fabs(d12, d10);
4945*f5c631daSSadaf Ebrahimi   __ Fneg(d13, d10);
4946*f5c631daSSadaf Ebrahimi 
4947*f5c631daSSadaf Ebrahimi   // Operations that use ProcessNaN.
4948*f5c631daSSadaf Ebrahimi   //   - Signalling NaN
4949*f5c631daSSadaf Ebrahimi   __ Fsqrt(d4, d0);
4950*f5c631daSSadaf Ebrahimi   __ Frinta(d5, d0);
4951*f5c631daSSadaf Ebrahimi   __ Frintn(d6, d0);
4952*f5c631daSSadaf Ebrahimi   __ Frintz(d7, d0);
4953*f5c631daSSadaf Ebrahimi   //   - Quiet NaN
4954*f5c631daSSadaf Ebrahimi   __ Fsqrt(d14, d10);
4955*f5c631daSSadaf Ebrahimi   __ Frinta(d15, d10);
4956*f5c631daSSadaf Ebrahimi   __ Frintn(d16, d10);
4957*f5c631daSSadaf Ebrahimi   __ Frintz(d17, d10);
4958*f5c631daSSadaf Ebrahimi 
4959*f5c631daSSadaf Ebrahimi   // The behaviour of fcvt is checked in TEST(fcvt_sd).
4960*f5c631daSSadaf Ebrahimi 
4961*f5c631daSSadaf Ebrahimi   END();
4962*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
4963*f5c631daSSadaf Ebrahimi     RUN();
4964*f5c631daSSadaf Ebrahimi 
4965*f5c631daSSadaf Ebrahimi     uint64_t qn_raw = DoubleToRawbits(qn);
4966*f5c631daSSadaf Ebrahimi     uint64_t sn_raw = DoubleToRawbits(sn);
4967*f5c631daSSadaf Ebrahimi 
4968*f5c631daSSadaf Ebrahimi     //   - Signalling NaN
4969*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(sn, d1);
4970*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(sn_raw & ~kDSignMask), d2);
4971*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(sn_raw ^ kDSignMask), d3);
4972*f5c631daSSadaf Ebrahimi     //   - Quiet NaN
4973*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(qn, d11);
4974*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(qn_raw & ~kDSignMask), d12);
4975*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(RawbitsToDouble(qn_raw ^ kDSignMask), d13);
4976*f5c631daSSadaf Ebrahimi 
4977*f5c631daSSadaf Ebrahimi     //   - Signalling NaN
4978*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(sn_proc, d4);
4979*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(sn_proc, d5);
4980*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(sn_proc, d6);
4981*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(sn_proc, d7);
4982*f5c631daSSadaf Ebrahimi     //   - Quiet NaN
4983*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(qn_proc, d14);
4984*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(qn_proc, d15);
4985*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(qn_proc, d16);
4986*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(qn_proc, d17);
4987*f5c631daSSadaf Ebrahimi   }
4988*f5c631daSSadaf Ebrahimi }
4989*f5c631daSSadaf Ebrahimi 
4990*f5c631daSSadaf Ebrahimi 
4991*f5c631daSSadaf Ebrahimi TEST(process_nan_float) {
4992*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
4993*f5c631daSSadaf Ebrahimi   float sn = RawbitsToFloat(0x7f951111);
4994*f5c631daSSadaf Ebrahimi   float qn = RawbitsToFloat(0x7fea1111);
4995*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
4996*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
4997*f5c631daSSadaf Ebrahimi 
4998*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
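  // (Quieting sets the most significant fraction bit, bit 22:
  // sn_proc == RawbitsToFloat(FloatToRawbits(sn) | (1 << 22)).)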
4999*f5c631daSSadaf Ebrahimi   float sn_proc = RawbitsToFloat(0x7fd51111);
5000*f5c631daSSadaf Ebrahimi   float qn_proc = qn;
5001*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sn_proc));
5002*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn_proc));
5003*f5c631daSSadaf Ebrahimi 
5004*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
5005*f5c631daSSadaf Ebrahimi 
5006*f5c631daSSadaf Ebrahimi   START();
5007*f5c631daSSadaf Ebrahimi 
5008*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaN, and check that
5009*f5c631daSSadaf Ebrahimi   // they all handle the NaN correctly.
5010*f5c631daSSadaf Ebrahimi   __ Fmov(s0, sn);
5011*f5c631daSSadaf Ebrahimi   __ Fmov(s10, qn);
5012*f5c631daSSadaf Ebrahimi 
5013*f5c631daSSadaf Ebrahimi   // Operations that always propagate NaNs unchanged, even signalling NaNs.
5014*f5c631daSSadaf Ebrahimi   //   - Signalling NaN
5015*f5c631daSSadaf Ebrahimi   __ Fmov(s1, s0);
5016*f5c631daSSadaf Ebrahimi   __ Fabs(s2, s0);
5017*f5c631daSSadaf Ebrahimi   __ Fneg(s3, s0);
5018*f5c631daSSadaf Ebrahimi   //   - Quiet NaN
5019*f5c631daSSadaf Ebrahimi   __ Fmov(s11, s10);
5020*f5c631daSSadaf Ebrahimi   __ Fabs(s12, s10);
5021*f5c631daSSadaf Ebrahimi   __ Fneg(s13, s10);
5022*f5c631daSSadaf Ebrahimi 
5023*f5c631daSSadaf Ebrahimi   // Operations that use ProcessNaN.
5024*f5c631daSSadaf Ebrahimi   //   - Signalling NaN
5025*f5c631daSSadaf Ebrahimi   __ Fsqrt(s4, s0);
5026*f5c631daSSadaf Ebrahimi   __ Frinta(s5, s0);
5027*f5c631daSSadaf Ebrahimi   __ Frintn(s6, s0);
5028*f5c631daSSadaf Ebrahimi   __ Frintz(s7, s0);
5029*f5c631daSSadaf Ebrahimi   //   - Quiet NaN
5030*f5c631daSSadaf Ebrahimi   __ Fsqrt(s14, s10);
5031*f5c631daSSadaf Ebrahimi   __ Frinta(s15, s10);
5032*f5c631daSSadaf Ebrahimi   __ Frintn(s16, s10);
5033*f5c631daSSadaf Ebrahimi   __ Frintz(s17, s10);
5034*f5c631daSSadaf Ebrahimi 
5035*f5c631daSSadaf Ebrahimi   // The behaviour of fcvt is checked in TEST(fcvt_sd).
5036*f5c631daSSadaf Ebrahimi 
5037*f5c631daSSadaf Ebrahimi   END();
5038*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5039*f5c631daSSadaf Ebrahimi     RUN();
5040*f5c631daSSadaf Ebrahimi 
5041*f5c631daSSadaf Ebrahimi     uint32_t qn_raw = FloatToRawbits(qn);
5042*f5c631daSSadaf Ebrahimi     uint32_t sn_raw = FloatToRawbits(sn);
5043*f5c631daSSadaf Ebrahimi 
5044*f5c631daSSadaf Ebrahimi     //   - Signalling NaN
5045*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(sn, s1);
5046*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(sn_raw & ~kSSignMask), s2);
5047*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(sn_raw ^ kSSignMask), s3);
5048*f5c631daSSadaf Ebrahimi     //   - Quiet NaN
5049*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(qn, s11);
5050*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(qn_raw & ~kSSignMask), s12);
5051*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(RawbitsToFloat(qn_raw ^ kSSignMask), s13);
5052*f5c631daSSadaf Ebrahimi 
5053*f5c631daSSadaf Ebrahimi     //   - Signalling NaN
5054*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(sn_proc, s4);
5055*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(sn_proc, s5);
5056*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(sn_proc, s6);
5057*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(sn_proc, s7);
5058*f5c631daSSadaf Ebrahimi     //   - Quiet NaN
5059*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(qn_proc, s14);
5060*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(qn_proc, s15);
5061*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(qn_proc, s16);
5062*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(qn_proc, s17);
5063*f5c631daSSadaf Ebrahimi   }
5064*f5c631daSSadaf Ebrahimi }
5065*f5c631daSSadaf Ebrahimi 
5066*f5c631daSSadaf Ebrahimi // TODO: TEST(process_nan_half) {}
5067*f5c631daSSadaf Ebrahimi 
5068*f5c631daSSadaf Ebrahimi static void ProcessNaNsHelper(double n, double m, double expected) {
5069*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(n) || IsNaN(m));
5070*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(expected));
5071*f5c631daSSadaf Ebrahimi 
5072*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
5073*f5c631daSSadaf Ebrahimi 
5074*f5c631daSSadaf Ebrahimi   START();
5075*f5c631daSSadaf Ebrahimi 
5076*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaNs, and check that
5077*f5c631daSSadaf Ebrahimi   // they all propagate NaNs correctly.
5078*f5c631daSSadaf Ebrahimi   __ Fmov(d0, n);
5079*f5c631daSSadaf Ebrahimi   __ Fmov(d1, m);
5080*f5c631daSSadaf Ebrahimi 
5081*f5c631daSSadaf Ebrahimi   __ Fadd(d2, d0, d1);
5082*f5c631daSSadaf Ebrahimi   __ Fsub(d3, d0, d1);
5083*f5c631daSSadaf Ebrahimi   __ Fmul(d4, d0, d1);
5084*f5c631daSSadaf Ebrahimi   __ Fdiv(d5, d0, d1);
5085*f5c631daSSadaf Ebrahimi   __ Fmax(d6, d0, d1);
5086*f5c631daSSadaf Ebrahimi   __ Fmin(d7, d0, d1);
5087*f5c631daSSadaf Ebrahimi 
5088*f5c631daSSadaf Ebrahimi   END();
5089*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5090*f5c631daSSadaf Ebrahimi     RUN();
5091*f5c631daSSadaf Ebrahimi 
5092*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d2);
5093*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d3);
5094*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d4);
5095*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d5);
5096*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d6);
5097*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(expected, d7);
5098*f5c631daSSadaf Ebrahimi   }
5099*f5c631daSSadaf Ebrahimi }
5100*f5c631daSSadaf Ebrahimi 
5101*f5c631daSSadaf Ebrahimi 
5102*f5c631daSSadaf Ebrahimi TEST(process_nans_double) {
5103*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
5104*f5c631daSSadaf Ebrahimi   double sn = RawbitsToDouble(0x7ff5555511111111);
5105*f5c631daSSadaf Ebrahimi   double sm = RawbitsToDouble(0x7ff5555522222222);
5106*f5c631daSSadaf Ebrahimi   double qn = RawbitsToDouble(0x7ffaaaaa11111111);
5107*f5c631daSSadaf Ebrahimi   double qm = RawbitsToDouble(0x7ffaaaaa22222222);
5108*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
5109*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sm));
5110*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
5111*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm));
5112*f5c631daSSadaf Ebrahimi 
5113*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
5114*f5c631daSSadaf Ebrahimi   double sn_proc = RawbitsToDouble(0x7ffd555511111111);
5115*f5c631daSSadaf Ebrahimi   double sm_proc = RawbitsToDouble(0x7ffd555522222222);
5116*f5c631daSSadaf Ebrahimi   double qn_proc = qn;
5117*f5c631daSSadaf Ebrahimi   double qm_proc = qm;
5118*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sn_proc));
5119*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sm_proc));
5120*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn_proc));
5121*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm_proc));
5122*f5c631daSSadaf Ebrahimi 
5123*f5c631daSSadaf Ebrahimi   // Quiet NaNs are propagated.
5124*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, 0, qn_proc);
5125*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(0, qm, qm_proc);
5126*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, qm, qn_proc);
5127*f5c631daSSadaf Ebrahimi 
5128*f5c631daSSadaf Ebrahimi   // Signalling NaNs are propagated, and made quiet.
5129*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, 0, sn_proc);
5130*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(0, sm, sm_proc);
5131*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5132*f5c631daSSadaf Ebrahimi 
5133*f5c631daSSadaf Ebrahimi   // Signalling NaNs take precedence over quiet NaNs.
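  // For example, the second call below returns the quieted sm even though the
  // signalling NaN is the second operand.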
5134*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, qm, sn_proc);
5135*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, sm, sm_proc);
5136*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5137*f5c631daSSadaf Ebrahimi }
5138*f5c631daSSadaf Ebrahimi 
5139*f5c631daSSadaf Ebrahimi 
5140*f5c631daSSadaf Ebrahimi static void ProcessNaNsHelper(float n, float m, float expected) {
5141*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(n) || IsNaN(m));
5142*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(expected));
5143*f5c631daSSadaf Ebrahimi 
5144*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
5145*f5c631daSSadaf Ebrahimi 
5146*f5c631daSSadaf Ebrahimi   START();
5147*f5c631daSSadaf Ebrahimi 
5148*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaNs, and check that
5149*f5c631daSSadaf Ebrahimi   // they all propagate NaNs correctly.
5150*f5c631daSSadaf Ebrahimi   __ Fmov(s0, n);
5151*f5c631daSSadaf Ebrahimi   __ Fmov(s1, m);
5152*f5c631daSSadaf Ebrahimi 
5153*f5c631daSSadaf Ebrahimi   __ Fadd(s2, s0, s1);
5154*f5c631daSSadaf Ebrahimi   __ Fsub(s3, s0, s1);
5155*f5c631daSSadaf Ebrahimi   __ Fmul(s4, s0, s1);
5156*f5c631daSSadaf Ebrahimi   __ Fdiv(s5, s0, s1);
5157*f5c631daSSadaf Ebrahimi   __ Fmax(s6, s0, s1);
5158*f5c631daSSadaf Ebrahimi   __ Fmin(s7, s0, s1);
5159*f5c631daSSadaf Ebrahimi 
5160*f5c631daSSadaf Ebrahimi   END();
5161*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5162*f5c631daSSadaf Ebrahimi     RUN();
5163*f5c631daSSadaf Ebrahimi 
5164*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s2);
5165*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s3);
5166*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s4);
5167*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s5);
5168*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s6);
5169*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(expected, s7);
5170*f5c631daSSadaf Ebrahimi   }
5171*f5c631daSSadaf Ebrahimi }
5172*f5c631daSSadaf Ebrahimi 
5173*f5c631daSSadaf Ebrahimi 
5174*f5c631daSSadaf Ebrahimi TEST(process_nans_float) {
5175*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
5176*f5c631daSSadaf Ebrahimi   float sn = RawbitsToFloat(0x7f951111);
5177*f5c631daSSadaf Ebrahimi   float sm = RawbitsToFloat(0x7f952222);
5178*f5c631daSSadaf Ebrahimi   float qn = RawbitsToFloat(0x7fea1111);
5179*f5c631daSSadaf Ebrahimi   float qm = RawbitsToFloat(0x7fea2222);
5180*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
5181*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sm));
5182*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
5183*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm));
5184*f5c631daSSadaf Ebrahimi 
5185*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
5186*f5c631daSSadaf Ebrahimi   float sn_proc = RawbitsToFloat(0x7fd51111);
5187*f5c631daSSadaf Ebrahimi   float sm_proc = RawbitsToFloat(0x7fd52222);
5188*f5c631daSSadaf Ebrahimi   float qn_proc = qn;
5189*f5c631daSSadaf Ebrahimi   float qm_proc = qm;
5190*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sn_proc));
5191*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sm_proc));
5192*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn_proc));
5193*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm_proc));
5194*f5c631daSSadaf Ebrahimi 
5195*f5c631daSSadaf Ebrahimi   // Quiet NaNs are propagated.
5196*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, 0, qn_proc);
5197*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(0, qm, qm_proc);
5198*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, qm, qn_proc);
5199*f5c631daSSadaf Ebrahimi 
5200*f5c631daSSadaf Ebrahimi   // Signalling NaNs are propagated, and made quiet.
5201*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, 0, sn_proc);
5202*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(0, sm, sm_proc);
5203*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5204*f5c631daSSadaf Ebrahimi 
5205*f5c631daSSadaf Ebrahimi   // Signalling NaNs take precedence over quiet NaNs.
5206*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, qm, sn_proc);
5207*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, sm, sm_proc);
5208*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5209*f5c631daSSadaf Ebrahimi }
5210*f5c631daSSadaf Ebrahimi 
5211*f5c631daSSadaf Ebrahimi 
5212*f5c631daSSadaf Ebrahimi static void ProcessNaNsHelper(Float16 n, Float16 m, Float16 expected) {
5213*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(n) || IsNaN(m));
5214*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(expected));
5215*f5c631daSSadaf Ebrahimi 
5216*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP, CPUFeatures::kFPHalf);
5217*f5c631daSSadaf Ebrahimi 
5218*f5c631daSSadaf Ebrahimi   START();
5219*f5c631daSSadaf Ebrahimi 
5220*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaNs, and check that
5221*f5c631daSSadaf Ebrahimi   // they all propagate NaNs correctly.
5222*f5c631daSSadaf Ebrahimi   __ Fmov(h0, n);
5223*f5c631daSSadaf Ebrahimi   __ Fmov(h1, m);
5224*f5c631daSSadaf Ebrahimi 
5225*f5c631daSSadaf Ebrahimi   __ Fadd(h2, h0, h1);
5226*f5c631daSSadaf Ebrahimi   __ Fsub(h3, h0, h1);
5227*f5c631daSSadaf Ebrahimi   __ Fmul(h4, h0, h1);
5228*f5c631daSSadaf Ebrahimi   __ Fdiv(h5, h0, h1);
5229*f5c631daSSadaf Ebrahimi   __ Fmax(h6, h0, h1);
5230*f5c631daSSadaf Ebrahimi   __ Fmin(h7, h0, h1);
5231*f5c631daSSadaf Ebrahimi 
5232*f5c631daSSadaf Ebrahimi   END();
5233*f5c631daSSadaf Ebrahimi 
5234*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5235*f5c631daSSadaf Ebrahimi     RUN();
5236*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h2);
5237*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h3);
5238*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h4);
5239*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h5);
5240*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h6);
5241*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP16(expected, h7);
5242*f5c631daSSadaf Ebrahimi   }
5243*f5c631daSSadaf Ebrahimi }
5244*f5c631daSSadaf Ebrahimi 
5245*f5c631daSSadaf Ebrahimi 
5246*f5c631daSSadaf Ebrahimi TEST(process_nans_half) {
5247*f5c631daSSadaf Ebrahimi   // Make sure that NaN propagation works correctly.
5248*f5c631daSSadaf Ebrahimi   Float16 sn(RawbitsToFloat16(0x7c11));
5249*f5c631daSSadaf Ebrahimi   Float16 sm(RawbitsToFloat16(0xfc22));
5250*f5c631daSSadaf Ebrahimi   Float16 qn(RawbitsToFloat16(0x7e33));
5251*f5c631daSSadaf Ebrahimi   Float16 qm(RawbitsToFloat16(0xfe44));
5252*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
5253*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sm));
5254*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
5255*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm));
5256*f5c631daSSadaf Ebrahimi 
5257*f5c631daSSadaf Ebrahimi   // The input NaNs after passing through ProcessNaN.
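  // (Quieting sets the most significant fraction bit, bit 9: 0x7c11 | 0x0200
  // gives 0x7e11, and 0xfc22 | 0x0200 gives 0xfe22.)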
5258*f5c631daSSadaf Ebrahimi   Float16 sn_proc(RawbitsToFloat16(0x7e11));
5259*f5c631daSSadaf Ebrahimi   Float16 sm_proc(RawbitsToFloat16(0xfe22));
5260*f5c631daSSadaf Ebrahimi   Float16 qn_proc = qn;
5261*f5c631daSSadaf Ebrahimi   Float16 qm_proc = qm;
5262*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sn_proc));
5263*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(sm_proc));
5264*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn_proc));
5265*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm_proc));
5266*f5c631daSSadaf Ebrahimi 
5267*f5c631daSSadaf Ebrahimi   // Quiet NaNs are propagated.
5268*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, Float16(), qn_proc);
5269*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(Float16(), qm, qm_proc);
5270*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, qm, qn_proc);
5271*f5c631daSSadaf Ebrahimi 
5272*f5c631daSSadaf Ebrahimi   // Signalling NaNs are propagated, and made quiet.
5273*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, Float16(), sn_proc);
5274*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(Float16(), sm, sm_proc);
5275*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5276*f5c631daSSadaf Ebrahimi 
5277*f5c631daSSadaf Ebrahimi   // Signalling NaNs take precedence over quiet NaNs.
5278*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, qm, sn_proc);
5279*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(qn, sm, sm_proc);
5280*f5c631daSSadaf Ebrahimi   ProcessNaNsHelper(sn, sm, sn_proc);
5281*f5c631daSSadaf Ebrahimi }
5282*f5c631daSSadaf Ebrahimi 
5283*f5c631daSSadaf Ebrahimi 
5284*f5c631daSSadaf Ebrahimi static void DefaultNaNHelper(float n, float m, float a) {
5285*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(n) || IsNaN(m) || IsNaN(a));
5286*f5c631daSSadaf Ebrahimi 
5287*f5c631daSSadaf Ebrahimi   bool test_1op = IsNaN(n);
5288*f5c631daSSadaf Ebrahimi   bool test_2op = IsNaN(n) || IsNaN(m);
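  // There is no test_3op flag: the fused multiply-accumulate forms below
  // always run, so a NaN that appears only in `a` is still exercised.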
5289*f5c631daSSadaf Ebrahimi 
5290*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
5291*f5c631daSSadaf Ebrahimi   START();
5292*f5c631daSSadaf Ebrahimi 
5293*f5c631daSSadaf Ebrahimi   // Enable Default-NaN mode in the FPCR.
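  // DN is bit 25 of the FPCR; while it is set, any operation that would
  // otherwise return a propagated or processed NaN returns the default NaN
  // for the destination format instead (0x7fc00000 for single precision).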
5294*f5c631daSSadaf Ebrahimi   __ Mrs(x0, FPCR);
5295*f5c631daSSadaf Ebrahimi   __ Orr(x1, x0, DN_mask);
5296*f5c631daSSadaf Ebrahimi   __ Msr(FPCR, x1);
5297*f5c631daSSadaf Ebrahimi 
5298*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaNs, and check that
5299*f5c631daSSadaf Ebrahimi   // they all produce the default NaN.
5300*f5c631daSSadaf Ebrahimi   __ Fmov(s0, n);
5301*f5c631daSSadaf Ebrahimi   __ Fmov(s1, m);
5302*f5c631daSSadaf Ebrahimi   __ Fmov(s2, a);
5303*f5c631daSSadaf Ebrahimi 
5304*f5c631daSSadaf Ebrahimi   if (test_1op) {
5305*f5c631daSSadaf Ebrahimi     // Operations that always propagate NaNs unchanged, even signalling NaNs.
5306*f5c631daSSadaf Ebrahimi     __ Fmov(s10, s0);
5307*f5c631daSSadaf Ebrahimi     __ Fabs(s11, s0);
5308*f5c631daSSadaf Ebrahimi     __ Fneg(s12, s0);
5309*f5c631daSSadaf Ebrahimi 
5310*f5c631daSSadaf Ebrahimi     // Operations that use ProcessNaN.
5311*f5c631daSSadaf Ebrahimi     __ Fsqrt(s13, s0);
5312*f5c631daSSadaf Ebrahimi     __ Frinta(s14, s0);
5313*f5c631daSSadaf Ebrahimi     __ Frintn(s15, s0);
5314*f5c631daSSadaf Ebrahimi     __ Frintz(s16, s0);
5315*f5c631daSSadaf Ebrahimi 
5316*f5c631daSSadaf Ebrahimi     // Fcvt usually has special NaN handling, but it respects default-NaN mode.
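    // (Without DN, Fcvt would return a quiet NaN derived from the input,
    // keeping its most significant payload bits; with DN set the result is
    // simply kFP64DefaultNaN, as checked for d17 below.)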
5317*f5c631daSSadaf Ebrahimi     __ Fcvt(d17, s0);
5318*f5c631daSSadaf Ebrahimi   }
5319*f5c631daSSadaf Ebrahimi 
5320*f5c631daSSadaf Ebrahimi   if (test_2op) {
5321*f5c631daSSadaf Ebrahimi     __ Fadd(s18, s0, s1);
5322*f5c631daSSadaf Ebrahimi     __ Fsub(s19, s0, s1);
5323*f5c631daSSadaf Ebrahimi     __ Fmul(s20, s0, s1);
5324*f5c631daSSadaf Ebrahimi     __ Fdiv(s21, s0, s1);
5325*f5c631daSSadaf Ebrahimi     __ Fmax(s22, s0, s1);
5326*f5c631daSSadaf Ebrahimi     __ Fmin(s23, s0, s1);
5327*f5c631daSSadaf Ebrahimi   }
5328*f5c631daSSadaf Ebrahimi 
5329*f5c631daSSadaf Ebrahimi   __ Fmadd(s24, s0, s1, s2);
5330*f5c631daSSadaf Ebrahimi   __ Fmsub(s25, s0, s1, s2);
5331*f5c631daSSadaf Ebrahimi   __ Fnmadd(s26, s0, s1, s2);
5332*f5c631daSSadaf Ebrahimi   __ Fnmsub(s27, s0, s1, s2);
5333*f5c631daSSadaf Ebrahimi 
5334*f5c631daSSadaf Ebrahimi   // Restore FPCR.
5335*f5c631daSSadaf Ebrahimi   __ Msr(FPCR, x0);
5336*f5c631daSSadaf Ebrahimi 
5337*f5c631daSSadaf Ebrahimi   END();
5338*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5339*f5c631daSSadaf Ebrahimi     RUN();
5340*f5c631daSSadaf Ebrahimi 
5341*f5c631daSSadaf Ebrahimi     if (test_1op) {
5342*f5c631daSSadaf Ebrahimi       uint32_t n_raw = FloatToRawbits(n);
5343*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(n, s10);
5344*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(RawbitsToFloat(n_raw & ~kSSignMask), s11);
5345*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(RawbitsToFloat(n_raw ^ kSSignMask), s12);
5346*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s13);
5347*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s14);
5348*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s15);
5349*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s16);
5350*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d17);
5351*f5c631daSSadaf Ebrahimi     }
5352*f5c631daSSadaf Ebrahimi 
5353*f5c631daSSadaf Ebrahimi     if (test_2op) {
5354*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s18);
5355*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s19);
5356*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s20);
5357*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s21);
5358*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s22);
5359*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s23);
5360*f5c631daSSadaf Ebrahimi     }
5361*f5c631daSSadaf Ebrahimi 
5362*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s24);
5363*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s25);
5364*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s26);
5365*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP32(kFP32DefaultNaN, s27);
5366*f5c631daSSadaf Ebrahimi   }
5367*f5c631daSSadaf Ebrahimi }
5368*f5c631daSSadaf Ebrahimi 
5369*f5c631daSSadaf Ebrahimi 
5370*f5c631daSSadaf Ebrahimi TEST(default_nan_float) {
5371*f5c631daSSadaf Ebrahimi   float sn = RawbitsToFloat(0x7f951111);
5372*f5c631daSSadaf Ebrahimi   float sm = RawbitsToFloat(0x7f952222);
5373*f5c631daSSadaf Ebrahimi   float sa = RawbitsToFloat(0x7f95aaaa);
5374*f5c631daSSadaf Ebrahimi   float qn = RawbitsToFloat(0x7fea1111);
5375*f5c631daSSadaf Ebrahimi   float qm = RawbitsToFloat(0x7fea2222);
5376*f5c631daSSadaf Ebrahimi   float qa = RawbitsToFloat(0x7feaaaaa);
5377*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
5378*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sm));
5379*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sa));
5380*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
5381*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm));
5382*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qa));
5383*f5c631daSSadaf Ebrahimi 
5384*f5c631daSSadaf Ebrahimi   //   - Signalling NaNs
5385*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, 0.0f, 0.0f);
5386*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, sm, 0.0f);
5387*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, 0.0f, sa);
5388*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, 0.0f);
5389*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, sm, sa);
5390*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, 0.0f, sa);
5391*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, sa);
5392*f5c631daSSadaf Ebrahimi   //   - Quiet NaNs
5393*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, 0.0f, 0.0f);
5394*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, qm, 0.0f);
5395*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, 0.0f, qa);
5396*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, 0.0f);
5397*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0f, qm, qa);
5398*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, 0.0f, qa);
5399*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, qa);
5400*f5c631daSSadaf Ebrahimi   //   - Mixed NaNs
5401*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, sm, sa);
5402*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, qm, sa);
5403*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, qa);
5404*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, sa);
5405*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, qm, qa);
5406*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, sm, qa);
5407*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, qa);
5408*f5c631daSSadaf Ebrahimi }
5409*f5c631daSSadaf Ebrahimi 
5410*f5c631daSSadaf Ebrahimi 
5411*f5c631daSSadaf Ebrahimi static void DefaultNaNHelper(double n, double m, double a) {
5412*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsNaN(n) || IsNaN(m) || IsNaN(a));
5413*f5c631daSSadaf Ebrahimi 
5414*f5c631daSSadaf Ebrahimi   bool test_1op = IsNaN(n);
5415*f5c631daSSadaf Ebrahimi   bool test_2op = IsNaN(n) || IsNaN(m);
5416*f5c631daSSadaf Ebrahimi 
5417*f5c631daSSadaf Ebrahimi   SETUP_WITH_FEATURES(CPUFeatures::kFP);
5418*f5c631daSSadaf Ebrahimi 
5419*f5c631daSSadaf Ebrahimi   START();
5420*f5c631daSSadaf Ebrahimi 
5421*f5c631daSSadaf Ebrahimi   // Enable Default-NaN mode in the FPCR.
5422*f5c631daSSadaf Ebrahimi   __ Mrs(x0, FPCR);
5423*f5c631daSSadaf Ebrahimi   __ Orr(x1, x0, DN_mask);
5424*f5c631daSSadaf Ebrahimi   __ Msr(FPCR, x1);
5425*f5c631daSSadaf Ebrahimi 
5426*f5c631daSSadaf Ebrahimi   // Execute a number of instructions which all use ProcessNaNs, and check that
5427*f5c631daSSadaf Ebrahimi   // they all produce the default NaN.
5428*f5c631daSSadaf Ebrahimi   __ Fmov(d0, n);
5429*f5c631daSSadaf Ebrahimi   __ Fmov(d1, m);
5430*f5c631daSSadaf Ebrahimi   __ Fmov(d2, a);
5431*f5c631daSSadaf Ebrahimi 
5432*f5c631daSSadaf Ebrahimi   if (test_1op) {
5433*f5c631daSSadaf Ebrahimi     // Operations that always propagate NaNs unchanged, even signalling NaNs.
5434*f5c631daSSadaf Ebrahimi     __ Fmov(d10, d0);
5435*f5c631daSSadaf Ebrahimi     __ Fabs(d11, d0);
5436*f5c631daSSadaf Ebrahimi     __ Fneg(d12, d0);
5437*f5c631daSSadaf Ebrahimi 
5438*f5c631daSSadaf Ebrahimi     // Operations that use ProcessNaN.
5439*f5c631daSSadaf Ebrahimi     __ Fsqrt(d13, d0);
5440*f5c631daSSadaf Ebrahimi     __ Frinta(d14, d0);
5441*f5c631daSSadaf Ebrahimi     __ Frintn(d15, d0);
5442*f5c631daSSadaf Ebrahimi     __ Frintz(d16, d0);
5443*f5c631daSSadaf Ebrahimi 
5444*f5c631daSSadaf Ebrahimi     // Fcvt usually has special NaN handling, but it respects default-NaN mode.
5445*f5c631daSSadaf Ebrahimi     __ Fcvt(s17, d0);
5446*f5c631daSSadaf Ebrahimi   }
5447*f5c631daSSadaf Ebrahimi 
5448*f5c631daSSadaf Ebrahimi   if (test_2op) {
5449*f5c631daSSadaf Ebrahimi     __ Fadd(d18, d0, d1);
5450*f5c631daSSadaf Ebrahimi     __ Fsub(d19, d0, d1);
5451*f5c631daSSadaf Ebrahimi     __ Fmul(d20, d0, d1);
5452*f5c631daSSadaf Ebrahimi     __ Fdiv(d21, d0, d1);
5453*f5c631daSSadaf Ebrahimi     __ Fmax(d22, d0, d1);
5454*f5c631daSSadaf Ebrahimi     __ Fmin(d23, d0, d1);
5455*f5c631daSSadaf Ebrahimi   }
5456*f5c631daSSadaf Ebrahimi 
5457*f5c631daSSadaf Ebrahimi   __ Fmadd(d24, d0, d1, d2);
5458*f5c631daSSadaf Ebrahimi   __ Fmsub(d25, d0, d1, d2);
5459*f5c631daSSadaf Ebrahimi   __ Fnmadd(d26, d0, d1, d2);
5460*f5c631daSSadaf Ebrahimi   __ Fnmsub(d27, d0, d1, d2);
5461*f5c631daSSadaf Ebrahimi 
5462*f5c631daSSadaf Ebrahimi   // Restore FPCR.
5463*f5c631daSSadaf Ebrahimi   __ Msr(FPCR, x0);
5464*f5c631daSSadaf Ebrahimi 
5465*f5c631daSSadaf Ebrahimi   END();
5466*f5c631daSSadaf Ebrahimi   if (CAN_RUN()) {
5467*f5c631daSSadaf Ebrahimi     RUN();
5468*f5c631daSSadaf Ebrahimi 
5469*f5c631daSSadaf Ebrahimi     if (test_1op) {
5470*f5c631daSSadaf Ebrahimi       uint64_t n_raw = DoubleToRawbits(n);
5471*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(n, d10);
5472*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(RawbitsToDouble(n_raw & ~kDSignMask), d11);
5473*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(RawbitsToDouble(n_raw ^ kDSignMask), d12);
5474*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13);
5475*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d14);
5476*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d15);
5477*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d16);
5478*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP32(kFP32DefaultNaN, s17);
5479*f5c631daSSadaf Ebrahimi     }
5480*f5c631daSSadaf Ebrahimi 
5481*f5c631daSSadaf Ebrahimi     if (test_2op) {
5482*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d18);
5483*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d19);
5484*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d20);
5485*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d21);
5486*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d22);
5487*f5c631daSSadaf Ebrahimi       ASSERT_EQUAL_FP64(kFP64DefaultNaN, d23);
5488*f5c631daSSadaf Ebrahimi     }
5489*f5c631daSSadaf Ebrahimi 
5490*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d24);
5491*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d25);
5492*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d26);
5493*f5c631daSSadaf Ebrahimi     ASSERT_EQUAL_FP64(kFP64DefaultNaN, d27);
5494*f5c631daSSadaf Ebrahimi   }
5495*f5c631daSSadaf Ebrahimi }
5496*f5c631daSSadaf Ebrahimi 
5497*f5c631daSSadaf Ebrahimi 
5498*f5c631daSSadaf Ebrahimi TEST(default_nan_double) {
5499*f5c631daSSadaf Ebrahimi   double sn = RawbitsToDouble(0x7ff5555511111111);
5500*f5c631daSSadaf Ebrahimi   double sm = RawbitsToDouble(0x7ff5555522222222);
5501*f5c631daSSadaf Ebrahimi   double sa = RawbitsToDouble(0x7ff55555aaaaaaaa);
5502*f5c631daSSadaf Ebrahimi   double qn = RawbitsToDouble(0x7ffaaaaa11111111);
5503*f5c631daSSadaf Ebrahimi   double qm = RawbitsToDouble(0x7ffaaaaa22222222);
5504*f5c631daSSadaf Ebrahimi   double qa = RawbitsToDouble(0x7ffaaaaaaaaaaaaa);
5505*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sn));
5506*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sm));
5507*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsSignallingNaN(sa));
5508*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qn));
5509*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qm));
5510*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsQuietNaN(qa));
5511*f5c631daSSadaf Ebrahimi 
5512*f5c631daSSadaf Ebrahimi   //   - Signalling NaNs
5513*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, 0.0, 0.0);
5514*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, sm, 0.0);
5515*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, 0.0, sa);
5516*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, 0.0);
5517*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, sm, sa);
5518*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, 0.0, sa);
5519*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, sa);
5520*f5c631daSSadaf Ebrahimi   //   - Quiet NaNs
5521*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, 0.0, 0.0);
5522*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, qm, 0.0);
5523*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, 0.0, qa);
5524*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, 0.0);
5525*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(0.0, qm, qa);
5526*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, 0.0, qa);
5527*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, qa);
5528*f5c631daSSadaf Ebrahimi   //   - Mixed NaNs
5529*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, sm, sa);
5530*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, qm, sa);
5531*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, sm, qa);
5532*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, sa);
5533*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(sn, qm, qa);
5534*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, sm, qa);
5535*f5c631daSSadaf Ebrahimi   DefaultNaNHelper(qn, qm, qa);
5536*f5c631daSSadaf Ebrahimi }
5537*f5c631daSSadaf Ebrahimi 
5538*f5c631daSSadaf Ebrahimi }  // namespace aarch64
5539*f5c631daSSadaf Ebrahimi }  // namespace vixl
5540