// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright IBM Corp. 2024
 *
 * Authors:
 *  Hariharan Mari <[email protected]>
 *
 * The tests compare the CPU subfunction data returned by the KVM ioctl with the
 * data obtained from an ASM block executing the same CPU subfunction query.
 * Currently KVM doesn't mask the instruction query data reported via the CPU
 * Model, allowing us to compare it directly with the data acquired by executing
 * the queries in the test.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include "facility.h"

#include "kvm_util.h"

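/* PLO has 256 possible function codes, each reported as one bit of the 32-byte query result. */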
#define PLO_FUNCTION_MAX 256

/* Query available CPU subfunctions */
struct kvm_s390_vm_cpu_subfunc cpu_subfunc;

static void get_cpu_machine_subfunctions(struct kvm_vm *vm,
					 struct kvm_s390_vm_cpu_subfunc *cpu_subfunc)
{
	int r;

	r = __kvm_device_attr_get(vm->fd, KVM_S390_VM_CPU_MODEL,
				  KVM_S390_VM_CPU_MACHINE_SUBFUNC, cpu_subfunc);

	TEST_ASSERT(!r, "Get cpu subfunctions failed r=%d errno=%d", r, errno);
}

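/*
 * Setting bit 0x100 in the PLO function code selects the "test bit" operation:
 * the instruction only checks whether the given function code is installed and
 * sets condition code 0 if it is, without performing a locked operation.
 */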
static inline int plo_test_bit(unsigned char nr)
{
	unsigned long function = nr | 0x100;
	int cc;

	asm volatile("	lgr	0,%[function]\n"
		     /* Parameter registers are ignored for "test bit" */
		     "	plo	0,0,0,0(0)\n"
		     "	ipm	%0\n"
		     "	srl	%0,28\n"
		     : "=d" (cc)
		     : [function] "d" (function)
		     : "cc", "0");
	return cc == 0;
}

/* Testing Perform Locked Operation (PLO) CPU subfunction's ASM block */
static void test_plo_asm_block(u8 (*query)[32])
{
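	/*
	 * Rebuild the 256-bit PLO query bitmap locally: one bit per function
	 * code, most-significant bit first within each byte, matching the
	 * layout reported by KVM.
	 */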
	for (int i = 0; i < PLO_FUNCTION_MAX; ++i) {
		if (plo_test_bit(i))
			(*query)[i >> 3] |= 0x80 >> (i & 7);
	}
}

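/*
 * All of the crypto query blocks below share the same convention: function
 * code 0 in GR0 selects the query function and GR1 holds the address of the
 * parameter block that receives the bitmap of installed function codes.
 */
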
/* Testing Crypto Compute Message Authentication Code (KMAC) CPU subfunction's ASM block */
static void test_kmac_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb91e0000,0,2\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message with Chaining (KMC) CPU subfunction's ASM block */
static void test_kmc_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb92f0000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message (KM) CPU subfunction's ASM block */
static void test_km_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb92e0000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Compute Intermediate Message Digest (KIMD) CPU subfunction's ASM block */
static void test_kimd_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb93e0000,0,2\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Compute Last Message Digest (KLMD) CPU subfunction's ASM block */
static void test_klmd_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb93f0000,0,2\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message with Counter (KMCTR) CPU subfunction's ASM block */
static void test_kmctr_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rrf,0xb92d0000,2,4,6,0\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message with Cipher Feedback (KMF) CPU subfunction's ASM block */
static void test_kmf_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb92a0000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message with Output Feedback (KMO) CPU subfunction's ASM block */
static void test_kmo_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb92b0000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Perform Cryptographic Computation (PCC) CPU subfunction's ASM block */
static void test_pcc_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb92c0000,0,0\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Perform Random Number Operation (PRNO) CPU subfunction's ASM block */
static void test_prno_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb93c0000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Cipher Message with Authentication (KMA) CPU subfunction's ASM block */
static void test_kma_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rrf,0xb9290000,2,4,6,0\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

/* Testing Crypto Compute Digital Signature Authentication (KDSA) CPU subfunction's ASM block */
static void test_kdsa_asm_block(u8 (*query)[16])
{
	asm volatile("	la	%%r1,%[query]\n"
		     "	xgr	%%r0,%%r0\n"
		     "	.insn	rre,0xb93a0000,0,2\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "r0", "r1");
}

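/*
 * The SORTL and DFLTCC queries return a 32-byte parameter block, hence the
 * larger buffers compared to the 16-byte crypto query results above.
 */
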
/* Testing Sort Lists (SORTL) CPU subfunction's ASM block */
static void test_sortl_asm_block(u8 (*query)[32])
{
	asm volatile("	lghi	0,0\n"
		     "	la	1,%[query]\n"
		     "	.insn	rre,0xb9380000,2,4\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "0", "1");
}

/* Testing Deflate Conversion Call (DFLTCC) CPU subfunction's ASM block */
static void test_dfltcc_asm_block(u8 (*query)[32])
{
	asm volatile("	lghi	0,0\n"
		     "	la	1,%[query]\n"
		     "	.insn	rrf,0xb9390000,2,4,6,0\n"
		     : [query] "=R" (*query)
		     :
		     : "cc", "0", "1");
}

/*
 * Testing Perform Function with Concurrent Results (PFCR)
 * CPU subfunction's ASM block
 */
static void test_pfcr_asm_block(u8 (*query)[16])
{
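	/*
	 * Unlike the crypto queries above, PFCR takes its parameter block as a
	 * storage operand (hence the "QS" constraint) rather than via GR1;
	 * GR0 holds the query function code.
	 */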
	asm volatile("	lghi	0,0\n"
		     "	.insn	rsy,0xeb0000000016,0,0,%[query]\n"
		     : [query] "=QS" (*query)
		     :
		     : "cc", "0");
}

typedef void (*testfunc_t)(u8 (*array)[]);

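/*
 * Each entry pairs the KVM-reported subfunction data with the local ASM query
 * that produces the same result, gated by the facility bit that advertises
 * the corresponding instruction.
 */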
struct testdef {
	const char *subfunc_name;
	u8 *subfunc_array;
	size_t array_size;
	testfunc_t test;
	int facility_bit;
} testlist[] = {
	/*
	 * PLO was introduced in the very first 64-bit machine generation.
	 * Hence PLO is assumed to always be installed on z/Architecture.
	 */
	{ "PLO", cpu_subfunc.plo, sizeof(cpu_subfunc.plo), test_plo_asm_block, 1 },
	/* MSA - Facility bit 17 */
	{ "KMAC", cpu_subfunc.kmac, sizeof(cpu_subfunc.kmac), test_kmac_asm_block, 17 },
	{ "KMC", cpu_subfunc.kmc, sizeof(cpu_subfunc.kmc), test_kmc_asm_block, 17 },
	{ "KM", cpu_subfunc.km, sizeof(cpu_subfunc.km), test_km_asm_block, 17 },
	{ "KIMD", cpu_subfunc.kimd, sizeof(cpu_subfunc.kimd), test_kimd_asm_block, 17 },
	{ "KLMD", cpu_subfunc.klmd, sizeof(cpu_subfunc.klmd), test_klmd_asm_block, 17 },
	/* MSA4 - Facility bit 77 */
	{ "KMCTR", cpu_subfunc.kmctr, sizeof(cpu_subfunc.kmctr), test_kmctr_asm_block, 77 },
	{ "KMF", cpu_subfunc.kmf, sizeof(cpu_subfunc.kmf), test_kmf_asm_block, 77 },
	{ "KMO", cpu_subfunc.kmo, sizeof(cpu_subfunc.kmo), test_kmo_asm_block, 77 },
	{ "PCC", cpu_subfunc.pcc, sizeof(cpu_subfunc.pcc), test_pcc_asm_block, 77 },
	/* MSA5 - Facility bit 57 */
	{ "PPNO", cpu_subfunc.ppno, sizeof(cpu_subfunc.ppno), test_prno_asm_block, 57 },
	/* MSA8 - Facility bit 146 */
	{ "KMA", cpu_subfunc.kma, sizeof(cpu_subfunc.kma), test_kma_asm_block, 146 },
	/* MSA9 - Facility bit 155 */
	{ "KDSA", cpu_subfunc.kdsa, sizeof(cpu_subfunc.kdsa), test_kdsa_asm_block, 155 },
	/* SORTL - Facility bit 150 */
	{ "SORTL", cpu_subfunc.sortl, sizeof(cpu_subfunc.sortl), test_sortl_asm_block, 150 },
	/* DFLTCC - Facility bit 151 */
	{ "DFLTCC", cpu_subfunc.dfltcc, sizeof(cpu_subfunc.dfltcc), test_dfltcc_asm_block, 151 },
	/* Concurrent-function facility - Facility bit 201 */
	{ "PFCR", cpu_subfunc.pfcr, sizeof(cpu_subfunc.pfcr), test_pfcr_asm_block, 201 },
};

int main(int argc, char *argv[])
{
	struct kvm_vm *vm;
	int idx;

	ksft_print_header();

	vm = vm_create(1);

	memset(&cpu_subfunc, 0, sizeof(cpu_subfunc));
	get_cpu_machine_subfunctions(vm, &cpu_subfunc);

	ksft_set_plan(ARRAY_SIZE(testlist));
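	/*
	 * For every subfunction: if the host installs the corresponding
	 * facility, run the local query and compare its result with the data
	 * reported by KVM; otherwise skip the test.
	 */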
	for (idx = 0; idx < ARRAY_SIZE(testlist); idx++) {
		if (test_facility(testlist[idx].facility_bit)) {
			u8 *array = malloc(testlist[idx].array_size);

			TEST_ASSERT(array, "Failed to allocate %zu bytes",
				    testlist[idx].array_size);

			testlist[idx].test((u8 (*)[testlist[idx].array_size])array);

			TEST_ASSERT_EQ(memcmp(testlist[idx].subfunc_array,
					      array, testlist[idx].array_size), 0);

			ksft_test_result_pass("%s\n", testlist[idx].subfunc_name);
			free(array);
		} else {
			ksft_test_result_skip("%s feature is not available\n",
					      testlist[idx].subfunc_name);
		}
	}

	kvm_vm_free(vm);
	ksft_finished();
}