20 void xa_dump(const struct xarray *xa) { } in xa_dump() argument
23 #define XA_BUG_ON(xa, x) do { \ argument
27 xa_dump(xa); \
32 } while (0)
40 static void *xa_store_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_store_index() argument
42 return xa_store(xa, index, xa_mk_index(index), gfp); in xa_store_index()
45 static void xa_insert_index(struct xarray *xa, unsigned long index) in xa_insert_index() argument
47 XA_BUG_ON(xa, xa_insert(xa, index, xa_mk_index(index), in xa_insert_index()
48 GFP_KERNEL) != 0); in xa_insert_index()
51 static void xa_alloc_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_alloc_index() argument
55 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(index), xa_limit_32b, in xa_alloc_index()
56 gfp) != 0); in xa_alloc_index()
57 XA_BUG_ON(xa, id != index); in xa_alloc_index()
60 static void xa_erase_index(struct xarray *xa, unsigned long index) in xa_erase_index() argument
62 XA_BUG_ON(xa, xa_erase(xa, index) != xa_mk_index(index)); in xa_erase_index()
63 XA_BUG_ON(xa, xa_load(xa, index) != NULL); in xa_erase_index()
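/*
 * A minimal sketch (not part of the test file) of the round trip the
 * helpers above are built on, assuming <linux/xarray.h> and a context
 * where GFP_KERNEL allocations are allowed.  xa_mk_value() packs a small
 * integer into an entry pointer and xa_to_value() unpacks it again.
 */
static void example_round_trip(struct xarray *xa)
{
	void *entry;

	/* Store a value entry at index 3; NULL means the slot was empty. */
	entry = xa_store(xa, 3, xa_mk_value(3), GFP_KERNEL);

	entry = xa_load(xa, 3);
	if (xa_is_value(entry))
		pr_info("index 3 holds %lu\n", xa_to_value(entry));

	/* Remove it again; xa_erase() returns the entry it removed. */
	entry = xa_erase(xa, 3);
}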
71 static void *xa_store_order(struct xarray *xa, unsigned long index, in xa_store_order() argument
74 XA_STATE_ORDER(xas, xa, index, order); in xa_store_order()
86 static noinline void check_xa_err(struct xarray *xa) in check_xa_err() argument
88 XA_BUG_ON(xa, xa_err(xa_store_index(xa, 0, GFP_NOWAIT)) != 0); in check_xa_err()
89 XA_BUG_ON(xa, xa_err(xa_erase(xa, 0)) != 0); in check_xa_err()
92 XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_NOWAIT)) != -ENOMEM); in check_xa_err()
93 XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_NOWAIT)) != -ENOMEM); in check_xa_err()
95 XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_KERNEL)) != 0); in check_xa_err()
96 XA_BUG_ON(xa, xa_err(xa_store(xa, 1, xa_mk_value(0), GFP_KERNEL)) != 0); in check_xa_err()
97 XA_BUG_ON(xa, xa_err(xa_erase(xa, 1)) != 0); in check_xa_err()
99 // XA_BUG_ON(xa, xa_err(xa_store(xa, 0, xa_mk_internal(0), 0)) != -EINVAL); in check_xa_err()
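/*
 * Sketch of the xa_err() convention exercised above (an illustrative
 * example, not taken from the test): store-type calls return either the
 * previous entry or an errno encoded as a pointer, and xa_err() turns
 * that into 0 or a negative error such as -ENOMEM.
 */
static int example_store_checked(struct xarray *xa, unsigned long index)
{
	void *old = xa_store(xa, index, xa_mk_value(index), GFP_KERNEL);

	/* 0 on success, -ENOMEM (or another errno) on failure. */
	return xa_err(old);
}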
102 static noinline void check_xas_retry(struct xarray *xa) in check_xas_retry() argument
104 XA_STATE(xas, xa, 0); in check_xas_retry()
107 xa_store_index(xa, 0, GFP_KERNEL); in check_xas_retry()
108 xa_store_index(xa, 1, GFP_KERNEL); in check_xas_retry()
111 XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_value(0)); in check_xas_retry()
112 xa_erase_index(xa, 1); in check_xas_retry()
113 XA_BUG_ON(xa, !xa_is_retry(xas_reload(&xas))); in check_xas_retry()
114 XA_BUG_ON(xa, xas_retry(&xas, NULL)); in check_xas_retry()
115 XA_BUG_ON(xa, xas_retry(&xas, xa_mk_value(0))); in check_xas_retry()
117 XA_BUG_ON(xa, xas.xa_node != XAS_RESTART); in check_xas_retry()
118 XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0)); in check_xas_retry()
119 XA_BUG_ON(xa, xas.xa_node != NULL); in check_xas_retry()
122 XA_BUG_ON(xa, xa_store_index(xa, 1, GFP_KERNEL) != NULL); in check_xas_retry()
125 XA_BUG_ON(xa, !xa_is_internal(xas_reload(&xas))); in check_xas_retry()
127 XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0)); in check_xas_retry()
132 xas_set(&xas, 0); in check_xas_retry()
137 xas_set(&xas, 0); in check_xas_retry()
143 xa_erase_index(xa, 0); in check_xas_retry()
144 xa_erase_index(xa, 1); in check_xas_retry()
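/*
 * The retry entries probed above normally stay invisible because lookups
 * are written like this sketch: a lockless walk under RCU that treats a
 * retry entry as "look at this slot again".  Hedged example only; it
 * assumes the caller just wants to visit every present entry.
 */
static void example_rcu_walk(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	void *entry;

	rcu_read_lock();
	xas_for_each(&xas, entry, ULONG_MAX) {
		if (xas_retry(&xas, entry))
			continue;	/* raced with a node change; retry the slot */
		/* use entry */
	}
	rcu_read_unlock();
}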
147 static noinline void check_xa_load(struct xarray *xa) in check_xa_load() argument
151 for (i = 0; i < 1024; i++) { in check_xa_load()
152 for (j = 0; j < 1024; j++) { in check_xa_load()
153 void *entry = xa_load(xa, j); in check_xa_load()
155 XA_BUG_ON(xa, xa_to_value(entry) != j); in check_xa_load()
157 XA_BUG_ON(xa, entry); in check_xa_load()
159 XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL); in check_xa_load()
162 for (i = 0; i < 1024; i++) { in check_xa_load()
163 for (j = 0; j < 1024; j++) { in check_xa_load()
164 void *entry = xa_load(xa, j); in check_xa_load()
166 XA_BUG_ON(xa, xa_to_value(entry) != j); in check_xa_load()
168 XA_BUG_ON(xa, entry); in check_xa_load()
170 xa_erase_index(xa, i); in check_xa_load()
172 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_load()
175 static noinline void check_xa_mark_1(struct xarray *xa, unsigned long index) in check_xa_mark_1() argument
181 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
182 xa_set_mark(xa, index, XA_MARK_0); in check_xa_mark_1()
183 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
186 XA_BUG_ON(xa, xa_store_index(xa, index, GFP_KERNEL) != NULL); in check_xa_mark_1()
187 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
188 xa_set_mark(xa, index, XA_MARK_0); in check_xa_mark_1()
189 XA_BUG_ON(xa, !xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
192 XA_BUG_ON(xa, xa_get_mark(xa, index + 1, XA_MARK_0)); in check_xa_mark_1()
193 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_1)); in check_xa_mark_1()
196 xa_erase_index(xa, index); in check_xa_mark_1()
197 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_mark_1()
198 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
199 xa_set_mark(xa, index, XA_MARK_0); in check_xa_mark_1()
200 XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0)); in check_xa_mark_1()
206 BUG_ON((index % 4) != 0); in check_xa_mark_1()
212 XA_BUG_ON(xa, xa_store_index(xa, index + 1, GFP_KERNEL)); in check_xa_mark_1()
213 xa_set_mark(xa, index + 1, XA_MARK_0); in check_xa_mark_1()
214 XA_BUG_ON(xa, xa_store_index(xa, index + 2, GFP_KERNEL)); in check_xa_mark_1()
215 xa_set_mark(xa, index + 2, XA_MARK_2); in check_xa_mark_1()
216 XA_BUG_ON(xa, xa_store_index(xa, next, GFP_KERNEL)); in check_xa_mark_1()
217 xa_store_order(xa, index, order, xa_mk_index(index), in check_xa_mark_1()
220 XA_STATE(xas, xa, i); in check_xa_mark_1()
221 unsigned int seen = 0; in check_xa_mark_1()
224 XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_0)); in check_xa_mark_1()
225 XA_BUG_ON(xa, xa_get_mark(xa, i, XA_MARK_1)); in check_xa_mark_1()
226 XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_2)); in check_xa_mark_1()
233 XA_BUG_ON(xa, seen != 2); in check_xa_mark_1()
236 xas_set(&xas, 0); in check_xa_mark_1()
237 seen = 0; in check_xa_mark_1()
242 XA_BUG_ON(xa, seen != 1); in check_xa_mark_1()
244 XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_0)); in check_xa_mark_1()
245 XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_1)); in check_xa_mark_1()
246 XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_2)); in check_xa_mark_1()
247 xa_erase_index(xa, index); in check_xa_mark_1()
248 xa_erase_index(xa, next); in check_xa_mark_1()
249 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_mark_1()
251 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_mark_1()
254 static noinline void check_xa_mark_2(struct xarray *xa) in check_xa_mark_2() argument
256 XA_STATE(xas, xa, 0); in check_xa_mark_2()
258 unsigned int count = 0; in check_xa_mark_2()
261 xa_store_index(xa, 0, GFP_KERNEL); in check_xa_mark_2()
262 xa_set_mark(xa, 0, XA_MARK_0); in check_xa_mark_2()
267 XA_BUG_ON(xa, !xa_get_mark(xa, 0, XA_MARK_0) == 0); in check_xa_mark_2()
270 xa_store_index(xa, index, GFP_KERNEL); in check_xa_mark_2()
271 xa_set_mark(xa, index, XA_MARK_0); in check_xa_mark_2()
279 XA_BUG_ON(xa, count != 1000); in check_xa_mark_2()
284 XA_BUG_ON(xa, !xa_get_mark(xa, xas.xa_index, XA_MARK_0)); in check_xa_mark_2()
285 XA_BUG_ON(xa, !xas_get_mark(&xas, XA_MARK_0)); in check_xa_mark_2()
289 xa_destroy(xa); in check_xa_mark_2()
292 static noinline void check_xa_mark_3(struct xarray *xa) in check_xa_mark_3() argument
295 XA_STATE(xas, xa, 0x41); in check_xa_mark_3()
297 int count = 0; in check_xa_mark_3()
299 xa_store_order(xa, 0x40, 2, xa_mk_index(0x40), GFP_KERNEL); in check_xa_mark_3()
300 xa_set_mark(xa, 0x41, XA_MARK_0); in check_xa_mark_3()
305 XA_BUG_ON(xa, entry != xa_mk_index(0x40)); in check_xa_mark_3()
307 XA_BUG_ON(xa, count != 1); in check_xa_mark_3()
309 xa_destroy(xa); in check_xa_mark_3()
313 static noinline void check_xa_mark(struct xarray *xa) in check_xa_mark() argument
317 for (index = 0; index < 16384; index += 4) in check_xa_mark()
318 check_xa_mark_1(xa, index); in check_xa_mark()
320 check_xa_mark_2(xa); in check_xa_mark()
321 check_xa_mark_3(xa); in check_xa_mark()
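/*
 * Rough sketch of the mark API the three tests above cover: a mark is a
 * per-entry tag that can be set, tested and iterated independently of the
 * entry itself.  The index and messages below are illustrative, not from
 * the test.
 */
static void example_marks(struct xarray *xa)
{
	unsigned long index;
	void *entry;

	xa_store(xa, 7, xa_mk_value(7), GFP_KERNEL);
	xa_set_mark(xa, 7, XA_MARK_0);

	if (xa_get_mark(xa, 7, XA_MARK_0))
		pr_info("index 7 is tagged\n");

	/* Visit only the entries carrying XA_MARK_0. */
	xa_for_each_marked(xa, index, entry, XA_MARK_0)
		pr_info("marked entry at %lu\n", index);

	xa_clear_mark(xa, 7, XA_MARK_0);
}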
324 static noinline void check_xa_shrink(struct xarray *xa) in check_xa_shrink() argument
326 XA_STATE(xas, xa, 1); in check_xa_shrink()
331 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_shrink()
332 XA_BUG_ON(xa, xa_store_index(xa, 0, GFP_KERNEL) != NULL); in check_xa_shrink()
333 XA_BUG_ON(xa, xa_store_index(xa, 1, GFP_KERNEL) != NULL); in check_xa_shrink()
340 XA_BUG_ON(xa, xas_load(&xas) != xa_mk_value(1)); in check_xa_shrink()
342 XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != xa_mk_value(0)); in check_xa_shrink()
343 XA_BUG_ON(xa, xas_store(&xas, NULL) != xa_mk_value(1)); in check_xa_shrink()
344 XA_BUG_ON(xa, xa_load(xa, 1) != NULL); in check_xa_shrink()
345 XA_BUG_ON(xa, xas.xa_node != XAS_BOUNDS); in check_xa_shrink()
346 XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != XA_RETRY_ENTRY); in check_xa_shrink()
347 XA_BUG_ON(xa, xas_load(&xas) != NULL); in check_xa_shrink()
349 XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0)); in check_xa_shrink()
350 xa_erase_index(xa, 0); in check_xa_shrink()
351 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_shrink()
353 for (order = 0; order < max_order; order++) { in check_xa_shrink()
355 xa_store_order(xa, 0, order, xa_mk_value(0), GFP_KERNEL); in check_xa_shrink()
356 XA_BUG_ON(xa, xa_load(xa, max) != xa_mk_value(0)); in check_xa_shrink()
357 XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL); in check_xa_shrink()
359 node = xa_head(xa); in check_xa_shrink()
361 XA_BUG_ON(xa, xa_store_index(xa, ULONG_MAX, GFP_KERNEL) != in check_xa_shrink()
364 XA_BUG_ON(xa, xa_head(xa) == node); in check_xa_shrink()
366 XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL); in check_xa_shrink()
367 xa_erase_index(xa, ULONG_MAX); in check_xa_shrink()
368 XA_BUG_ON(xa, xa->xa_head != node); in check_xa_shrink()
369 xa_erase_index(xa, 0); in check_xa_shrink()
373 static noinline void check_insert(struct xarray *xa) in check_insert() argument
377 for (i = 0; i < 1024; i++) { in check_insert()
378 xa_insert_index(xa, i); in check_insert()
379 XA_BUG_ON(xa, xa_load(xa, i - 1) != NULL); in check_insert()
380 XA_BUG_ON(xa, xa_load(xa, i + 1) != NULL); in check_insert()
381 xa_erase_index(xa, i); in check_insert()
385 xa_insert_index(xa, 1UL << i); in check_insert()
386 XA_BUG_ON(xa, xa_load(xa, (1UL << i) - 1) != NULL); in check_insert()
387 XA_BUG_ON(xa, xa_load(xa, (1UL << i) + 1) != NULL); in check_insert()
388 xa_erase_index(xa, 1UL << i); in check_insert()
390 xa_insert_index(xa, (1UL << i) - 1); in check_insert()
391 XA_BUG_ON(xa, xa_load(xa, (1UL << i) - 2) != NULL); in check_insert()
392 XA_BUG_ON(xa, xa_load(xa, 1UL << i) != NULL); in check_insert()
393 xa_erase_index(xa, (1UL << i) - 1); in check_insert()
396 xa_insert_index(xa, ~0UL); in check_insert()
397 XA_BUG_ON(xa, xa_load(xa, 0UL) != NULL); in check_insert()
398 XA_BUG_ON(xa, xa_load(xa, ~1UL) != NULL); in check_insert()
399 xa_erase_index(xa, ~0UL); in check_insert()
401 XA_BUG_ON(xa, !xa_empty(xa)); in check_insert()
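/*
 * Sketch of the xa_insert() contract check_insert() relies on (assumed
 * usage, not from the file): insertion only succeeds into an empty slot,
 * otherwise it fails with -EBUSY and leaves the existing entry alone.
 */
static int example_insert_once(struct xarray *xa, unsigned long index,
			       void *item)
{
	int err = xa_insert(xa, index, item, GFP_KERNEL);

	if (err == -EBUSY)
		pr_info("index %lu already populated\n", index);
	return err;	/* 0, -EBUSY or -ENOMEM */
}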
404 static noinline void check_cmpxchg(struct xarray *xa) in check_cmpxchg() argument
410 XA_BUG_ON(xa, !xa_empty(xa)); in check_cmpxchg()
411 XA_BUG_ON(xa, xa_store_index(xa, 12345678, GFP_KERNEL) != NULL); in check_cmpxchg()
412 XA_BUG_ON(xa, xa_insert(xa, 12345678, xa, GFP_KERNEL) != -EBUSY); in check_cmpxchg()
413 XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, SIX, FIVE, GFP_KERNEL) != LOTS); in check_cmpxchg()
414 XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, LOTS, FIVE, GFP_KERNEL) != LOTS); in check_cmpxchg()
415 XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, FIVE, LOTS, GFP_KERNEL) != FIVE); in check_cmpxchg()
416 XA_BUG_ON(xa, xa_cmpxchg(xa, 5, FIVE, NULL, GFP_KERNEL) != NULL); in check_cmpxchg()
417 XA_BUG_ON(xa, xa_cmpxchg(xa, 5, NULL, FIVE, GFP_KERNEL) != NULL); in check_cmpxchg()
418 XA_BUG_ON(xa, xa_insert(xa, 5, FIVE, GFP_KERNEL) != -EBUSY); in check_cmpxchg()
419 XA_BUG_ON(xa, xa_cmpxchg(xa, 5, FIVE, NULL, GFP_KERNEL) != FIVE); in check_cmpxchg()
420 XA_BUG_ON(xa, xa_insert(xa, 5, FIVE, GFP_KERNEL) == -EBUSY); in check_cmpxchg()
421 xa_erase_index(xa, 12345678); in check_cmpxchg()
422 xa_erase_index(xa, 5); in check_cmpxchg()
423 XA_BUG_ON(xa, !xa_empty(xa)); in check_cmpxchg()
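/*
 * Hedged sketch of the compare-and-exchange pattern tested above: the
 * return value is whatever was in the slot, so success is detected by
 * comparing it with the expected old entry.
 */
static bool example_cmpxchg(struct xarray *xa, unsigned long index,
			    void *old, void *new)
{
	void *curr = xa_cmpxchg(xa, index, old, new, GFP_KERNEL);

	if (xa_is_err(curr))
		return false;		/* allocation failed */
	return curr == old;		/* true if @old was actually replaced */
}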
426 static noinline void check_cmpxchg_order(struct xarray *xa) in check_cmpxchg_order() argument
432 XA_BUG_ON(xa, xa_store_order(xa, 0, order, FIVE, GFP_KERNEL)); in check_cmpxchg_order()
435 XA_BUG_ON(xa, xa_get_order(xa, xa_to_value(FIVE)) != order); in check_cmpxchg_order()
438 for (i = 0; i < (1 << order); i++) { in check_cmpxchg_order()
439 XA_BUG_ON(xa, xa_load(xa, i) != FIVE); in check_cmpxchg_order()
440 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
444 XA_BUG_ON(xa, xa_load(xa, 1 << order) != NULL); in check_cmpxchg_order()
450 XA_BUG_ON(xa, xa_store_order(xa, 1 << order, order, FIVE, GFP_KERNEL)); in check_cmpxchg_order()
452 XA_BUG_ON(xa, xa_load(xa, i) != FIVE); in check_cmpxchg_order()
453 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
456 /* Conditionally replace FIVE entry at index '0' with NULL */ in check_cmpxchg_order()
457 XA_BUG_ON(xa, xa_cmpxchg(xa, 0, FIVE, NULL, GFP_KERNEL) != FIVE); in check_cmpxchg_order()
460 XA_BUG_ON(xa, xa_get_order(xa, xa_to_value(FIVE)) != 0); in check_cmpxchg_order()
463 for (i = 0; i < (1 << order); i++) { in check_cmpxchg_order()
464 XA_BUG_ON(xa, xa_load(xa, i) != NULL); in check_cmpxchg_order()
465 XA_BUG_ON(xa, xa_get_order(xa, i) != 0); in check_cmpxchg_order()
470 XA_BUG_ON(xa, xa_load(xa, i) != FIVE); in check_cmpxchg_order()
471 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
474 xa_store_order(xa, 0, BITS_PER_LONG - 1, NULL, GFP_KERNEL); in check_cmpxchg_order()
475 XA_BUG_ON(xa, !xa_empty(xa)); in check_cmpxchg_order()
479 static noinline void check_reserve(struct xarray *xa) in check_reserve() argument
486 XA_BUG_ON(xa, !xa_empty(xa)); in check_reserve()
487 XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0); in check_reserve()
488 XA_BUG_ON(xa, xa_empty(xa)); in check_reserve()
489 XA_BUG_ON(xa, xa_load(xa, 12345678)); in check_reserve()
490 xa_release(xa, 12345678); in check_reserve()
491 XA_BUG_ON(xa, !xa_empty(xa)); in check_reserve()
494 XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0); in check_reserve()
495 XA_BUG_ON(xa, xa_store_index(xa, 12345678, GFP_NOWAIT) != NULL); in check_reserve()
496 xa_release(xa, 12345678); in check_reserve()
497 xa_erase_index(xa, 12345678); in check_reserve()
498 XA_BUG_ON(xa, !xa_empty(xa)); in check_reserve()
501 XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0); in check_reserve()
502 XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, XA_ZERO_ENTRY, in check_reserve()
504 xa_release(xa, 12345678); in check_reserve()
505 xa_erase_index(xa, 12345678); in check_reserve()
506 XA_BUG_ON(xa, !xa_empty(xa)); in check_reserve()
509 XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0); in check_reserve()
510 XA_BUG_ON(xa, xa_insert(xa, 12345678, xa_mk_value(12345678), 0) != in check_reserve()
512 XA_BUG_ON(xa, xa_empty(xa)); in check_reserve()
513 XA_BUG_ON(xa, xa_erase(xa, 12345678) != NULL); in check_reserve()
514 XA_BUG_ON(xa, !xa_empty(xa)); in check_reserve()
517 xa_store_index(xa, 5, GFP_KERNEL); in check_reserve()
518 XA_BUG_ON(xa, xa_reserve(xa, 6, GFP_KERNEL) != 0); in check_reserve()
519 xa_store_index(xa, 7, GFP_KERNEL); in check_reserve()
521 count = 0; in check_reserve()
522 xa_for_each(xa, index, entry) { in check_reserve()
523 XA_BUG_ON(xa, index != 5 && index != 7); in check_reserve()
526 XA_BUG_ON(xa, count != 2); in check_reserve()
529 if (xa->xa_flags & XA_FLAGS_ALLOC) { in check_reserve()
532 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_value(8), in check_reserve()
533 XA_LIMIT(5, 10), GFP_KERNEL) != 0); in check_reserve()
534 XA_BUG_ON(xa, id != 8); in check_reserve()
536 xa_release(xa, 6); in check_reserve()
537 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_value(6), in check_reserve()
538 XA_LIMIT(5, 10), GFP_KERNEL) != 0); in check_reserve()
539 XA_BUG_ON(xa, id != 6); in check_reserve()
542 xa_destroy(xa); in check_reserve()
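/*
 * Sketch of the reserve/release idiom check_reserve() exercises, assuming
 * a caller that wants to pre-allocate a slot before entering a context
 * that must not sleep.  A reserved slot reads back as NULL until it is
 * actually used.
 */
static int example_reserve(struct xarray *xa, unsigned long index, void *item)
{
	int err = xa_reserve(xa, index, GFP_KERNEL);

	if (err)
		return err;

	if (!item) {
		/* Nothing to store after all; give the slot back. */
		xa_release(xa, index);
		return 0;
	}

	/* The slot is pre-allocated, so this store needs no memory. */
	return xa_err(xa_store(xa, index, item, GFP_NOWAIT));
}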
545 static noinline void check_xas_erase(struct xarray *xa) in check_xas_erase() argument
547 XA_STATE(xas, xa, 0); in check_xas_erase()
551 for (i = 0; i < 200; i++) { in check_xas_erase()
564 xas_store(&xas, xa_mk_value(0)); in check_xas_erase()
571 xas_set(&xas, 0); in check_xas_erase()
574 XA_BUG_ON(xa, entry != xa_mk_index(j)); in check_xas_erase()
579 XA_BUG_ON(xa, !xa_empty(xa)); in check_xas_erase()
584 static noinline void check_multi_store_1(struct xarray *xa, unsigned long index, in check_multi_store_1() argument
587 XA_STATE(xas, xa, index); in check_multi_store_1()
591 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_multi_store_1()
592 XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(index)); in check_multi_store_1()
593 XA_BUG_ON(xa, xa_load(xa, max - 1) != xa_mk_index(index)); in check_multi_store_1()
594 XA_BUG_ON(xa, xa_load(xa, max) != NULL); in check_multi_store_1()
595 XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL); in check_multi_store_1()
598 XA_BUG_ON(xa, xas_store(&xas, xa_mk_index(min)) != xa_mk_index(index)); in check_multi_store_1()
600 XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(min)); in check_multi_store_1()
601 XA_BUG_ON(xa, xa_load(xa, max - 1) != xa_mk_index(min)); in check_multi_store_1()
602 XA_BUG_ON(xa, xa_load(xa, max) != NULL); in check_multi_store_1()
603 XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL); in check_multi_store_1()
605 xa_erase_index(xa, min); in check_multi_store_1()
606 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_store_1()
609 static noinline void check_multi_store_2(struct xarray *xa, unsigned long index, in check_multi_store_2() argument
612 XA_STATE(xas, xa, index); in check_multi_store_2()
613 xa_store_order(xa, index, order, xa_mk_value(0), GFP_KERNEL); in check_multi_store_2()
616 XA_BUG_ON(xa, xas_store(&xas, xa_mk_value(1)) != xa_mk_value(0)); in check_multi_store_2()
617 XA_BUG_ON(xa, xas.xa_index != index); in check_multi_store_2()
618 XA_BUG_ON(xa, xas_store(&xas, NULL) != xa_mk_value(1)); in check_multi_store_2()
620 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_store_2()
623 static noinline void check_multi_store_3(struct xarray *xa, unsigned long index, in check_multi_store_3() argument
626 XA_STATE(xas, xa, 0); in check_multi_store_3()
628 int n = 0; in check_multi_store_3()
630 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_multi_store_3()
634 XA_BUG_ON(xa, entry != xa_mk_index(index)); in check_multi_store_3()
637 XA_BUG_ON(xa, n != 1); in check_multi_store_3()
640 XA_BUG_ON(xa, entry != xa_mk_index(index)); in check_multi_store_3()
643 XA_BUG_ON(xa, n != 2); in check_multi_store_3()
646 xa_destroy(xa); in check_multi_store_3()
650 static noinline void check_multi_store(struct xarray *xa) in check_multi_store() argument
657 xa_store_order(xa, 0, 1, xa_mk_value(0), GFP_KERNEL); in check_multi_store()
658 XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0)); in check_multi_store()
659 XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(0)); in check_multi_store()
660 XA_BUG_ON(xa, xa_load(xa, 2) != NULL); in check_multi_store()
662 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 2); in check_multi_store()
663 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 2); in check_multi_store()
667 xa_store(xa, 3, xa, GFP_KERNEL); in check_multi_store()
668 XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0)); in check_multi_store()
669 XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(0)); in check_multi_store()
670 XA_BUG_ON(xa, xa_load(xa, 2) != NULL); in check_multi_store()
672 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 3); in check_multi_store()
673 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 2); in check_multi_store()
677 xa_store_order(xa, 0, 2, xa_mk_value(1), GFP_KERNEL); in check_multi_store()
678 XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(1)); in check_multi_store()
679 XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(1)); in check_multi_store()
680 XA_BUG_ON(xa, xa_load(xa, 2) != xa_mk_value(1)); in check_multi_store()
681 XA_BUG_ON(xa, xa_load(xa, 3) != xa_mk_value(1)); in check_multi_store()
682 XA_BUG_ON(xa, xa_load(xa, 4) != NULL); in check_multi_store()
684 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 4); in check_multi_store()
685 XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 4); in check_multi_store()
689 xa_store_order(xa, 0, BITS_PER_LONG - 1, NULL, GFP_KERNEL); in check_multi_store()
690 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_store()
693 xa_store_index(xa, 1, GFP_KERNEL); in check_multi_store()
694 xa_store_index(xa, 2, GFP_KERNEL); in check_multi_store()
695 xa_store_order(xa, 0, 2, NULL, GFP_KERNEL); in check_multi_store()
696 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_store()
698 for (i = 0; i < max_order; i++) { in check_multi_store()
699 for (j = 0; j < max_order; j++) { in check_multi_store()
700 xa_store_order(xa, 0, i, xa_mk_index(i), GFP_KERNEL); in check_multi_store()
701 xa_store_order(xa, 0, j, xa_mk_index(j), GFP_KERNEL); in check_multi_store()
703 for (k = 0; k < max_order; k++) { in check_multi_store()
704 void *entry = xa_load(xa, (1UL << k) - 1); in check_multi_store()
706 XA_BUG_ON(xa, entry != NULL); in check_multi_store()
708 XA_BUG_ON(xa, entry != xa_mk_index(j)); in check_multi_store()
711 xa_erase(xa, 0); in check_multi_store()
712 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_store()
716 for (i = 0; i < 20; i++) { in check_multi_store()
717 check_multi_store_1(xa, 200, i); in check_multi_store()
718 check_multi_store_1(xa, 0, i); in check_multi_store()
719 check_multi_store_1(xa, (1UL << i) + 1, i); in check_multi_store()
721 check_multi_store_2(xa, 4095, 9); in check_multi_store()
724 check_multi_store_3(xa, 0, i); in check_multi_store()
725 check_multi_store_3(xa, 1UL << i, i); in check_multi_store()
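/*
 * xa_store_order() above is a helper local to this test; the underlying
 * multi-index interface (CONFIG_XARRAY_MULTI) looks roughly like this
 * sketch: describe the range with XA_STATE_ORDER() and store through the
 * xa_state, retrying if memory has to be allocated.
 */
static void example_store_order(struct xarray *xa)
{
	/* One entry covering indices 512..515 (order 2). */
	XA_STATE_ORDER(xas, xa, 512, 2);

	do {
		xas_lock(&xas);
		xas_store(&xas, xa_mk_value(512));
		xas_unlock(&xas);
	} while (xas_nomem(&xas, GFP_KERNEL));

	/* Every covered index now loads the same entry. */
	WARN_ON(xa_load(xa, 514) != xa_mk_value(512));
}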
732 static noinline void check_xa_multi_store_adv_add(struct xarray *xa, in check_xa_multi_store_adv_add() argument
737 XA_STATE(xas, xa, index); in check_xa_multi_store_adv_add()
741 XA_BUG_ON(xa, index & (nrpages - 1)); in check_xa_multi_store_adv_add()
756 XA_BUG_ON(xa, xas_error(&xas) && xas_error(&xas) != -ENOMEM); in check_xa_multi_store_adv_add()
759 XA_BUG_ON(xa, xas_error(&xas)); in check_xa_multi_store_adv_add()
760 XA_BUG_ON(xa, xa_load(xa, index) != p); in check_xa_multi_store_adv_add()
764 static noinline void check_xa_multi_store_adv_del_entry(struct xarray *xa, in check_xa_multi_store_adv_del_entry() argument
768 XA_STATE(xas, xa, index); in check_xa_multi_store_adv_del_entry()
775 static noinline void check_xa_multi_store_adv_delete(struct xarray *xa, in check_xa_multi_store_adv_delete() argument
779 xa_lock_irq(xa); in check_xa_multi_store_adv_delete()
780 check_xa_multi_store_adv_del_entry(xa, index, order); in check_xa_multi_store_adv_delete()
781 xa_unlock_irq(xa); in check_xa_multi_store_adv_delete()
785 static noinline void *test_get_entry(struct xarray *xa, unsigned long index) in test_get_entry() argument
787 XA_STATE(xas, xa, index); in test_get_entry()
789 static unsigned int loops = 0; in test_get_entry()
807 if (++loops % XA_CHECK_SCHED == 0) in test_get_entry()
813 static unsigned long some_val = 0xdeadbeef;
814 static unsigned long some_val_2 = 0xdeaddead;
817 static noinline void check_xa_multi_store_adv(struct xarray *xa, in check_xa_multi_store_adv() argument
830 check_xa_multi_store_adv_add(xa, base, order, &some_val); in check_xa_multi_store_adv()
832 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
833 XA_BUG_ON(xa, test_get_entry(xa, base + i) != &some_val); in check_xa_multi_store_adv()
835 XA_BUG_ON(xa, test_get_entry(xa, next_index) != NULL); in check_xa_multi_store_adv()
837 /* Use order 0 for the next item */ in check_xa_multi_store_adv()
838 check_xa_multi_store_adv_add(xa, next_index, 0, &some_val_2); in check_xa_multi_store_adv()
839 XA_BUG_ON(xa, test_get_entry(xa, next_index) != &some_val_2); in check_xa_multi_store_adv()
842 check_xa_multi_store_adv_delete(xa, next_index, 0); in check_xa_multi_store_adv()
845 check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2); in check_xa_multi_store_adv()
847 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
848 XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != &some_val_2); in check_xa_multi_store_adv()
850 check_xa_multi_store_adv_delete(xa, next_index, order); in check_xa_multi_store_adv()
851 check_xa_multi_store_adv_delete(xa, base, order); in check_xa_multi_store_adv()
852 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_multi_store_adv()
859 check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2); in check_xa_multi_store_adv()
861 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
862 XA_BUG_ON(xa, test_get_entry(xa, base + i) != NULL); in check_xa_multi_store_adv()
864 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
865 XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != &some_val_2); in check_xa_multi_store_adv()
867 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
868 XA_BUG_ON(xa, test_get_entry(xa, next_next_index + i) != NULL); in check_xa_multi_store_adv()
870 check_xa_multi_store_adv_delete(xa, next_index, order); in check_xa_multi_store_adv()
871 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_multi_store_adv()
875 check_xa_multi_store_adv_add(xa, next_next_index, order, &some_val_2); in check_xa_multi_store_adv()
877 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
878 XA_BUG_ON(xa, test_get_entry(xa, base + i) != NULL); in check_xa_multi_store_adv()
880 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
881 XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != NULL); in check_xa_multi_store_adv()
883 for (i = 0; i < nrpages; i++) in check_xa_multi_store_adv()
884 XA_BUG_ON(xa, test_get_entry(xa, next_next_index + i) != &some_val_2); in check_xa_multi_store_adv()
886 check_xa_multi_store_adv_delete(xa, next_next_index, order); in check_xa_multi_store_adv()
887 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_multi_store_adv()
891 static noinline void check_multi_store_advanced(struct xarray *xa) in check_multi_store_advanced() argument
902 for (i = 0; i < max_order; i++) { in check_multi_store_advanced()
903 check_xa_multi_store_adv(xa, pos, i); in check_multi_store_advanced()
904 check_xa_multi_store_adv(xa, pos + 157, i); in check_multi_store_advanced()
910 static noinline void check_xa_alloc_1(struct xarray *xa, unsigned int base) in check_xa_alloc_1() argument
915 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_1()
917 xa_alloc_index(xa, base, GFP_KERNEL); in check_xa_alloc_1()
920 xa_erase_index(xa, base); in check_xa_alloc_1()
921 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_1()
924 xa_alloc_index(xa, base, GFP_KERNEL); in check_xa_alloc_1()
928 xa_alloc_index(xa, i, GFP_KERNEL); in check_xa_alloc_1()
930 xa_erase_index(xa, i); in check_xa_alloc_1()
931 xa_alloc_index(xa, base, GFP_KERNEL); in check_xa_alloc_1()
934 xa_destroy(xa); in check_xa_alloc_1()
937 xa_alloc_index(xa, base, GFP_KERNEL); in check_xa_alloc_1()
940 xa_alloc_index(xa, base + 1, GFP_KERNEL); in check_xa_alloc_1()
941 xa_erase_index(xa, base + 1); in check_xa_alloc_1()
944 xa_store_index(xa, base + 1, GFP_KERNEL); in check_xa_alloc_1()
945 xa_alloc_index(xa, base + 2, GFP_KERNEL); in check_xa_alloc_1()
948 xa_erase_index(xa, base); in check_xa_alloc_1()
949 xa_alloc_index(xa, base, GFP_KERNEL); in check_xa_alloc_1()
951 xa_erase_index(xa, base + 1); in check_xa_alloc_1()
952 xa_erase_index(xa, base + 2); in check_xa_alloc_1()
955 xa_alloc_index(xa, base + i, GFP_KERNEL); in check_xa_alloc_1()
958 xa_destroy(xa); in check_xa_alloc_1()
961 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(UINT_MAX - 1), in check_xa_alloc_1()
963 GFP_KERNEL) != 0); in check_xa_alloc_1()
964 XA_BUG_ON(xa, id != 0xfffffffeU); in check_xa_alloc_1()
965 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(UINT_MAX), in check_xa_alloc_1()
967 GFP_KERNEL) != 0); in check_xa_alloc_1()
968 XA_BUG_ON(xa, id != 0xffffffffU); in check_xa_alloc_1()
970 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(0), in check_xa_alloc_1()
973 XA_BUG_ON(xa, id != 3); in check_xa_alloc_1()
974 xa_destroy(xa); in check_xa_alloc_1()
976 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(10), XA_LIMIT(10, 5), in check_xa_alloc_1()
978 XA_BUG_ON(xa, xa_store_index(xa, 3, GFP_KERNEL) != 0); in check_xa_alloc_1()
979 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(10), XA_LIMIT(10, 5), in check_xa_alloc_1()
981 xa_erase_index(xa, 3); in check_xa_alloc_1()
982 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_1()
985 static noinline void check_xa_alloc_2(struct xarray *xa, unsigned int base) in check_xa_alloc_2() argument
992 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_2()
993 XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0); in check_xa_alloc_2()
994 XA_BUG_ON(xa, id != base); in check_xa_alloc_2()
995 XA_BUG_ON(xa, xa_empty(xa)); in check_xa_alloc_2()
996 XA_BUG_ON(xa, xa_erase(xa, id) != NULL); in check_xa_alloc_2()
997 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_2()
1000 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_2()
1001 XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0); in check_xa_alloc_2()
1002 XA_BUG_ON(xa, id != base); in check_xa_alloc_2()
1003 XA_BUG_ON(xa, xa_empty(xa)); in check_xa_alloc_2()
1004 xa_destroy(xa); in check_xa_alloc_2()
1005 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_2()
1008 XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, in check_xa_alloc_2()
1009 GFP_KERNEL) != 0); in check_xa_alloc_2()
1010 XA_BUG_ON(xa, id != i); in check_xa_alloc_2()
1013 XA_BUG_ON(xa, xa_store(xa, 3, xa_mk_index(3), GFP_KERNEL) != NULL); in check_xa_alloc_2()
1014 XA_BUG_ON(xa, xa_store(xa, 4, xa_mk_index(4), GFP_KERNEL) != NULL); in check_xa_alloc_2()
1015 XA_BUG_ON(xa, xa_store(xa, 4, NULL, GFP_KERNEL) != xa_mk_index(4)); in check_xa_alloc_2()
1016 XA_BUG_ON(xa, xa_erase(xa, 5) != NULL); in check_xa_alloc_2()
1017 XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0); in check_xa_alloc_2()
1018 XA_BUG_ON(xa, id != 5); in check_xa_alloc_2()
1020 xa_for_each(xa, index, entry) { in check_xa_alloc_2()
1021 xa_erase_index(xa, index); in check_xa_alloc_2()
1025 XA_BUG_ON(xa, xa_erase(xa, i) != NULL); in check_xa_alloc_2()
1026 XA_BUG_ON(xa, xa_empty(xa)); in check_xa_alloc_2()
1028 XA_BUG_ON(xa, xa_erase(xa, 8) != NULL); in check_xa_alloc_2()
1029 XA_BUG_ON(xa, xa_empty(xa)); in check_xa_alloc_2()
1030 XA_BUG_ON(xa, xa_erase(xa, base + 9) != NULL); in check_xa_alloc_2()
1031 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_2()
1033 xa_destroy(xa); in check_xa_alloc_2()
1036 static noinline void check_xa_alloc_3(struct xarray *xa, unsigned int base) in check_xa_alloc_3() argument
1038 struct xa_limit limit = XA_LIMIT(1, 0x3fff); in check_xa_alloc_3()
1039 u32 next = 0; in check_xa_alloc_3()
1044 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(1), limit, in check_xa_alloc_3()
1045 &next, GFP_KERNEL) != 0); in check_xa_alloc_3()
1046 XA_BUG_ON(xa, id != 1); in check_xa_alloc_3()
1048 next = 0x3ffd; in check_xa_alloc_3()
1049 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(0x3ffd), limit, in check_xa_alloc_3()
1050 &next, GFP_KERNEL) != 0); in check_xa_alloc_3()
1051 XA_BUG_ON(xa, id != 0x3ffd); in check_xa_alloc_3()
1052 xa_erase_index(xa, 0x3ffd); in check_xa_alloc_3()
1053 xa_erase_index(xa, 1); in check_xa_alloc_3()
1054 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_3()
1056 for (i = 0x3ffe; i < 0x4003; i++) { in check_xa_alloc_3()
1057 if (i < 0x4000) in check_xa_alloc_3()
1060 entry = xa_mk_index(i - 0x3fff); in check_xa_alloc_3()
1061 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, entry, limit, in check_xa_alloc_3()
1063 XA_BUG_ON(xa, xa_mk_index(id) != entry); in check_xa_alloc_3()
1067 if (base != 0) in check_xa_alloc_3()
1068 xa_erase_index(xa, base); in check_xa_alloc_3()
1069 xa_erase_index(xa, base + 1); in check_xa_alloc_3()
1071 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(UINT_MAX), in check_xa_alloc_3()
1072 xa_limit_32b, &next, GFP_KERNEL) != 0); in check_xa_alloc_3()
1073 XA_BUG_ON(xa, id != UINT_MAX); in check_xa_alloc_3()
1074 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(base), in check_xa_alloc_3()
1076 XA_BUG_ON(xa, id != base); in check_xa_alloc_3()
1077 XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(base + 1), in check_xa_alloc_3()
1078 xa_limit_32b, &next, GFP_KERNEL) != 0); in check_xa_alloc_3()
1079 XA_BUG_ON(xa, id != base + 1); in check_xa_alloc_3()
1081 xa_for_each(xa, index, entry) in check_xa_alloc_3()
1082 xa_erase_index(xa, index); in check_xa_alloc_3()
1084 XA_BUG_ON(xa, !xa_empty(xa)); in check_xa_alloc_3()
1092 check_xa_alloc_1(&xa0, 0); in check_xa_alloc()
1094 check_xa_alloc_2(&xa0, 0); in check_xa_alloc()
1096 check_xa_alloc_3(&xa0, 0); in check_xa_alloc()
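/*
 * Sketch of the allocating API driven by the three tests above, under the
 * assumption of an ID-allocation use case: the array hands out the lowest
 * free index within the limit.  The xarray and function names here are
 * placeholders, not taken from the test.
 */
static DEFINE_XARRAY_ALLOC(example_ids);	/* IDs handed out from 0 */

static int example_assign_id(void *object, u32 *out_id)
{
	/* Store @object at the lowest free index in [0, UINT_MAX]. */
	return xa_alloc(&example_ids, out_id, object, xa_limit_32b,
			GFP_KERNEL);
}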
1100 static noinline void __check_store_iter(struct xarray *xa, unsigned long start, in __check_store_iter() argument
1103 XA_STATE_ORDER(xas, xa, start, order); in __check_store_iter()
1105 unsigned int count = 0; in __check_store_iter()
1110 XA_BUG_ON(xa, !xa_is_value(entry)); in __check_store_iter()
1111 XA_BUG_ON(xa, entry < xa_mk_index(start)); in __check_store_iter()
1112 XA_BUG_ON(xa, entry > xa_mk_index(start + (1UL << order) - 1)); in __check_store_iter()
1118 count = 0; in __check_store_iter()
1121 XA_BUG_ON(xa, xas_error(&xas)); in __check_store_iter()
1122 XA_BUG_ON(xa, count != present); in __check_store_iter()
1123 XA_BUG_ON(xa, xa_load(xa, start) != xa_mk_index(start)); in __check_store_iter()
1124 XA_BUG_ON(xa, xa_load(xa, start + (1UL << order) - 1) != in __check_store_iter()
1126 xa_erase_index(xa, start); in __check_store_iter()
1129 static noinline void check_store_iter(struct xarray *xa) in check_store_iter() argument
1134 for (i = 0; i < max_order; i++) { in check_store_iter()
1137 __check_store_iter(xa, 0, i, 0); in check_store_iter()
1138 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1139 __check_store_iter(xa, min, i, 0); in check_store_iter()
1140 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1142 xa_store_index(xa, min, GFP_KERNEL); in check_store_iter()
1143 __check_store_iter(xa, min, i, 1); in check_store_iter()
1144 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1145 xa_store_index(xa, max, GFP_KERNEL); in check_store_iter()
1146 __check_store_iter(xa, min, i, 1); in check_store_iter()
1147 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1149 for (j = 0; j < min; j++) in check_store_iter()
1150 xa_store_index(xa, j, GFP_KERNEL); in check_store_iter()
1151 __check_store_iter(xa, 0, i, min); in check_store_iter()
1152 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1153 for (j = 0; j < min; j++) in check_store_iter()
1154 xa_store_index(xa, min + j, GFP_KERNEL); in check_store_iter()
1155 __check_store_iter(xa, min, i, min); in check_store_iter()
1156 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1159 xa_store_index(xa, 63, GFP_KERNEL); in check_store_iter()
1160 xa_store_index(xa, 65, GFP_KERNEL); in check_store_iter()
1161 __check_store_iter(xa, 64, 2, 1); in check_store_iter()
1162 xa_erase_index(xa, 63); in check_store_iter()
1164 XA_BUG_ON(xa, !xa_empty(xa)); in check_store_iter()
1167 static noinline void check_multi_find_1(struct xarray *xa, unsigned order) in check_multi_find_1() argument
1174 xa_store_order(xa, multi, order, xa_mk_value(multi), GFP_KERNEL); in check_multi_find_1()
1175 XA_BUG_ON(xa, xa_store_index(xa, next, GFP_KERNEL) != NULL); in check_multi_find_1()
1176 XA_BUG_ON(xa, xa_store_index(xa, next + 1, GFP_KERNEL) != NULL); in check_multi_find_1()
1178 index = 0; in check_multi_find_1()
1179 XA_BUG_ON(xa, xa_find(xa, &index, ULONG_MAX, XA_PRESENT) != in check_multi_find_1()
1181 XA_BUG_ON(xa, index != multi); in check_multi_find_1()
1183 XA_BUG_ON(xa, xa_find(xa, &index, ULONG_MAX, XA_PRESENT) != in check_multi_find_1()
1185 XA_BUG_ON(xa, (index < multi) || (index >= next)); in check_multi_find_1()
1186 XA_BUG_ON(xa, xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT) != in check_multi_find_1()
1188 XA_BUG_ON(xa, index != next); in check_multi_find_1()
1189 XA_BUG_ON(xa, xa_find_after(xa, &index, next, XA_PRESENT) != NULL); in check_multi_find_1()
1190 XA_BUG_ON(xa, index != next); in check_multi_find_1()
1192 xa_erase_index(xa, multi); in check_multi_find_1()
1193 xa_erase_index(xa, next); in check_multi_find_1()
1194 xa_erase_index(xa, next + 1); in check_multi_find_1()
1195 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_find_1()
1199 static noinline void check_multi_find_2(struct xarray *xa) in check_multi_find_2() argument
1205 for (i = 0; i < max_order; i++) { in check_multi_find_2()
1207 for (j = 0; j < index; j++) { in check_multi_find_2()
1208 XA_STATE(xas, xa, j + index); in check_multi_find_2()
1209 xa_store_index(xa, index - 1, GFP_KERNEL); in check_multi_find_2()
1210 xa_store_order(xa, index, i, xa_mk_index(index), in check_multi_find_2()
1214 xa_erase_index(xa, index); in check_multi_find_2()
1217 xa_erase_index(xa, index - 1); in check_multi_find_2()
1218 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_find_2()
1223 static noinline void check_multi_find_3(struct xarray *xa) in check_multi_find_3() argument
1230 XA_BUG_ON(xa, !xa_empty(xa)); in check_multi_find_3()
1231 xa_store_order(xa, 0, order - 4, xa_mk_index(0), GFP_KERNEL); in check_multi_find_3()
1232 XA_BUG_ON(xa, xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT)); in check_multi_find_3()
1233 xa_erase_index(xa, 0); in check_multi_find_3()
1237 static noinline void check_find_1(struct xarray *xa) in check_find_1() argument
1241 XA_BUG_ON(xa, !xa_empty(xa)); in check_find_1()
1244 * Check xa_find with all pairs between 0 and 99 inclusive, in check_find_1()
1245 * starting at every index between 0 and 99 in check_find_1()
1247 for (i = 0; i < 100; i++) { in check_find_1()
1248 XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL); in check_find_1()
1249 xa_set_mark(xa, i, XA_MARK_0); in check_find_1()
1250 for (j = 0; j < i; j++) { in check_find_1()
1251 XA_BUG_ON(xa, xa_store_index(xa, j, GFP_KERNEL) != in check_find_1()
1253 xa_set_mark(xa, j, XA_MARK_0); in check_find_1()
1254 for (k = 0; k < 100; k++) { in check_find_1()
1256 void *entry = xa_find(xa, &index, ULONG_MAX, in check_find_1()
1259 XA_BUG_ON(xa, index != j); in check_find_1()
1261 XA_BUG_ON(xa, index != i); in check_find_1()
1263 XA_BUG_ON(xa, entry != NULL); in check_find_1()
1266 entry = xa_find(xa, &index, ULONG_MAX, in check_find_1()
1269 XA_BUG_ON(xa, index != j); in check_find_1()
1271 XA_BUG_ON(xa, index != i); in check_find_1()
1273 XA_BUG_ON(xa, entry != NULL); in check_find_1()
1275 xa_erase_index(xa, j); in check_find_1()
1276 XA_BUG_ON(xa, xa_get_mark(xa, j, XA_MARK_0)); in check_find_1()
1277 XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_0)); in check_find_1()
1279 xa_erase_index(xa, i); in check_find_1()
1280 XA_BUG_ON(xa, xa_get_mark(xa, i, XA_MARK_0)); in check_find_1()
1282 XA_BUG_ON(xa, !xa_empty(xa)); in check_find_1()
1285 static noinline void check_find_2(struct xarray *xa) in check_find_2() argument
1290 xa_for_each(xa, index, entry) { in check_find_2()
1291 XA_BUG_ON(xa, true); in check_find_2()
1294 for (i = 0; i < 1024; i++) { in check_find_2()
1295 xa_store_index(xa, index, GFP_KERNEL); in check_find_2()
1296 j = 0; in check_find_2()
1297 xa_for_each(xa, index, entry) { in check_find_2()
1298 XA_BUG_ON(xa, xa_mk_index(index) != entry); in check_find_2()
1299 XA_BUG_ON(xa, index != j++); in check_find_2()
1303 xa_destroy(xa); in check_find_2()
1306 static noinline void check_find_3(struct xarray *xa) in check_find_3() argument
1308 XA_STATE(xas, xa, 0); in check_find_3()
1312 for (i = 0; i < 100; i++) { in check_find_3()
1313 for (j = 0; j < 100; j++) { in check_find_3()
1315 for (k = 0; k < 100; k++) { in check_find_3()
1320 XA_BUG_ON(xa, in check_find_3()
1325 xa_store_index(xa, i, GFP_KERNEL); in check_find_3()
1326 xa_set_mark(xa, i, XA_MARK_0); in check_find_3()
1328 xa_destroy(xa); in check_find_3()
1331 static noinline void check_find_4(struct xarray *xa) in check_find_4() argument
1333 unsigned long index = 0; in check_find_4()
1336 xa_store_index(xa, ULONG_MAX, GFP_KERNEL); in check_find_4()
1338 entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT); in check_find_4()
1339 XA_BUG_ON(xa, entry != xa_mk_index(ULONG_MAX)); in check_find_4()
1341 entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT); in check_find_4()
1342 XA_BUG_ON(xa, entry); in check_find_4()
1344 xa_erase_index(xa, ULONG_MAX); in check_find_4()
1347 static noinline void check_find(struct xarray *xa) in check_find() argument
1351 check_find_1(xa); in check_find()
1352 check_find_2(xa); in check_find()
1353 check_find_3(xa); in check_find()
1354 check_find_4(xa); in check_find()
1357 check_multi_find_1(xa, i); in check_find()
1358 check_multi_find_2(xa); in check_find()
1359 check_multi_find_3(xa); in check_find()
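/*
 * Sketch of the lookup calls the find tests above are poking at, assuming
 * a caller that wants the first present entry and then everything after
 * it.  @index is updated in place to the index that was found.
 */
static void example_find(struct xarray *xa)
{
	unsigned long index = 0;
	void *entry;

	entry = xa_find(xa, &index, ULONG_MAX, XA_PRESENT);
	while (entry) {
		pr_info("entry at index %lu\n", index);
		entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT);
	}

	/* Equivalent convenience form. */
	xa_for_each(xa, index, entry)
		pr_info("entry at index %lu\n", index);
}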
1363 static noinline unsigned long xa_find_entry(struct xarray *xa, void *item) in xa_find_entry() argument
1365 XA_STATE(xas, xa, 0); in xa_find_entry()
1366 unsigned int checked = 0; in xa_find_entry()
1376 if ((checked % 4) != 0) in xa_find_entry()
1385 static noinline void check_find_entry(struct xarray *xa) in check_find_entry() argument
1391 for (order = 0; order < 20; order++) { in check_find_entry()
1392 for (offset = 0; offset < (1UL << (order + 3)); in check_find_entry()
1394 for (index = 0; index < (1UL << (order + 5)); in check_find_entry()
1396 xa_store_order(xa, index, order, in check_find_entry()
1398 XA_BUG_ON(xa, xa_load(xa, index) != in check_find_entry()
1400 XA_BUG_ON(xa, xa_find_entry(xa, in check_find_entry()
1403 XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1); in check_find_entry()
1404 xa_destroy(xa); in check_find_entry()
1409 XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1); in check_find_entry()
1410 xa_store_index(xa, ULONG_MAX, GFP_KERNEL); in check_find_entry()
1411 XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1); in check_find_entry()
1412 XA_BUG_ON(xa, xa_find_entry(xa, xa_mk_index(ULONG_MAX)) != -1); in check_find_entry()
1413 xa_erase_index(xa, ULONG_MAX); in check_find_entry()
1414 XA_BUG_ON(xa, !xa_empty(xa)); in check_find_entry()
1417 static noinline void check_pause(struct xarray *xa) in check_pause() argument
1419 XA_STATE(xas, xa, 0); in check_pause()
1423 unsigned int count = 0; in check_pause()
1425 for (order = 0; order < order_limit; order++) { in check_pause()
1426 XA_BUG_ON(xa, xa_store_order(xa, index, order, in check_pause()
1433 XA_BUG_ON(xa, entry != xa_mk_index(1UL << count)); in check_pause()
1437 XA_BUG_ON(xa, count != order_limit); in check_pause()
1439 count = 0; in check_pause()
1440 xas_set(&xas, 0); in check_pause()
1443 XA_BUG_ON(xa, entry != xa_mk_index(1UL << count)); in check_pause()
1448 XA_BUG_ON(xa, count != order_limit); in check_pause()
1450 xa_destroy(xa); in check_pause()
1452 index = 0; in check_pause()
1453 for (order = order_limit - 1; order >= 0; order--) { in check_pause()
1454 XA_BUG_ON(xa, xa_store_order(xa, index, order, in check_pause()
1459 index = 0; in check_pause()
1460 count = 0; in check_pause()
1461 xas_set(&xas, 0); in check_pause()
1464 XA_BUG_ON(xa, entry != xa_mk_index(index)); in check_pause()
1469 XA_BUG_ON(xa, count != order_limit); in check_pause()
1471 index = 0; in check_pause()
1472 count = 0; in check_pause()
1477 XA_BUG_ON(xa, entry != xa_mk_index(index)); in check_pause()
1483 XA_BUG_ON(xa, count != order_limit); in check_pause()
1485 xa_destroy(xa); in check_pause()
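/*
 * check_pause() above targets the pattern sketched here: during a long
 * lockless walk, xas_pause() parks the cursor so the RCU lock can be
 * dropped and the walk resumed at the next index.  The batch size of 64
 * is an arbitrary assumption.
 */
static void example_paused_walk(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	unsigned int seen = 0;
	void *entry;

	rcu_read_lock();
	xas_for_each(&xas, entry, ULONG_MAX) {
		/* process entry ... */
		if (++seen % 64)
			continue;
		xas_pause(&xas);
		rcu_read_unlock();
		cond_resched();
		rcu_read_lock();
	}
	rcu_read_unlock();
}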
1489 static noinline void check_move_tiny(struct xarray *xa) in check_move_tiny() argument
1491 XA_STATE(xas, xa, 0); in check_move_tiny()
1493 XA_BUG_ON(xa, !xa_empty(xa)); in check_move_tiny()
1495 XA_BUG_ON(xa, xas_next(&xas) != NULL); in check_move_tiny()
1496 XA_BUG_ON(xa, xas_next(&xas) != NULL); in check_move_tiny()
1498 xa_store_index(xa, 0, GFP_KERNEL); in check_move_tiny()
1500 xas_set(&xas, 0); in check_move_tiny()
1501 XA_BUG_ON(xa, xas_next(&xas) != xa_mk_index(0)); in check_move_tiny()
1502 XA_BUG_ON(xa, xas_next(&xas) != NULL); in check_move_tiny()
1503 xas_set(&xas, 0); in check_move_tiny()
1504 XA_BUG_ON(xa, xas_prev(&xas) != xa_mk_index(0)); in check_move_tiny()
1505 XA_BUG_ON(xa, xas_prev(&xas) != NULL); in check_move_tiny()
1507 xa_erase_index(xa, 0); in check_move_tiny()
1508 XA_BUG_ON(xa, !xa_empty(xa)); in check_move_tiny()
1511 static noinline void check_move_max(struct xarray *xa) in check_move_max() argument
1513 XA_STATE(xas, xa, 0); in check_move_max()
1515 xa_store_index(xa, ULONG_MAX, GFP_KERNEL); in check_move_max()
1517 XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_index(ULONG_MAX)); in check_move_max()
1518 XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != NULL); in check_move_max()
1521 xas_set(&xas, 0); in check_move_max()
1523 XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_index(ULONG_MAX)); in check_move_max()
1525 XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != NULL); in check_move_max()
1528 xa_erase_index(xa, ULONG_MAX); in check_move_max()
1529 XA_BUG_ON(xa, !xa_empty(xa)); in check_move_max()
1532 static noinline void check_move_small(struct xarray *xa, unsigned long idx) in check_move_small() argument
1534 XA_STATE(xas, xa, 0); in check_move_small()
1537 xa_store_index(xa, 0, GFP_KERNEL); in check_move_small()
1538 xa_store_index(xa, idx, GFP_KERNEL); in check_move_small()
1541 for (i = 0; i < idx * 4; i++) { in check_move_small()
1544 XA_BUG_ON(xa, xas.xa_node == XAS_RESTART); in check_move_small()
1545 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1546 if (i == 0 || i == idx) in check_move_small()
1547 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move_small()
1549 XA_BUG_ON(xa, entry != NULL); in check_move_small()
1552 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1558 XA_BUG_ON(xa, xas.xa_node == XAS_RESTART); in check_move_small()
1559 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1560 if (i == 0 || i == idx) in check_move_small()
1561 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move_small()
1563 XA_BUG_ON(xa, entry != NULL); in check_move_small()
1564 } while (i > 0); in check_move_small()
1567 XA_BUG_ON(xa, xas_next(&xas) != NULL); in check_move_small()
1568 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move_small()
1569 XA_BUG_ON(xa, xas_next(&xas) != xa_mk_value(0)); in check_move_small()
1570 XA_BUG_ON(xa, xas.xa_index != 0); in check_move_small()
1571 XA_BUG_ON(xa, xas_prev(&xas) != NULL); in check_move_small()
1572 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move_small()
1575 xa_erase_index(xa, 0); in check_move_small()
1576 xa_erase_index(xa, idx); in check_move_small()
1577 XA_BUG_ON(xa, !xa_empty(xa)); in check_move_small()
1580 static noinline void check_move(struct xarray *xa) in check_move() argument
1582 XA_STATE(xas, xa, (1 << 16) - 1); in check_move()
1585 for (i = 0; i < (1 << 16); i++) in check_move()
1586 XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL); in check_move()
1592 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move()
1593 XA_BUG_ON(xa, i != xas.xa_index); in check_move()
1594 } while (i != 0); in check_move()
1596 XA_BUG_ON(xa, xas_prev(&xas) != NULL); in check_move()
1597 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move()
1601 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move()
1602 XA_BUG_ON(xa, i != xas.xa_index); in check_move()
1608 xa_erase_index(xa, i); in check_move()
1617 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move()
1619 XA_BUG_ON(xa, entry != NULL); in check_move()
1620 XA_BUG_ON(xa, i != xas.xa_index); in check_move()
1621 } while (i != 0); in check_move()
1623 XA_BUG_ON(xa, xas_prev(&xas) != NULL); in check_move()
1624 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move()
1629 XA_BUG_ON(xa, entry != xa_mk_index(i)); in check_move()
1631 XA_BUG_ON(xa, entry != NULL); in check_move()
1632 XA_BUG_ON(xa, i != xas.xa_index); in check_move()
1637 xa_destroy(xa); in check_move()
1639 check_move_tiny(xa); in check_move()
1640 check_move_max(xa); in check_move()
1642 for (i = 0; i < 16; i++) in check_move()
1643 check_move_small(xa, 1UL << i); in check_move()
1646 check_move_small(xa, (1UL << i) - 1); in check_move()
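/*
 * Rough sketch of the cursor movement check_move() covers: unlike the
 * find/for_each calls, xas_next() and xas_prev() step exactly one index
 * per call and return NULL for empty slots rather than skipping them.
 */
static void example_step(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	void *entry;
	int i;

	rcu_read_lock();
	for (i = 0; i < 8; i++) {
		/* First call returns the entry at index 0, then 1, 2, ... */
		entry = xas_next(&xas);
		if (!entry)
			continue;	/* hole at this index */
		/* use entry */
	}
	rcu_read_unlock();
}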
1649 static noinline void xa_store_many_order(struct xarray *xa, in xa_store_many_order() argument
1652 XA_STATE_ORDER(xas, xa, index, order); in xa_store_many_order()
1653 unsigned int i = 0; in xa_store_many_order()
1657 XA_BUG_ON(xa, xas_find_conflict(&xas)); in xa_store_many_order()
1661 for (i = 0; i < (1U << order); i++) { in xa_store_many_order()
1662 XA_BUG_ON(xa, xas_store(&xas, xa_mk_index(index + i))); in xa_store_many_order()
1669 XA_BUG_ON(xa, xas_error(&xas)); in xa_store_many_order()
1672 static noinline void check_create_range_1(struct xarray *xa, in check_create_range_1() argument
1677 xa_store_many_order(xa, index, order); in check_create_range_1()
1679 xa_erase_index(xa, i); in check_create_range_1()
1680 XA_BUG_ON(xa, !xa_empty(xa)); in check_create_range_1()
1683 static noinline void check_create_range_2(struct xarray *xa, unsigned order) in check_create_range_2() argument
1688 for (i = 0; i < nr * nr; i += nr) in check_create_range_2()
1689 xa_store_many_order(xa, i, order); in check_create_range_2()
1690 for (i = 0; i < nr * nr; i++) in check_create_range_2()
1691 xa_erase_index(xa, i); in check_create_range_2()
1692 XA_BUG_ON(xa, !xa_empty(xa)); in check_create_range_2()
1697 XA_STATE(xas, NULL, 0); in check_create_range_3()
1703 static noinline void check_create_range_4(struct xarray *xa, in check_create_range_4() argument
1706 XA_STATE_ORDER(xas, xa, index, order); in check_create_range_4()
1708 unsigned long i = 0; in check_create_range_4()
1710 xa_store_index(xa, index, GFP_KERNEL); in check_create_range_4()
1716 for (i = 0; i < (1UL << order); i++) { in check_create_range_4()
1719 XA_BUG_ON(xa, old != xa_mk_index(base + i)); in check_create_range_4()
1721 XA_BUG_ON(xa, old != NULL); in check_create_range_4()
1728 XA_BUG_ON(xa, xas_error(&xas)); in check_create_range_4()
1731 xa_erase_index(xa, i); in check_create_range_4()
1732 XA_BUG_ON(xa, !xa_empty(xa)); in check_create_range_4()
1735 static noinline void check_create_range_5(struct xarray *xa, in check_create_range_5() argument
1738 XA_STATE_ORDER(xas, xa, index, order); in check_create_range_5()
1741 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_create_range_5()
1743 for (i = 0; i < order + 10; i++) { in check_create_range_5()
1751 xa_destroy(xa); in check_create_range_5()
1754 static noinline void check_create_range(struct xarray *xa) in check_create_range() argument
1759 for (order = 0; order < max_order; order++) { in check_create_range()
1760 check_create_range_1(xa, 0, order); in check_create_range()
1761 check_create_range_1(xa, 1U << order, order); in check_create_range()
1762 check_create_range_1(xa, 2U << order, order); in check_create_range()
1763 check_create_range_1(xa, 3U << order, order); in check_create_range()
1764 check_create_range_1(xa, 1U << 24, order); in check_create_range()
1766 check_create_range_2(xa, order); in check_create_range()
1768 check_create_range_4(xa, 0, order); in check_create_range()
1769 check_create_range_4(xa, 1U << order, order); in check_create_range()
1770 check_create_range_4(xa, 2U << order, order); in check_create_range()
1771 check_create_range_4(xa, 3U << order, order); in check_create_range()
1772 check_create_range_4(xa, 1U << 24, order); in check_create_range()
1774 check_create_range_4(xa, 1, order); in check_create_range()
1775 check_create_range_4(xa, (1U << order) + 1, order); in check_create_range()
1776 check_create_range_4(xa, (2U << order) + 1, order); in check_create_range()
1777 check_create_range_4(xa, (2U << order) - 1, order); in check_create_range()
1778 check_create_range_4(xa, (3U << order) + 1, order); in check_create_range()
1779 check_create_range_4(xa, (3U << order) - 1, order); in check_create_range()
1780 check_create_range_4(xa, (1U << 24) + 1, order); in check_create_range()
1782 check_create_range_5(xa, 0, order); in check_create_range()
1783 check_create_range_5(xa, (1U << order), order); in check_create_range()
1789 static noinline void __check_store_range(struct xarray *xa, unsigned long first, in __check_store_range() argument
1793 xa_store_range(xa, first, last, xa_mk_index(first), GFP_KERNEL); in __check_store_range()
1795 XA_BUG_ON(xa, xa_load(xa, first) != xa_mk_index(first)); in __check_store_range()
1796 XA_BUG_ON(xa, xa_load(xa, last) != xa_mk_index(first)); in __check_store_range()
1797 XA_BUG_ON(xa, xa_load(xa, first - 1) != NULL); in __check_store_range()
1798 XA_BUG_ON(xa, xa_load(xa, last + 1) != NULL); in __check_store_range()
1800 xa_store_range(xa, first, last, NULL, GFP_KERNEL); in __check_store_range()
1803 XA_BUG_ON(xa, !xa_empty(xa)); in __check_store_range()
1806 static noinline void check_store_range(struct xarray *xa) in check_store_range() argument
1810 for (i = 0; i < 128; i++) { in check_store_range()
1812 __check_store_range(xa, i, j); in check_store_range()
1813 __check_store_range(xa, 128 + i, 128 + j); in check_store_range()
1814 __check_store_range(xa, 4095 + i, 4095 + j); in check_store_range()
1815 __check_store_range(xa, 4096 + i, 4096 + j); in check_store_range()
1816 __check_store_range(xa, 123456 + i, 123456 + j); in check_store_range()
1817 __check_store_range(xa, (1 << 24) + i, (1 << 24) + j); in check_store_range()
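/*
 * Sketch of xa_store_range() as exercised above (CONFIG_XARRAY_MULTI):
 * unlike the order-based interface it takes an arbitrary inclusive range,
 * and storing NULL over the same range removes the entry again.
 */
static void example_store_range(struct xarray *xa)
{
	xa_store_range(xa, 64, 127, xa_mk_value(64), GFP_KERNEL);

	/* Any index inside the range finds the one entry. */
	WARN_ON(xa_load(xa, 100) != xa_mk_value(64));

	xa_store_range(xa, 64, 127, NULL, GFP_KERNEL);
	WARN_ON(xa_load(xa, 100) != NULL);
}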
1823 static void check_split_1(struct xarray *xa, unsigned long index, in check_split_1() argument
1826 XA_STATE_ORDER(xas, xa, index, new_order); in check_split_1()
1830 xa_store_order(xa, index, order, xa, GFP_KERNEL); in check_split_1()
1831 xa_set_mark(xa, index, XA_MARK_1); in check_split_1()
1833 xas_split_alloc(&xas, xa, order, GFP_KERNEL); in check_split_1()
1835 xas_split(&xas, xa, order); in check_split_1()
1836 for (i = 0; i < (1 << order); i += (1 << new_order)) in check_split_1()
1837 __xa_store(xa, index + i, xa_mk_index(index + i), 0); in check_split_1()
1840 for (i = 0; i < (1 << order); i++) { in check_split_1()
1842 XA_BUG_ON(xa, xa_load(xa, index + i) != xa_mk_index(val)); in check_split_1()
1845 xa_set_mark(xa, index, XA_MARK_0); in check_split_1()
1846 XA_BUG_ON(xa, !xa_get_mark(xa, index, XA_MARK_0)); in check_split_1()
1848 xas_set_order(&xas, index, 0); in check_split_1()
1849 found = 0; in check_split_1()
1853 XA_BUG_ON(xa, xa_is_internal(entry)); in check_split_1()
1856 XA_BUG_ON(xa, found != 1 << (order - new_order)); in check_split_1()
1858 xa_destroy(xa); in check_split_1()
1861 static noinline void check_split(struct xarray *xa) in check_split() argument
1865 XA_BUG_ON(xa, !xa_empty(xa)); in check_split()
1868 for (new_order = 0; new_order < order; new_order++) { in check_split()
1869 check_split_1(xa, 0, order, new_order); in check_split()
1870 check_split_1(xa, 1UL << order, order, new_order); in check_split()
1871 check_split_1(xa, 3UL << order, order, new_order); in check_split()
1876 static void check_split(struct xarray *xa) { } in check_split() argument
1879 static void check_align_1(struct xarray *xa, char *name) in check_align_1() argument
1886 for (i = 0; i < 8; i++) { in check_align_1()
1887 XA_BUG_ON(xa, xa_alloc(xa, &id, name + i, xa_limit_32b, in check_align_1()
1888 GFP_KERNEL) != 0); in check_align_1()
1889 XA_BUG_ON(xa, id != i); in check_align_1()
1891 xa_for_each(xa, index, entry) in check_align_1()
1892 XA_BUG_ON(xa, xa_is_err(entry)); in check_align_1()
1893 xa_destroy(xa); in check_align_1()
1900 static void check_align_2(struct xarray *xa, char *name) in check_align_2() argument
1904 XA_BUG_ON(xa, !xa_empty(xa)); in check_align_2()
1906 for (i = 0; i < 8; i++) { in check_align_2()
1907 XA_BUG_ON(xa, xa_store(xa, 0, name + i, GFP_KERNEL) != NULL); in check_align_2()
1908 xa_erase(xa, 0); in check_align_2()
1911 for (i = 0; i < 8; i++) { in check_align_2()
1912 XA_BUG_ON(xa, xa_reserve(xa, 0, GFP_KERNEL) != 0); in check_align_2()
1913 XA_BUG_ON(xa, xa_store(xa, 0, name + i, 0) != NULL); in check_align_2()
1914 xa_erase(xa, 0); in check_align_2()
1917 XA_BUG_ON(xa, !xa_empty(xa)); in check_align_2()
1920 static noinline void check_align(struct xarray *xa) in check_align() argument
1924 check_align_1(xa, name); in check_align()
1925 check_align_1(xa, name + 1); in check_align()
1926 check_align_1(xa, name + 2); in check_align()
1927 check_align_1(xa, name + 3); in check_align()
1928 check_align_2(xa, name); in check_align()
1944 static noinline void shadow_remove(struct xarray *xa) in shadow_remove() argument
1948 xa_lock(xa); in shadow_remove()
1951 XA_BUG_ON(xa, node->array != xa); in shadow_remove()
1955 xa_unlock(xa); in shadow_remove()
1958 static noinline void check_workingset(struct xarray *xa, unsigned long index) in check_workingset() argument
1960 XA_STATE(xas, xa, index); in check_workingset()
1965 xas_store(&xas, xa_mk_value(0)); in check_workingset()
1971 XA_BUG_ON(xa, list_empty(&shadow_nodes)); in check_workingset()
1976 XA_BUG_ON(xa, !list_empty(&shadow_nodes)); in check_workingset()
1980 XA_BUG_ON(xa, list_empty(&shadow_nodes)); in check_workingset()
1982 shadow_remove(xa); in check_workingset()
1983 XA_BUG_ON(xa, !list_empty(&shadow_nodes)); in check_workingset()
1984 XA_BUG_ON(xa, !xa_empty(xa)); in check_workingset()
1991 static noinline void check_account(struct xarray *xa) in check_account() argument
1997 XA_STATE(xas, xa, 1 << order); in check_account()
1999 xa_store_order(xa, 0, order, xa, GFP_KERNEL); in check_account()
2002 XA_BUG_ON(xa, xas.xa_node->count == 0); in check_account()
2003 XA_BUG_ON(xa, xas.xa_node->count > (1 << order)); in check_account()
2004 XA_BUG_ON(xa, xas.xa_node->nr_values != 0); in check_account()
2007 xa_store_order(xa, 1 << order, order, xa_mk_index(1UL << order), in check_account()
2009 XA_BUG_ON(xa, xas.xa_node->count != xas.xa_node->nr_values * 2); in check_account()
2011 xa_erase(xa, 1 << order); in check_account()
2012 XA_BUG_ON(xa, xas.xa_node->nr_values != 0); in check_account()
2014 xa_erase(xa, 0); in check_account()
2015 XA_BUG_ON(xa, !xa_empty(xa)); in check_account()
2020 static noinline void check_get_order(struct xarray *xa) in check_get_order() argument
2026 for (i = 0; i < 3; i++) in check_get_order()
2027 XA_BUG_ON(xa, xa_get_order(xa, i) != 0); in check_get_order()
2029 for (order = 0; order < max_order; order++) { in check_get_order()
2030 for (i = 0; i < 10; i++) { in check_get_order()
2031 xa_store_order(xa, i << order, order, in check_get_order()
2034 XA_BUG_ON(xa, xa_get_order(xa, j) != order); in check_get_order()
2035 xa_erase(xa, i << order); in check_get_order()
2040 static noinline void check_xas_get_order(struct xarray *xa) in check_xas_get_order() argument
2042 XA_STATE(xas, xa, 0); in check_xas_get_order()
2048 for (order = 0; order < max_order; order++) { in check_xas_get_order()
2049 for (i = 0; i < 10; i++) { in check_xas_get_order()
2058 xas_set_order(&xas, j, 0); in check_xas_get_order()
2061 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_get_order()
2073 static noinline void check_xas_conflict_get_order(struct xarray *xa) in check_xas_conflict_get_order() argument
2075 XA_STATE(xas, xa, 0); in check_xas_conflict_get_order()
2083 for (order = 0; order < max_order; order++) { in check_xas_conflict_get_order()
2084 for (i = 0; i < 10; i++) { in check_xas_conflict_get_order()
2096 for (k = 0; k < order; k++) { in check_xas_conflict_get_order()
2097 only_once = 0; in check_xas_conflict_get_order()
2101 XA_BUG_ON(xa, entry != xa_mk_value(i)); in check_xas_conflict_get_order()
2102 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_conflict_get_order()
2105 XA_BUG_ON(xa, only_once != 1); in check_xas_conflict_get_order()
2110 only_once = 0; in check_xas_conflict_get_order()
2114 XA_BUG_ON(xa, entry != xa_mk_value(i)); in check_xas_conflict_get_order()
2115 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_conflict_get_order()
2118 XA_BUG_ON(xa, only_once != 1); in check_xas_conflict_get_order()
2131 static noinline void check_destroy(struct xarray *xa) in check_destroy() argument
2135 XA_BUG_ON(xa, !xa_empty(xa)); in check_destroy()
2138 xa_destroy(xa); in check_destroy()
2139 XA_BUG_ON(xa, !xa_empty(xa)); in check_destroy()
2142 for (index = 0; index < 1000; index++) { in check_destroy()
2143 xa_store_index(xa, index, GFP_KERNEL); in check_destroy()
2144 XA_BUG_ON(xa, xa_empty(xa)); in check_destroy()
2145 xa_destroy(xa); in check_destroy()
2146 XA_BUG_ON(xa, !xa_empty(xa)); in check_destroy()
2150 xa_store(xa, ULONG_MAX, xa, GFP_KERNEL); in check_destroy()
2151 XA_BUG_ON(xa, xa_empty(xa)); in check_destroy()
2152 xa_destroy(xa); in check_destroy()
2153 XA_BUG_ON(xa, !xa_empty(xa)); in check_destroy()
2157 xa_store_order(xa, 1 << 11, 11, xa, GFP_KERNEL); in check_destroy()
2158 XA_BUG_ON(xa, xa_empty(xa)); in check_destroy()
2159 xa_destroy(xa); in check_destroy()
2160 XA_BUG_ON(xa, !xa_empty(xa)); in check_destroy()
2197 check_workingset(&array, 0); in xarray_checks()
2202 return (tests_run == tests_passed) ? 0 : -EINVAL; in xarray_checks()