/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#define _GNU_SOURCE
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <linux/dma-buf.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

#include <gbm.h>

#define CHECK(cond) do {\
        if (!(cond)) {\
                printf("CHECK failed in %s() %s:%d\n", __func__, __FILE__, __LINE__);\
                return 0;\
        }\
} while(0)

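/* Retry a system call while it fails with EINTR, giving up after 100 attempts. */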
#define HANDLE_EINTR(x) \
        ({ \
                int eintr_wrapper_counter = 0; \
                int eintr_wrapper_result; \
                do { \
                        eintr_wrapper_result = (x); \
                } while (eintr_wrapper_result == -1 && errno == EINTR && \
                         eintr_wrapper_counter++ < 100); \
                eintr_wrapper_result; \
        })

#define ARRAY_SIZE(A) (sizeof(A)/sizeof(*(A)))

#define ENODRM -1

static int fd;
static struct gbm_device *gbm;

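/* Formats exercised by the allocation and import tests. */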
static const uint32_t format_list[] = {
        GBM_FORMAT_R8,
        GBM_FORMAT_RGB565,
        GBM_FORMAT_BGR888,
        GBM_FORMAT_XRGB8888,
        GBM_FORMAT_XBGR8888,
        GBM_FORMAT_ARGB8888,
        GBM_FORMAT_ABGR8888,
        GBM_FORMAT_XRGB2101010,
        GBM_FORMAT_XBGR2101010,
        GBM_FORMAT_ARGB2101010,
        GBM_FORMAT_ABGR2101010,
        GBM_FORMAT_ABGR16161616F,
        GBM_FORMAT_NV12,
        GBM_FORMAT_YVU420,
};

struct format_info {
        uint32_t pixel_format;
        uint32_t bits_per_pixel;
        uint32_t data_mask;
};

/* Bits per pixel and data mask (bits carrying pixel data) for each mappable format. */
static const struct format_info mappable_format_list[] = {
        {GBM_FORMAT_R8, 8, 0xFF},
        {GBM_FORMAT_RGB565, 16, 0xFFFF},
        {GBM_FORMAT_BGR888, 24, 0xFFFFFF},
        {GBM_FORMAT_XRGB8888, 32, 0x00FFFFFF},
        {GBM_FORMAT_XBGR8888, 32, 0x00FFFFFF},
        {GBM_FORMAT_ARGB8888, 32, 0xFFFFFFFF},
        {GBM_FORMAT_ABGR8888, 32, 0xFFFFFFFF},
        {GBM_FORMAT_XRGB2101010, 32, 0x3FFFFFFF},
        {GBM_FORMAT_XBGR2101010, 32, 0x3FFFFFFF},
        {GBM_FORMAT_ARGB2101010, 32, 0xFFFFFFFF},
        {GBM_FORMAT_ABGR2101010, 32, 0xFFFFFFFF},
};

static const uint32_t usage_list[] = {
        GBM_BO_USE_SCANOUT,
        GBM_BO_USE_CURSOR_64X64,
        GBM_BO_USE_RENDERING,
        GBM_BO_USE_LINEAR,
        GBM_BO_USE_SW_READ_OFTEN,
        GBM_BO_USE_SW_WRITE_OFTEN,
};

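/* Usage combinations expected to yield CPU-mappable buffers. */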
static const uint32_t mappable_usage_list[] = {
        GBM_BO_USE_SCANOUT | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
        GBM_BO_USE_RENDERING | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
        GBM_BO_USE_TEXTURING | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
};

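/*
 * Sanity-checks a buffer object: dimensions, stride, format, plane count,
 * and per-plane handles, fds, offsets, and strides.
 */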
static int check_bo(struct gbm_bo *bo)
{
        uint32_t format;
        size_t num_planes, plane;
        int fd;
        int i;

        CHECK(bo);
        CHECK(gbm_bo_get_width(bo) >= 0);
        CHECK(gbm_bo_get_height(bo) >= 0);
        CHECK(gbm_bo_get_stride(bo) >= gbm_bo_get_width(bo));

        format = gbm_bo_get_format(bo);
        for (i = 0; i < ARRAY_SIZE(format_list); i++)
                if (format_list[i] == format)
                        break;
        CHECK(i < ARRAY_SIZE(format_list));

        num_planes = gbm_bo_get_plane_count(bo);
        if (format == GBM_FORMAT_NV12)
                CHECK(num_planes == 2);
        else if (format == GBM_FORMAT_YVU420)
                CHECK(num_planes == 3);
        else
                CHECK(num_planes == 1);

        CHECK(gbm_bo_get_handle_for_plane(bo, 0).u32 == gbm_bo_get_handle(bo).u32);

        CHECK(gbm_bo_get_offset(bo, 0) == 0);
        CHECK(gbm_bo_get_stride_for_plane(bo, 0) == gbm_bo_get_stride(bo));

        for (plane = 0; plane < num_planes; plane++) {
                CHECK(gbm_bo_get_handle_for_plane(bo, plane).u32);

                fd = gbm_bo_get_fd_for_plane(bo, plane);
                CHECK(fd > 0);
                close(fd);

                gbm_bo_get_offset(bo, plane);
                CHECK(gbm_bo_get_stride_for_plane(bo, plane));
        }

        return 1;
}

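/* Returns the first connector that is connected and has at least one mode, or NULL. */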
static drmModeConnector *find_first_connected_connector(int fd,
                                                        drmModeRes *resources)
{
        int i;
        for (i = 0; i < resources->count_connectors; i++) {
                drmModeConnector *connector;

                connector = drmModeGetConnector(fd, resources->connectors[i]);
                if (connector) {
                        if ((connector->count_modes > 0) &&
                            (connector->connection == DRM_MODE_CONNECTED))
                                return connector;

                        drmModeFreeConnector(connector);
                }
        }
        return NULL;
}

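/* Opens a DRM device, preferring one with a connected display. */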
static int drm_open()
{
        int fd;
        unsigned i;

        /* Find the first drm device with a connected display. */
        for (i = 0; i < DRM_MAX_MINOR; i++) {
                char* dev_name;
                drmModeRes *res = NULL;
                int ret;

                ret = asprintf(&dev_name, DRM_DEV_NAME, DRM_DIR_NAME, i);
                if (ret < 0)
                        continue;

                fd = open(dev_name, O_RDWR, 0);
                free(dev_name);
                if (fd < 0)
                        continue;

                res = drmModeGetResources(fd);
                if (!res) {
                        drmClose(fd);
                        continue;
                }

                if (res->count_crtcs > 0 && res->count_connectors > 0) {
                        if (find_first_connected_connector(fd, res)) {
                                drmModeFreeResources(res);
                                return fd;
                        }
                }

                drmClose(fd);
                drmModeFreeResources(res);
        }

        /*
         * If no drm device has a connected display, fall back to the first
         * drm device.
         */
        for (i = 0; i < DRM_MAX_MINOR; i++) {
                char* dev_name;
                int ret;

                ret = asprintf(&dev_name, DRM_DEV_NAME, DRM_DIR_NAME, i);
                if (ret < 0)
                        continue;

                fd = open(dev_name, O_RDWR, 0);
                free(dev_name);
                if (fd < 0)
                        continue;

                return fd;
        }

        return ENODRM;
}

/*
 * Tests initialization.
 */
static int test_init()
{
        fd = drm_open();

        CHECK(fd >= 0);

        gbm = gbm_create_device(fd);

        CHECK(gbm_device_get_fd(gbm) == fd);

        const char* backend_name = gbm_device_get_backend_name(gbm);

        CHECK(backend_name);

        return 1;
}

/*
 * Tests reinitialization.
 */
static int test_reinit()
{
        gbm_device_destroy(gbm);
        close(fd);

        fd = drm_open();
        CHECK(fd >= 0);

        gbm = gbm_create_device(fd);

        CHECK(gbm_device_get_fd(gbm) == fd);

        struct gbm_bo *bo;
        bo = gbm_bo_create(gbm, 1024, 1024, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
        CHECK(check_bo(bo));
        gbm_bo_destroy(bo);

        return 1;
}

/*
 * Tests repeated alloc/free.
 */
static int test_alloc_free()
{
        int i;
        for(i = 0; i < 1000; i++) {
                struct gbm_bo *bo;
                bo = gbm_bo_create(gbm, 1024, 1024, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
                CHECK(check_bo(bo));
                gbm_bo_destroy(bo);
        }
        return 1;
}

/*
 * Tests that we can allocate different buffer dimensions.
 */
static int test_alloc_free_sizes()
{
        int i;
        for(i = 1; i < 1920; i++) {
                struct gbm_bo *bo;
                bo = gbm_bo_create(gbm, i, i, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
                CHECK(check_bo(bo));
                gbm_bo_destroy(bo);
        }

        for(i = 1; i < 1920; i++) {
                struct gbm_bo *bo;
                bo = gbm_bo_create(gbm, i, 1, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
                CHECK(check_bo(bo));
                gbm_bo_destroy(bo);
        }

        for(i = 1; i < 1920; i++) {
                struct gbm_bo *bo;
                bo = gbm_bo_create(gbm, 1, i, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
                CHECK(check_bo(bo));
                gbm_bo_destroy(bo);
        }

        return 1;
}

/*
 * Tests that we can allocate different buffer formats.
 */
static int test_alloc_free_formats()
{
        int i;

        for(i = 0; i < ARRAY_SIZE(format_list); i++) {
                uint32_t format = format_list[i];
                if (gbm_device_is_format_supported(gbm, format, GBM_BO_USE_RENDERING)) {
                        struct gbm_bo *bo;
                        bo = gbm_bo_create(gbm, 1024, 1024, format, GBM_BO_USE_RENDERING);
                        CHECK(check_bo(bo));
                        gbm_bo_destroy(bo);
                }
        }

        return 1;
}

/*
 * Tests that we find at least one working format for each usage.
 */
static int test_alloc_free_usage()
{
        int i, j;

        for(i = 0; i < ARRAY_SIZE(usage_list); i++) {
                uint32_t usage = usage_list[i];
                int found = 0;
                for(j = 0; j < ARRAY_SIZE(format_list); j++) {
                        uint32_t format = format_list[j];
                        if (gbm_device_is_format_supported(gbm, format, usage)) {
                                struct gbm_bo *bo;
                                if (usage == GBM_BO_USE_CURSOR_64X64)
                                        bo = gbm_bo_create(gbm, 64, 64, format, usage);
                                else
                                        bo = gbm_bo_create(gbm, 1024, 1024, format, usage);
                                CHECK(check_bo(bo));
                                found = 1;
                                gbm_bo_destroy(bo);
                        }
                }
                CHECK(found);
        }

        return 1;
}

/*
 * Tests user data.
 */
static int been_there1;
static int been_there2;

void destroy_data1(struct gbm_bo *bo, void *data)
{
        been_there1 = 1;
}

void destroy_data2(struct gbm_bo *bo, void *data)
{
        been_there2 = 1;
}

static int test_user_data()
{
        struct gbm_bo *bo1, *bo2;
        char *data1, *data2;

        been_there1 = 0;
        been_there2 = 0;

        bo1 = gbm_bo_create(gbm, 1024, 1024, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
        bo2 = gbm_bo_create(gbm, 1024, 1024, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
        data1 = (char*)malloc(1);
        data2 = (char*)malloc(1);
        CHECK(data1);
        CHECK(data2);

        gbm_bo_set_user_data(bo1, data1, destroy_data1);
        gbm_bo_set_user_data(bo2, data2, destroy_data2);

        CHECK((char*)gbm_bo_get_user_data(bo1) == data1);
        CHECK((char*)gbm_bo_get_user_data(bo2) == data2);

        gbm_bo_destroy(bo1);
        CHECK(been_there1 == 1);

        gbm_bo_set_user_data(bo2, NULL, NULL);
        gbm_bo_destroy(bo2);
        CHECK(been_there2 == 0);

        free(data1);
        free(data2);

        return 1;
}

/*
 * Tests destruction.
 */
static int test_destroy()
{
        gbm_device_destroy(gbm);
        close(fd);

        return 1;
}

/*
 * Tests prime export.
 */
static int test_export()
{
        struct gbm_bo *bo;
        int prime_fd;

        bo = gbm_bo_create(gbm, 1024, 1024, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
        CHECK(check_bo(bo));

        prime_fd = gbm_bo_get_fd(bo);
        CHECK(prime_fd > 0);
        close(prime_fd);

        gbm_bo_destroy(bo);

        return 1;
}

/*
 * Tests prime import using dma-buf API.
 */
static int test_import_dmabuf()
{
        struct gbm_import_fd_data fd_data;
        struct gbm_bo *bo1, *bo2;
        const int width = 123;
        const int height = 456;
        int prime_fd;

        bo1 = gbm_bo_create(gbm, width, height, GBM_FORMAT_XRGB8888, GBM_BO_USE_RENDERING);
        CHECK(check_bo(bo1));

        prime_fd = gbm_bo_get_fd(bo1);
        CHECK(prime_fd >= 0);

        fd_data.fd = prime_fd;
        fd_data.width = width;
        fd_data.height = height;
        fd_data.stride = gbm_bo_get_stride(bo1);
        fd_data.format = GBM_FORMAT_XRGB8888;

        gbm_bo_destroy(bo1);

        bo2 = gbm_bo_import(gbm, GBM_BO_IMPORT_FD, &fd_data, GBM_BO_USE_RENDERING);
        CHECK(check_bo(bo2));
        CHECK(fd_data.width == gbm_bo_get_width(bo2));
        CHECK(fd_data.height == gbm_bo_get_height(bo2));
        CHECK(fd_data.stride == gbm_bo_get_stride(bo2));

        gbm_bo_destroy(bo2);
        close(prime_fd);

        return 1;
}

/*
 * Tests GBM_BO_IMPORT_FD_MODIFIER entry point.
 */
static int test_import_modifier()
{
        struct gbm_import_fd_modifier_data fd_data;
        struct gbm_bo *bo1, *bo2;
        const int width = 567;
        const int height = 891;
        size_t num_planes, p;
        int i;

        for (i = 0; i < ARRAY_SIZE(format_list); i++) {
                uint32_t format = format_list[i];
                if (gbm_device_is_format_supported(gbm, format, GBM_BO_USE_RENDERING)) {
                        bo1 = gbm_bo_create(gbm, width, height, format, GBM_BO_USE_RENDERING);
                        CHECK(check_bo(bo1));

                        num_planes = gbm_bo_get_plane_count(bo1);
                        fd_data.num_fds = num_planes;

                        for (p = 0; p < num_planes; p++) {
                                fd_data.fds[p] = gbm_bo_get_fd_for_plane(bo1, p);
                                CHECK(fd_data.fds[p] >= 0);

                                fd_data.strides[p] = gbm_bo_get_stride_for_plane(bo1, p);
                                fd_data.offsets[p] = gbm_bo_get_offset(bo1, p);
                        }

                        fd_data.modifier = gbm_bo_get_modifier(bo1);
                        fd_data.width = width;
                        fd_data.height = height;
                        fd_data.format = format;

                        gbm_bo_destroy(bo1);

                        bo2 = gbm_bo_import(gbm, GBM_BO_IMPORT_FD_MODIFIER, &fd_data,
                                            GBM_BO_USE_RENDERING);

                        CHECK(check_bo(bo2));
                        CHECK(fd_data.width == gbm_bo_get_width(bo2));
                        CHECK(fd_data.height == gbm_bo_get_height(bo2));
                        CHECK(fd_data.modifier == gbm_bo_get_modifier(bo2));

                        for (p = 0; p < num_planes; p++) {
                                CHECK(fd_data.strides[p] == gbm_bo_get_stride_for_plane(bo2, p));
                                CHECK(fd_data.offsets[p] == gbm_bo_get_offset(bo2, p));
                        }

                        gbm_bo_destroy(bo2);

                        for (p = 0; p < num_planes; p++)
                                close(fd_data.fds[p]);
                }
        }

        return 1;
}

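/*
 * Tests CPU access via gbm_bo_map()/gbm_bo_unmap(): writes a pixel, then
 * re-maps the buffer and verifies the written value.
 */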
static int test_gem_map()
{
        uint32_t *pixel, pixel_size;
        struct gbm_bo *bo;
        void *map_data, *addr;

        uint32_t stride = 0;
        const int width = 666;
        const int height = 777;

        addr = map_data = NULL;

        bo = gbm_bo_create(gbm, width, height, GBM_FORMAT_ARGB8888,
                           GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN);
        CHECK(check_bo(bo));

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint32_t *)addr;
        pixel_size = sizeof(*pixel);

        pixel[(height / 2) * (stride / pixel_size) + width / 2] = 0xABBAABBA;
        gbm_bo_unmap(bo, map_data);

        /* Re-map and verify the previously written data. */
        stride = 0;
        addr = map_data = NULL;

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint32_t *)addr;
        CHECK(pixel[(height / 2) * (stride / pixel_size) + width / 2] == 0xABBAABBA);

        gbm_bo_unmap(bo, map_data);
        gbm_bo_destroy(bo);

        return 1;
}

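/*
 * Tests CPU access through the exported dma-buf: mmap() the prime fd, bracket
 * accesses with DMA_BUF_IOCTL_SYNC, and verify the data again via gbm_bo_map().
 */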
static int test_dmabuf_map()
{
        uint32_t *pixel;
        struct gbm_bo *bo;
        void *addr, *map_data;
        const int width = 666;
        const int height = 777;
        int x, y, ret, prime_fd;
        struct dma_buf_sync sync_end = { 0 };
        struct dma_buf_sync sync_start = { 0 };
        uint32_t pixel_size, stride, stride_pixels, length;

        bo = gbm_bo_create(gbm, width, height, GBM_FORMAT_ARGB8888, GBM_BO_USE_LINEAR);
        CHECK(check_bo(bo));

        prime_fd = gbm_bo_get_fd(bo);
        CHECK(prime_fd > 0);

        stride = gbm_bo_get_stride(bo);
        length = (uint32_t)lseek(prime_fd, 0, SEEK_END);
        CHECK(stride > 0);
        CHECK(length > 0);

        addr = mmap(NULL, length, (PROT_READ | PROT_WRITE), MAP_SHARED, prime_fd, 0);
        CHECK(addr != MAP_FAILED);

        pixel = (uint32_t *)addr;
        pixel_size = sizeof(*pixel);
        stride_pixels = stride / pixel_size;

        sync_start.flags = DMA_BUF_SYNC_START | DMA_BUF_SYNC_WRITE;
        ret = HANDLE_EINTR(ioctl(prime_fd, DMA_BUF_IOCTL_SYNC, &sync_start));
        CHECK(ret == 0);

        for (y = 0; y < height; ++y)
                for (x = 0; x < width; ++x)
                        pixel[y * stride_pixels + x] = ((y << 16) | x);

        sync_end.flags = DMA_BUF_SYNC_END | DMA_BUF_SYNC_WRITE;
        ret = HANDLE_EINTR(ioctl(prime_fd, DMA_BUF_IOCTL_SYNC, &sync_end));
        CHECK(ret == 0);

        ret = munmap(addr, length);
        CHECK(ret == 0);

        ret = close(prime_fd);
        CHECK(ret == 0);

        prime_fd = gbm_bo_get_fd(bo);
        CHECK(prime_fd > 0);

        addr = mmap(NULL, length, (PROT_READ | PROT_WRITE), MAP_SHARED, prime_fd, 0);
        CHECK(addr != MAP_FAILED);

        pixel = (uint32_t *)addr;

        memset(&sync_start, 0, sizeof(sync_start));
        memset(&sync_end, 0, sizeof(sync_end));

        sync_start.flags = DMA_BUF_SYNC_START | DMA_BUF_SYNC_READ;
        ret = HANDLE_EINTR(ioctl(prime_fd, DMA_BUF_IOCTL_SYNC, &sync_start));
        CHECK(ret == 0);

        for (y = 0; y < height; ++y)
                for (x = 0; x < width; ++x)
                        CHECK(pixel[y * stride_pixels + x] == ((y << 16) | x));

        sync_end.flags = DMA_BUF_SYNC_END | DMA_BUF_SYNC_READ;
        ret = HANDLE_EINTR(ioctl(prime_fd, DMA_BUF_IOCTL_SYNC, &sync_end));
        CHECK(ret == 0);

        ret = munmap(addr, length);
        CHECK(ret == 0);

        ret = close(prime_fd);
        CHECK(ret == 0);

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint32_t *)addr;

        for (y = 0; y < height; ++y)
                for (x = 0; x < width; ++x)
                        CHECK(pixel[y * stride_pixels + x] == ((y << 16) | x));

        gbm_bo_unmap(bo, map_data);
        gbm_bo_destroy(bo);

        return 1;
}

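/*
 * Tests mapping a buffer created with the given usage flags: fills it with a
 * pattern through gbm_bo_map(), then re-maps and verifies the pattern.
 */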
static int test_gem_map_tiling(enum gbm_bo_flags buffer_create_flag)
{
        uint32_t *pixel, pixel_size;
        struct gbm_bo *bo;
        void *map_data, *addr;

        uint32_t stride = 0;
        uint32_t stride_pixels = 0;
        const int width = 666;
        const int height = 777;
        int x, y;

        addr = map_data = NULL;

        bo = gbm_bo_create(gbm, width, height, GBM_FORMAT_ARGB8888, buffer_create_flag);
        CHECK(check_bo(bo));

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint32_t *)addr;
        pixel_size = sizeof(*pixel);
        stride_pixels = stride / pixel_size;

        for (y = 0; y < height; ++y)
                for (x = 0; x < width; ++x)
                        pixel[y * stride_pixels + x] = ((y << 16) | x);

        gbm_bo_unmap(bo, map_data);

        /* Re-map and verify the previously written data. */
        stride = 0;
        addr = map_data = NULL;

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint32_t *)addr;
        pixel_size = sizeof(*pixel);
        stride_pixels = stride / pixel_size;

        for (y = 0; y < height; ++y)
                for (x = 0; x < width; ++x)
                        CHECK(pixel[y * stride_pixels + x] == ((y << 16) | x));

        gbm_bo_unmap(bo, map_data);
        gbm_bo_destroy(bo);

        return 1;
}

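/*
 * Tests mapping a buffer of the given mappable format: writes a byte pattern,
 * re-maps, and verifies it, masking out bits that carry no pixel data.
 */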
static int test_gem_map_format(int format_index,
                               enum gbm_bo_flags buffer_create_flag)
{
        uint8_t *pixel;
        struct gbm_bo *bo;
        void *map_data, *addr;
        uint32_t x, y, b, bytes_per_pixel, pixel_data_mask, idx;
        uint8_t byte_mask;
        uint32_t stride = 0;
        const int width = 333;
        const int height = 444;
        const uint32_t pixel_format = mappable_format_list[format_index].pixel_format;

        addr = map_data = NULL;
        if (!gbm_device_is_format_supported(gbm, pixel_format, buffer_create_flag))
                return 1;

        bo = gbm_bo_create(gbm, width, height, pixel_format, buffer_create_flag);
        CHECK(check_bo(bo));

        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint8_t *)addr;
        bytes_per_pixel = mappable_format_list[format_index].bits_per_pixel / 8;
        for (y = 0; y < height; ++y) {
                for (x = 0; x < width; ++x) {
                        idx = y * stride + x * bytes_per_pixel;
                        for (b = 0; b < bytes_per_pixel; ++b)
                                pixel[idx + b] = y ^ x ^ b;
                }
        }
        gbm_bo_unmap(bo, map_data);
        stride = 0;
        addr = map_data = NULL;

        /* Re-map and verify the previously written data. */
        addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
                          &map_data);

        CHECK(addr != MAP_FAILED);
        CHECK(map_data);
        CHECK(stride > 0);

        pixel = (uint8_t *)addr;
        pixel_data_mask = mappable_format_list[format_index].data_mask;
        for (y = 0; y < height; ++y) {
                for (x = 0; x < width; ++x) {
                        idx = y * stride + x * bytes_per_pixel;
                        for (b = 0; b < bytes_per_pixel; ++b) {
                                byte_mask = pixel_data_mask >> (8 * b);
                                CHECK((pixel[idx + b] & byte_mask) == ((uint8_t)(y ^ x ^ b) & byte_mask));
                        }
                }
        }
        gbm_bo_unmap(bo, map_data);
        stride = 0;
        addr = map_data = NULL;

        gbm_bo_destroy(bo);
        return 1;
}

int main(int argc, char *argv[])
{
        int result, i, j;

        result = test_init();
        if (result == ENODRM) {
                printf("[ FAILED ] graphics_Gbm test initialization failed\n");
                return EXIT_FAILURE;
        }

        result &= test_reinit();
        result &= test_alloc_free();
        result &= test_alloc_free_sizes();
        result &= test_alloc_free_formats();
        result &= test_alloc_free_usage();
        result &= test_user_data();
        result &= test_export();
        result &= test_import_dmabuf();
        result &= test_import_modifier();
        result &= test_gem_map();

        // TODO(crbug.com/752669)
        if (strcmp(gbm_device_get_backend_name(gbm), "tegra")) {
                for (i = 0; i < ARRAY_SIZE(mappable_usage_list); ++i) {
                        result &= test_gem_map_tiling(mappable_usage_list[i]);
                        for (j = 0; j < ARRAY_SIZE(mappable_format_list); ++j)
                                result &= test_gem_map_format(j, mappable_usage_list[i]);
                }

                result &= test_dmabuf_map();
        }
        result &= test_destroy();

        if (!result) {
                printf("[ FAILED ] graphics_Gbm test failed\n");
                return EXIT_FAILURE;
        } else {
                printf("[ PASSED ] graphics_Gbm test success\n");
                return EXIT_SUCCESS;
        }
}