/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */

/*
 * Copyright (c) 2019, Microsoft Corporation.
 *
 * Author:
 *   Iouri Tarassov <[email protected]>
 *
 * Dxgkrnl Graphics Driver
 * User mode WDDM interface definitions
 *
 */

#ifndef _D3DKMTHK_H
#define _D3DKMTHK_H

/*
 * This structure matches the definition of D3DKMTHANDLE in Windows.
 * The handle is opaque in user mode. It is used by user mode applications to
 * represent kernel mode objects, created by dxgkrnl.
 */
struct d3dkmthandle {
	union {
		struct {
			__u32 instance : 6;
			__u32 index : 24;
			__u32 unique : 2;
		};
		__u32 v;
	};
};

/*
 * VM bus messages return a Windows NTSTATUS value, which is an integer where
 * only negative values indicate failure. A positive value is a success code
 * and needs to be returned to user mode as the IOCTL return code. Negative
 * status codes are converted to Linux error codes.
 */
struct ntstatus {
	union {
		struct {
			int code : 16;
			int facility : 13;
			int customer : 1;
			int severity : 2;
		};
		int v;
	};
};

/*
 * Matches the Windows LUID definition.
 * LUID is a locally unique identifier (similar to GUID, but not global),
 * which is guaranteed to be unique until the computer is rebooted.
 */
struct winluid {
	__u32 a;
	__u32 b;
};

#define D3DDDI_MAX_WRITTEN_PRIMARIES 16

#define D3DKMT_CREATEALLOCATION_MAX 1024
#define D3DKMT_MAKERESIDENT_ALLOC_MAX (1024 * 10)
#define D3DKMT_ADAPTERS_MAX 64
#define D3DDDI_MAX_BROADCAST_CONTEXT 64
#define D3DDDI_MAX_OBJECT_WAITED_ON 32
#define D3DDDI_MAX_OBJECT_SIGNALED 32

struct d3dkmt_adapterinfo {
	struct d3dkmthandle adapter_handle;
	struct winluid adapter_luid;
	__u32 num_sources;
	__u32 present_move_regions_preferred;
};

struct d3dkmt_enumadapters2 {
	__u32 num_adapters;
	__u32 reserved;
#ifdef __KERNEL__
	struct d3dkmt_adapterinfo *adapters;
#else
	__u64 *adapters;
#endif
};

struct d3dkmt_closeadapter {
	struct d3dkmthandle adapter_handle;
};

struct d3dkmt_openadapterfromluid {
	struct winluid adapter_luid;
	struct d3dkmthandle adapter_handle;
};

struct d3dddi_allocationlist {
	struct d3dkmthandle allocation;
	union {
		struct {
			__u32 write_operation :1;
			__u32 do_not_retire_instance :1;
			__u32 offer_priority :3;
			__u32 reserved :27;
		};
		__u32 value;
	};
};

struct d3dddi_patchlocationlist {
	__u32 allocation_index;
	union {
		struct {
			__u32 slot_id:24;
			__u32 reserved:8;
		};
		__u32 value;
	};
	__u32 driver_id;
	__u32 allocation_offset;
	__u32 patch_offset;
	__u32 split_offset;
};

struct d3dkmt_createdeviceflags {
	__u32 legacy_mode:1;
	__u32 request_vSync:1;
	__u32 disable_gpu_timeout:1;
	__u32 gdi_device:1;
	__u32 reserved:28;
};

struct d3dkmt_createdevice {
	struct d3dkmthandle adapter;
	__u32 reserved3;
	struct d3dkmt_createdeviceflags flags;
	struct d3dkmthandle device;
#ifdef __KERNEL__
	void *command_buffer;
#else
	__u64 command_buffer;
#endif
	__u32 command_buffer_size;
	__u32 reserved;
#ifdef __KERNEL__
	struct d3dddi_allocationlist *allocation_list;
#else
	__u64 allocation_list;
#endif
	__u32 allocation_list_size;
	__u32 reserved1;
#ifdef __KERNEL__
	struct d3dddi_patchlocationlist *patch_location_list;
#else
	__u64 patch_location_list;
#endif
	__u32 patch_location_list_size;
	__u32 reserved2;
};
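
/*
 * Illustrative sketch (not part of the UAPI): creating a device on an already
 * opened adapter with the LX_DXCREATEDEVICE ioctl defined at the end of this
 * header. In these sketches "fd" is assumed to be an open file descriptor for
 * the dxg character device (typically /dev/dxg), "adapter" a handle returned
 * by adapter enumeration or LX_DXOPENADAPTERFROMLUID, and error handling and
 * the required userspace headers (<sys/ioctl.h>, <stdint.h>) are omitted.
 *
 *	struct d3dkmt_createdevice args = { 0 };
 *
 *	args.adapter = adapter;
 *	if (ioctl(fd, LX_DXCREATEDEVICE, &args) < 0)
 *		return -1;
 *	device = args.device;
 */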

struct d3dkmt_destroydevice {
	struct d3dkmthandle device;
};

enum d3dkmt_clienthint {
	_D3DKMT_CLIENTHNT_UNKNOWN = 0,
	_D3DKMT_CLIENTHINT_OPENGL = 1,
	_D3DKMT_CLIENTHINT_CDD = 2,
	_D3DKMT_CLIENTHINT_DX7 = 7,
	_D3DKMT_CLIENTHINT_DX8 = 8,
	_D3DKMT_CLIENTHINT_DX9 = 9,
	_D3DKMT_CLIENTHINT_DX10 = 10,
};

struct d3dddi_createcontextflags {
	union {
		struct {
			__u32 null_rendering:1;
			__u32 initial_data:1;
			__u32 disable_gpu_timeout:1;
			__u32 synchronization_only:1;
			__u32 hw_queue_supported:1;
			__u32 reserved:27;
		};
		__u32 value;
	};
};

struct d3dkmt_destroycontext {
	struct d3dkmthandle context;
};

struct d3dkmt_createcontextvirtual {
	struct d3dkmthandle device;
	__u32 node_ordinal;
	__u32 engine_affinity;
	struct d3dddi_createcontextflags flags;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 priv_drv_data_size;
	enum d3dkmt_clienthint client_hint;
	struct d3dkmthandle context;
};
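
/*
 * Illustrative sketch (not part of the UAPI): creating a virtual context on a
 * device with LX_DXCREATECONTEXTVIRTUAL. The private driver data blob is
 * produced by the user mode driver and is opaque to dxgkrnl; "umd_data" and
 * "umd_data_size" are assumed to come from that driver. In user mode,
 * pointers are passed as __u64 values.
 *
 *	struct d3dkmt_createcontextvirtual args = { 0 };
 *
 *	args.device = device;
 *	args.node_ordinal = 0;
 *	args.engine_affinity = 1;
 *	args.client_hint = _D3DKMT_CLIENTHINT_DX10;
 *	args.priv_drv_data = (__u64)(uintptr_t)umd_data;
 *	args.priv_drv_data_size = umd_data_size;
 *	if (ioctl(fd, LX_DXCREATECONTEXTVIRTUAL, &args) < 0)
 *		return -1;
 *	context = args.context;
 */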

struct d3dddi_createhwqueueflags {
	union {
		struct {
			__u32 disable_gpu_timeout:1;
			__u32 reserved:31;
		};
		__u32 value;
	};
};

enum d3dddi_pagingqueue_priority {
	_D3DDDI_PAGINGQUEUE_PRIORITY_BELOW_NORMAL = -1,
	_D3DDDI_PAGINGQUEUE_PRIORITY_NORMAL = 0,
	_D3DDDI_PAGINGQUEUE_PRIORITY_ABOVE_NORMAL = 1,
};

struct d3dkmt_createpagingqueue {
	struct d3dkmthandle device;
	enum d3dddi_pagingqueue_priority priority;
	struct d3dkmthandle paging_queue;
	struct d3dkmthandle sync_object;
#ifdef __KERNEL__
	void *fence_cpu_virtual_address;
#else
	__u64 fence_cpu_virtual_address;
#endif
	__u32 physical_adapter_index;
};

struct d3dddi_destroypagingqueue {
	struct d3dkmthandle paging_queue;
};

enum d3dddi_knownescapetype {
	_D3DDDI_DRIVERESCAPETYPE_TRANSLATEALLOCATIONHANDLE = 0,
	_D3DDDI_DRIVERESCAPETYPE_TRANSLATERESOURCEHANDLE = 1,
	_D3DDDI_DRIVERESCAPETYPE_CPUEVENTUSAGE = 2,
	_D3DDDI_DRIVERESCAPETYPE_BUILDTESTCOMMANDBUFFER = 3,
};

struct d3dddi_translate_allocation_handle {
	enum d3dddi_knownescapetype escape_type;
	struct d3dkmthandle allocation;
};

struct d3dddi_testcommand {
	char buffer[72];
};

#define D3DDDI_MAXTESTBUFFERSIZE 4096
#define D3DDDI_MAXTESTBUFFERPRIVATEDRIVERDATASIZE 1024

struct d3dddi_buildtestcommandbuffer {
	enum d3dddi_knownescapetype escape_type;
	struct d3dkmthandle device;
	struct d3dkmthandle context;
	__u32 flags;
	struct d3dddi_testcommand command;
	void *dma_buffer;
	void *dma_buffer_priv_data;
	__u32 dma_buffer_size;
	__u32 dma_buffer_priv_data_size;
};

enum d3dkmt_escapetype {
	_D3DKMT_ESCAPE_DRIVERPRIVATE = 0,
	_D3DKMT_ESCAPE_VIDMM = 1,
	_D3DKMT_ESCAPE_VIDSCH = 3,
	_D3DKMT_ESCAPE_DEVICE = 4,
	_D3DKMT_ESCAPE_DRT_TEST = 8,
};

struct d3dddi_escapeflags {
	union {
		struct {
			__u32 hardware_access:1;
			__u32 device_status_query:1;
			__u32 change_frame_latency:1;
			__u32 no_adapter_synchronization:1;
			__u32 reserved:1;
			__u32 virtual_machine_data:1;
			__u32 driver_known_escape:1;
			__u32 driver_common_escape:1;
			__u32 reserved2:24;
		};
		__u32 value;
	};
};

struct d3dkmt_escape {
	struct d3dkmthandle adapter;
	struct d3dkmthandle device;
	enum d3dkmt_escapetype type;
	struct d3dddi_escapeflags flags;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 priv_drv_data_size;
	struct d3dkmthandle context;
};

enum dxgk_render_pipeline_stage {
	_DXGK_RENDER_PIPELINE_STAGE_UNKNOWN = 0,
	_DXGK_RENDER_PIPELINE_STAGE_INPUT_ASSEMBLER = 1,
	_DXGK_RENDER_PIPELINE_STAGE_VERTEX_SHADER = 2,
	_DXGK_RENDER_PIPELINE_STAGE_GEOMETRY_SHADER = 3,
	_DXGK_RENDER_PIPELINE_STAGE_STREAM_OUTPUT = 4,
	_DXGK_RENDER_PIPELINE_STAGE_RASTERIZER = 5,
	_DXGK_RENDER_PIPELINE_STAGE_PIXEL_SHADER = 6,
	_DXGK_RENDER_PIPELINE_STAGE_OUTPUT_MERGER = 7,
};

enum dxgk_page_fault_flags {
	_DXGK_PAGE_FAULT_WRITE = 0x1,
	_DXGK_PAGE_FAULT_FENCE_INVALID = 0x2,
	_DXGK_PAGE_FAULT_ADAPTER_RESET_REQUIRED = 0x4,
	_DXGK_PAGE_FAULT_ENGINE_RESET_REQUIRED = 0x8,
	_DXGK_PAGE_FAULT_FATAL_HARDWARE_ERROR = 0x10,
	_DXGK_PAGE_FAULT_IOMMU = 0x20,
	_DXGK_PAGE_FAULT_HW_CONTEXT_VALID = 0x40,
	_DXGK_PAGE_FAULT_PROCESS_HANDLE_VALID = 0x80,
};

enum dxgk_general_error_code {
	_DXGK_GENERAL_ERROR_PAGE_FAULT = 0,
	_DXGK_GENERAL_ERROR_INVALID_INSTRUCTION = 1,
};

struct dxgk_fault_error_code {
	union {
		struct {
			__u32 is_device_specific_code:1;
			enum dxgk_general_error_code general_error_code:31;
		};
		struct {
			__u32 is_device_specific_code_reserved_bit:1;
			__u32 device_specific_code:31;
		};
	};
};

struct d3dkmt_devicereset_state {
	union {
		struct {
			__u32 desktop_switched:1;
			__u32 reserved:31;
		};
		__u32 value;
	};
};

struct d3dkmt_devicepagefault_state {
	__u64 faulted_primitive_api_sequence_number;
	enum dxgk_render_pipeline_stage faulted_pipeline_stage;
	__u32 faulted_bind_table_entry;
	enum dxgk_page_fault_flags page_fault_flags;
	struct dxgk_fault_error_code fault_error_code;
	__u64 faulted_virtual_address;
};

enum d3dkmt_deviceexecution_state {
	_D3DKMT_DEVICEEXECUTION_ACTIVE = 1,
	_D3DKMT_DEVICEEXECUTION_RESET = 2,
	_D3DKMT_DEVICEEXECUTION_HUNG = 3,
	_D3DKMT_DEVICEEXECUTION_STOPPED = 4,
	_D3DKMT_DEVICEEXECUTION_ERROR_OUTOFMEMORY = 5,
	_D3DKMT_DEVICEEXECUTION_ERROR_DMAFAULT = 6,
	_D3DKMT_DEVICEEXECUTION_ERROR_DMAPAGEFAULT = 7,
};

enum d3dkmt_devicestate_type {
	_D3DKMT_DEVICESTATE_EXECUTION = 1,
	_D3DKMT_DEVICESTATE_PRESENT = 2,
	_D3DKMT_DEVICESTATE_RESET = 3,
	_D3DKMT_DEVICESTATE_PRESENT_DWM = 4,
	_D3DKMT_DEVICESTATE_PAGE_FAULT = 5,
	_D3DKMT_DEVICESTATE_PRESENT_QUEUE = 6,
};

struct d3dkmt_getdevicestate {
	struct d3dkmthandle device;
	enum d3dkmt_devicestate_type state_type;
	union {
		enum d3dkmt_deviceexecution_state execution_state;
		struct d3dkmt_devicereset_state reset_state;
		struct d3dkmt_devicepagefault_state page_fault_state;
		char alignment[48];
	};
};
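
/*
 * Illustrative sketch (not part of the UAPI): polling the execution state of
 * a device with LX_DXGETDEVICESTATE to detect device loss. "fd" and "device"
 * are assumed to be set up as in the earlier examples;
 * "handle_device_loss()" is a hypothetical application callback.
 *
 *	struct d3dkmt_getdevicestate args = { 0 };
 *
 *	args.device = device;
 *	args.state_type = _D3DKMT_DEVICESTATE_EXECUTION;
 *	if (ioctl(fd, LX_DXGETDEVICESTATE, &args) == 0 &&
 *	    args.execution_state != _D3DKMT_DEVICEEXECUTION_ACTIVE)
 *		handle_device_loss();
 */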

enum d3dkmdt_gdisurfacetype {
	_D3DKMDT_GDISURFACE_INVALID = 0,
	_D3DKMDT_GDISURFACE_TEXTURE = 1,
	_D3DKMDT_GDISURFACE_STAGING_CPUVISIBLE = 2,
	_D3DKMDT_GDISURFACE_STAGING = 3,
	_D3DKMDT_GDISURFACE_LOOKUPTABLE = 4,
	_D3DKMDT_GDISURFACE_EXISTINGSYSMEM = 5,
	_D3DKMDT_GDISURFACE_TEXTURE_CPUVISIBLE = 6,
	_D3DKMDT_GDISURFACE_TEXTURE_CROSSADAPTER = 7,
	_D3DKMDT_GDISURFACE_TEXTURE_CPUVISIBLE_CROSSADAPTER = 8,
};

struct d3dddi_rational {
	__u32 numerator;
	__u32 denominator;
};

enum d3dddiformat {
	_D3DDDIFMT_UNKNOWN = 0,
};

struct d3dkmdt_gdisurfacedata {
	__u32 width;
	__u32 height;
	__u32 format;
	enum d3dkmdt_gdisurfacetype type;
	__u32 flags;
	__u32 pitch;
};

struct d3dkmdt_stagingsurfacedata {
	__u32 width;
	__u32 height;
	__u32 pitch;
};

struct d3dkmdt_sharedprimarysurfacedata {
	__u32 width;
	__u32 height;
	enum d3dddiformat format;
	struct d3dddi_rational refresh_rate;
	__u32 vidpn_source_id;
};

struct d3dkmdt_shadowsurfacedata {
	__u32 width;
	__u32 height;
	enum d3dddiformat format;
	__u32 pitch;
};

enum d3dkmdt_standardallocationtype {
	_D3DKMDT_STANDARDALLOCATION_SHAREDPRIMARYSURFACE = 1,
	_D3DKMDT_STANDARDALLOCATION_SHADOWSURFACE = 2,
	_D3DKMDT_STANDARDALLOCATION_STAGINGSURFACE = 3,
	_D3DKMDT_STANDARDALLOCATION_GDISURFACE = 4,
};

struct d3dddi_synchronizationobject_flags {
	union {
		struct {
			__u32 shared:1;
			__u32 nt_security_sharing:1;
			__u32 cross_adapter:1;
			__u32 top_of_pipeline:1;
			__u32 no_signal:1;
			__u32 no_wait:1;
			__u32 no_signal_max_value_on_tdr:1;
			__u32 no_gpu_access:1;
			__u32 reserved:23;
		};
		__u32 value;
	};
};

enum d3dddi_synchronizationobject_type {
	_D3DDDI_SYNCHRONIZATION_MUTEX = 1,
	_D3DDDI_SEMAPHORE = 2,
	_D3DDDI_FENCE = 3,
	_D3DDDI_CPU_NOTIFICATION = 4,
	_D3DDDI_MONITORED_FENCE = 5,
	_D3DDDI_PERIODIC_MONITORED_FENCE = 6,
	_D3DDDI_SYNCHRONIZATION_TYPE_LIMIT
};

struct d3dddi_synchronizationobjectinfo2 {
	enum d3dddi_synchronizationobject_type type;
	struct d3dddi_synchronizationobject_flags flags;
	union {
		struct {
			__u32 initial_state;
		} synchronization_mutex;

		struct {
			__u32 max_count;
			__u32 initial_count;
		} semaphore;

		struct {
			__u64 fence_value;
		} fence;

		struct {
			__u64 event;
		} cpu_notification;

		struct {
			__u64 initial_fence_value;
#ifdef __KERNEL__
			void *fence_cpu_virtual_address;
#else
			__u64 *fence_cpu_virtual_address;
#endif
			__u64 fence_gpu_virtual_address;
			__u32 engine_affinity;
		} monitored_fence;

		struct {
			struct d3dkmthandle adapter;
			__u32 vidpn_target_id;
			__u64 time;
#ifdef __KERNEL__
			void *fence_cpu_virtual_address;
#else
			__u64 fence_cpu_virtual_address;
#endif
			__u64 fence_gpu_virtual_address;
			__u32 engine_affinity;
		} periodic_monitored_fence;

		struct {
			__u64 reserved[8];
		} reserved;
	};
	struct d3dkmthandle shared_handle;
};

struct d3dkmt_createsynchronizationobject2 {
	struct d3dkmthandle device;
	__u32 reserved;
	struct d3dddi_synchronizationobjectinfo2 info;
	struct d3dkmthandle sync_object;
	__u32 reserved1;
};
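
/*
 * Illustrative sketch (not part of the UAPI): creating a monitored fence with
 * LX_DXCREATESYNCHRONIZATIONOBJECT. On success the returned
 * fence_cpu_virtual_address is assumed to allow the current fence value to be
 * read directly from user mode; error handling is omitted.
 *
 *	struct d3dkmt_createsynchronizationobject2 args = { 0 };
 *
 *	args.device = device;
 *	args.info.type = _D3DDDI_MONITORED_FENCE;
 *	args.info.monitored_fence.initial_fence_value = 0;
 *	if (ioctl(fd, LX_DXCREATESYNCHRONIZATIONOBJECT, &args) < 0)
 *		return -1;
 *	fence = args.sync_object;
 *	fence_va = args.info.monitored_fence.fence_cpu_virtual_address;
 */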

struct d3dkmt_waitforsynchronizationobject2 {
	struct d3dkmthandle context;
	__u32 object_count;
	struct d3dkmthandle object_array[D3DDDI_MAX_OBJECT_WAITED_ON];
	union {
		struct {
			__u64 fence_value;
		} fence;
		__u64 reserved[8];
	};
};

struct d3dddicb_signalflags {
	union {
		struct {
			__u32 signal_at_submission:1;
			__u32 enqueue_cpu_event:1;
			__u32 allow_fence_rewind:1;
			__u32 reserved:28;
			__u32 DXGK_SIGNAL_FLAG_INTERNAL0:1;
		};
		__u32 value;
	};
};

struct d3dkmt_signalsynchronizationobject2 {
	struct d3dkmthandle context;
	__u32 object_count;
	struct d3dkmthandle object_array[D3DDDI_MAX_OBJECT_SIGNALED];
	struct d3dddicb_signalflags flags;
	__u32 context_count;
	struct d3dkmthandle contexts[D3DDDI_MAX_BROADCAST_CONTEXT];
	union {
		struct {
			__u64 fence_value;
		} fence;
		__u64 cpu_event_handle;
		__u64 reserved[8];
	};
};

struct d3dddi_waitforsynchronizationobjectfromcpu_flags {
	union {
		struct {
			__u32 wait_any:1;
			__u32 reserved:31;
		};
		__u32 value;
	};
};

struct d3dkmt_waitforsynchronizationobjectfromcpu {
	struct d3dkmthandle device;
	__u32 object_count;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
	__u64 *fence_values;
#else
	__u64 objects;
	__u64 fence_values;
#endif
	__u64 async_event;
	struct d3dddi_waitforsynchronizationobjectfromcpu_flags flags;
};
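
/*
 * Illustrative sketch (not part of the UAPI): waiting on the CPU until a
 * monitored fence reaches a given value with
 * LX_DXWAITFORSYNCHRONIZATIONOBJECTFROMCPU. async_event is left at zero here,
 * which is assumed to request a synchronous wait; error handling is omitted.
 *
 *	struct d3dkmt_waitforsynchronizationobjectfromcpu args = { 0 };
 *	__u64 wait_value = 1;
 *
 *	args.device = device;
 *	args.object_count = 1;
 *	args.objects = (__u64)(uintptr_t)&fence;
 *	args.fence_values = (__u64)(uintptr_t)&wait_value;
 *	if (ioctl(fd, LX_DXWAITFORSYNCHRONIZATIONOBJECTFROMCPU, &args) < 0)
 *		return -1;
 */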

struct d3dkmt_signalsynchronizationobjectfromcpu {
	struct d3dkmthandle device;
	__u32 object_count;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
	__u64 *fence_values;
#else
	__u64 objects;
	__u64 fence_values;
#endif
	struct d3dddicb_signalflags flags;
};

struct d3dkmt_waitforsynchronizationobjectfromgpu {
	struct d3dkmthandle context;
	__u32 object_count;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
#else
	__u64 objects;
#endif
	union {
#ifdef __KERNEL__
		__u64 *monitored_fence_values;
#else
		__u64 monitored_fence_values;
#endif
		__u64 fence_value;
		__u64 reserved[8];
	};
};

struct d3dkmt_signalsynchronizationobjectfromgpu {
	struct d3dkmthandle context;
	__u32 object_count;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
#else
	__u64 objects;
#endif
	union {
#ifdef __KERNEL__
		__u64 *monitored_fence_values;
#else
		__u64 monitored_fence_values;
#endif
		__u64 reserved[8];
	};
};

struct d3dkmt_signalsynchronizationobjectfromgpu2 {
	__u32 object_count;
	__u32 reserved1;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
#else
	__u64 objects;
#endif
	struct d3dddicb_signalflags flags;
	__u32 context_count;
#ifdef __KERNEL__
	struct d3dkmthandle *contexts;
#else
	__u64 contexts;
#endif
	union {
		__u64 fence_value;
		__u64 cpu_event_handle;
#ifdef __KERNEL__
		__u64 *monitored_fence_values;
#else
		__u64 monitored_fence_values;
#endif
		__u64 reserved[8];
	};
};

struct d3dkmt_destroysynchronizationobject {
	struct d3dkmthandle sync_object;
};

struct d3dkmt_submitcommandflags {
	__u32 null_rendering:1;
	__u32 present_redirected:1;
	__u32 reserved:30;
};

struct d3dkmt_submitcommand {
	__u64 command_buffer;
	__u32 command_length;
	struct d3dkmt_submitcommandflags flags;
	__u64 present_history_token;
	__u32 broadcast_context_count;
	struct d3dkmthandle broadcast_context[D3DDDI_MAX_BROADCAST_CONTEXT];
	__u32 reserved;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 priv_drv_data_size;
	__u32 num_primaries;
	struct d3dkmthandle written_primaries[D3DDDI_MAX_WRITTEN_PRIMARIES];
	__u32 num_history_buffers;
	__u32 reserved1;
#ifdef __KERNEL__
	struct d3dkmthandle *history_buffer_array;
#else
	__u64 history_buffer_array;
#endif
};

struct d3dkmt_submitcommandtohwqueue {
	struct d3dkmthandle hwqueue;
	__u32 reserved;
	__u64 hwqueue_progress_fence_id;
	__u64 command_buffer;
	__u32 command_length;
	__u32 priv_drv_data_size;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 num_primaries;
	__u32 reserved1;
#ifdef __KERNEL__
	struct d3dkmthandle *written_primaries;
#else
	__u64 written_primaries;
#endif
};

struct d3dkmt_setcontextschedulingpriority {
	struct d3dkmthandle context;
	int priority;
};

struct d3dkmt_setcontextinprocessschedulingpriority {
	struct d3dkmthandle context;
	int priority;
};

struct d3dkmt_getcontextschedulingpriority {
	struct d3dkmthandle context;
	int priority;
};

struct d3dkmt_getcontextinprocessschedulingpriority {
	struct d3dkmthandle context;
	int priority;
};

struct d3dkmt_setallocationpriority {
	struct d3dkmthandle device;
	struct d3dkmthandle resource;
#ifdef __KERNEL__
	const struct d3dkmthandle *allocation_list;
#else
	__u64 allocation_list;
#endif
	__u32 allocation_count;
	__u32 reserved;
#ifdef __KERNEL__
	const __u32 *priorities;
#else
	__u64 priorities;
#endif
};

struct d3dkmt_getallocationpriority {
	struct d3dkmthandle device;
	struct d3dkmthandle resource;
#ifdef __KERNEL__
	const struct d3dkmthandle *allocation_list;
#else
	__u64 allocation_list;
#endif
	__u32 allocation_count;
	__u32 reserved;
#ifdef __KERNEL__
	__u32 *priorities;
#else
	__u64 priorities;
#endif
};

enum d3dkmt_allocationresidencystatus {
	_D3DKMT_ALLOCATIONRESIDENCYSTATUS_RESIDENTINGPUMEMORY = 1,
	_D3DKMT_ALLOCATIONRESIDENCYSTATUS_RESIDENTINSHAREDMEMORY = 2,
	_D3DKMT_ALLOCATIONRESIDENCYSTATUS_NOTRESIDENT = 3,
};

struct d3dkmt_queryallocationresidency {
	struct d3dkmthandle device;
	struct d3dkmthandle resource;
#ifdef __KERNEL__
	struct d3dkmthandle *allocations;
#else
	__u64 allocations;
#endif
	__u32 allocation_count;
	__u32 reserved;
#ifdef __KERNEL__
	enum d3dkmt_allocationresidencystatus *residency_status;
#else
	__u64 residency_status;
#endif
};

struct d3dddicb_lock2flags {
	union {
		struct {
			__u32 reserved:32;
		};
		__u32 value;
	};
};

struct d3dkmt_lock2 {
	struct d3dkmthandle device;
	struct d3dkmthandle allocation;
	struct d3dddicb_lock2flags flags;
	__u32 reserved;
#ifdef __KERNEL__
	void *data;
#else
	__u64 data;
#endif
};

struct d3dkmt_unlock2 {
	struct d3dkmthandle device;
	struct d3dkmthandle allocation;
};
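
/*
 * Illustrative sketch (not part of the UAPI): mapping an allocation for CPU
 * access with LX_DXLOCK2 and releasing it with LX_DXUNLOCK2. The returned
 * "data" value is assumed to be a CPU address that stays valid until the
 * unlock; "src" and "size" are placeholders and error handling is omitted.
 *
 *	struct d3dkmt_lock2 lock = { 0 };
 *	struct d3dkmt_unlock2 unlock = { 0 };
 *
 *	lock.device = device;
 *	lock.allocation = allocation;
 *	if (ioctl(fd, LX_DXLOCK2, &lock) < 0)
 *		return -1;
 *	memcpy((void *)(uintptr_t)lock.data, src, size);
 *	unlock.device = device;
 *	unlock.allocation = allocation;
 *	ioctl(fd, LX_DXUNLOCK2, &unlock);
 */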

enum d3dkmt_device_error_reason {
	_D3DKMT_DEVICE_ERROR_REASON_GENERIC = 0x80000000,
	_D3DKMT_DEVICE_ERROR_REASON_DRIVER_ERROR = 0x80000006,
};

struct d3dkmt_markdeviceaserror {
	struct d3dkmthandle device;
	enum d3dkmt_device_error_reason reason;
};

enum d3dkmt_standardallocationtype {
	_D3DKMT_STANDARDALLOCATIONTYPE_EXISTINGHEAP = 1,
	_D3DKMT_STANDARDALLOCATIONTYPE_CROSSADAPTER = 2,
};

struct d3dkmt_standardallocation_existingheap {
	__u64 size;
};

struct d3dkmt_createstandardallocationflags {
	union {
		struct {
			__u32 reserved:32;
		};
		__u32 value;
	};
};

struct d3dkmt_createstandardallocation {
	enum d3dkmt_standardallocationtype type;
	__u32 reserved;
	struct d3dkmt_standardallocation_existingheap existing_heap_data;
	struct d3dkmt_createstandardallocationflags flags;
	__u32 reserved1;
};

struct d3dddi_allocationinfo2 {
	struct d3dkmthandle allocation;
#ifdef __KERNEL__
	const void *sysmem;
#else
	__u64 sysmem;
#endif
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 priv_drv_data_size;
	__u32 vidpn_source_id;
	union {
		struct {
			__u32 primary:1;
			__u32 stereo:1;
			__u32 override_priority:1;
			__u32 reserved:29;
		};
		__u32 value;
	} flags;
	__u64 gpu_virtual_address;
	union {
		__u32 priority;
		__u64 unused;
	};
	__u64 reserved[5];
};

struct d3dkmt_createallocationflags {
	union {
		struct {
			__u32 create_resource:1;
			__u32 create_shared:1;
			__u32 non_secure:1;
			__u32 create_protected:1;
			__u32 restrict_shared_access:1;
			__u32 existing_sysmem:1;
			__u32 nt_security_sharing:1;
			__u32 read_only:1;
			__u32 create_write_combined:1;
			__u32 create_cached:1;
			__u32 swap_chain_back_buffer:1;
			__u32 cross_adapter:1;
			__u32 open_cross_adapter:1;
			__u32 partial_shared_creation:1;
			__u32 zeroed:1;
			__u32 write_watch:1;
			__u32 standard_allocation:1;
			__u32 existing_section:1;
			__u32 reserved:14;
		};
		__u32 value;
	};
};

struct d3dkmt_createallocation {
	struct d3dkmthandle device;
	struct d3dkmthandle resource;
	struct d3dkmthandle global_share;
	__u32 reserved;
#ifdef __KERNEL__
	const void *private_runtime_data;
#else
	__u64 private_runtime_data;
#endif
	__u32 private_runtime_data_size;
	__u32 reserved1;
	union {
#ifdef __KERNEL__
		struct d3dkmt_createstandardallocation *standard_allocation;
		const void *priv_drv_data;
#else
		__u64 standard_allocation;
		__u64 priv_drv_data;
#endif
	};
	__u32 priv_drv_data_size;
	__u32 alloc_count;
#ifdef __KERNEL__
	struct d3dddi_allocationinfo2 *allocation_info;
#else
	__u64 allocation_info;
#endif
	struct d3dkmt_createallocationflags flags;
	__u32 reserved2;
	__u64 private_runtime_resource_handle;
};

struct d3dddicb_destroyallocation2flags {
	union {
		struct {
			__u32 assume_not_in_use:1;
			__u32 synchronous_destroy:1;
			__u32 reserved:29;
			__u32 system_use_only:1;
		};
		__u32 value;
	};
};

struct d3dkmt_destroyallocation2 {
	struct d3dkmthandle device;
	struct d3dkmthandle resource;
#ifdef __KERNEL__
	const struct d3dkmthandle *allocations;
#else
	__u64 allocations;
#endif
	__u32 alloc_count;
	struct d3dddicb_destroyallocation2flags flags;
};

struct d3dddi_makeresident_flags {
	union {
		struct {
			__u32 cant_trim_further:1;
			__u32 must_succeed:1;
			__u32 reserved:30;
		};
		__u32 value;
	};
};

struct d3dddi_makeresident {
	struct d3dkmthandle paging_queue;
	__u32 alloc_count;
#ifdef __KERNEL__
	const struct d3dkmthandle *allocation_list;
	const __u32 *priority_list;
#else
	__u64 allocation_list;
	__u64 priority_list;
#endif
	struct d3dddi_makeresident_flags flags;
	__u64 paging_fence_value;
	__u64 num_bytes_to_trim;
};
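
/*
 * Illustrative sketch (not part of the UAPI): making a set of allocations
 * resident with LX_DXMAKERESIDENT. The operation is queued on a paging queue;
 * the returned paging_fence_value is assumed to be signaled on the paging
 * queue's monitored fence when the allocations are resident.
 * "wait_for_paging_fence()" is a hypothetical helper built on the CPU wait
 * ioctl shown earlier.
 *
 *	struct d3dddi_makeresident args = { 0 };
 *
 *	args.paging_queue = paging_queue;
 *	args.alloc_count = alloc_count;
 *	args.allocation_list = (__u64)(uintptr_t)allocations;
 *	if (ioctl(fd, LX_DXMAKERESIDENT, &args) < 0)
 *		return -1;
 *	wait_for_paging_fence(args.paging_fence_value);
 */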

struct d3dddi_evict_flags {
	union {
		struct {
			__u32 evict_only_if_necessary:1;
			__u32 not_written_to:1;
			__u32 reserved:30;
		};
		__u32 value;
	};
};

struct d3dkmt_evict {
	struct d3dkmthandle device;
	__u32 alloc_count;
#ifdef __KERNEL__
	const struct d3dkmthandle *allocations;
#else
	__u64 allocations;
#endif
	struct d3dddi_evict_flags flags;
	__u32 reserved;
	__u64 num_bytes_to_trim;
};

struct d3dddigpuva_protection_type {
	union {
		struct {
			__u64 write:1;
			__u64 execute:1;
			__u64 zero:1;
			__u64 no_access:1;
			__u64 system_use_only:1;
			__u64 reserved:59;
		};
		__u64 value;
	};
};

enum d3dddi_updategpuvirtualaddress_operation_type {
	_D3DDDI_UPDATEGPUVIRTUALADDRESS_MAP = 0,
	_D3DDDI_UPDATEGPUVIRTUALADDRESS_UNMAP = 1,
	_D3DDDI_UPDATEGPUVIRTUALADDRESS_COPY = 2,
	_D3DDDI_UPDATEGPUVIRTUALADDRESS_MAP_PROTECT = 3,
};

struct d3dddi_updategpuvirtualaddress_operation {
	enum d3dddi_updategpuvirtualaddress_operation_type operation;
	union {
		struct {
			__u64 base_address;
			__u64 size;
			struct d3dkmthandle allocation;
			__u64 allocation_offset;
			__u64 allocation_size;
		} map;
		struct {
			__u64 base_address;
			__u64 size;
			struct d3dkmthandle allocation;
			__u64 allocation_offset;
			__u64 allocation_size;
			struct d3dddigpuva_protection_type protection;
			__u64 driver_protection;
		} map_protect;
		struct {
			__u64 base_address;
			__u64 size;
			struct d3dddigpuva_protection_type protection;
		} unmap;
		struct {
			__u64 source_address;
			__u64 size;
			__u64 dest_address;
		} copy;
	};
};

enum d3dddigpuva_reservation_type {
	_D3DDDIGPUVA_RESERVE_NO_ACCESS = 0,
	_D3DDDIGPUVA_RESERVE_ZERO = 1,
	_D3DDDIGPUVA_RESERVE_NO_COMMIT = 2
};

struct d3dkmt_updategpuvirtualaddress {
	struct d3dkmthandle device;
	struct d3dkmthandle context;
	struct d3dkmthandle fence_object;
	__u32 num_operations;
#ifdef __KERNEL__
	struct d3dddi_updategpuvirtualaddress_operation *operations;
#else
	__u64 operations;
#endif
	__u32 reserved0;
	__u32 reserved1;
	__u64 reserved2;
	__u64 fence_value;
	union {
		struct {
			__u32 do_not_wait:1;
			__u32 reserved:31;
		};
		__u32 value;
	} flags;
	__u32 reserved3;
};

struct d3dddi_mapgpuvirtualaddress {
	struct d3dkmthandle paging_queue;
	__u64 base_address;
	__u64 minimum_address;
	__u64 maximum_address;
	struct d3dkmthandle allocation;
	__u64 offset_in_pages;
	__u64 size_in_pages;
	struct d3dddigpuva_protection_type protection;
	__u64 driver_protection;
	__u32 reserved0;
	__u64 reserved1;
	__u64 virtual_address;
	__u64 paging_fence_value;
};

struct d3dddi_reservegpuvirtualaddress {
	struct d3dkmthandle adapter;
	__u64 base_address;
	__u64 minimum_address;
	__u64 maximum_address;
	__u64 size;
	enum d3dddigpuva_reservation_type reservation_type;
	__u64 driver_protection;
	__u64 virtual_address;
	__u64 paging_fence_value;
};

struct d3dkmt_freegpuvirtualaddress {
	struct d3dkmthandle adapter;
	__u32 reserved;
	__u64 base_address;
	__u64 size;
};

enum d3dkmt_memory_segment_group {
	_D3DKMT_MEMORY_SEGMENT_GROUP_LOCAL = 0,
	_D3DKMT_MEMORY_SEGMENT_GROUP_NON_LOCAL = 1
};

struct d3dkmt_queryvideomemoryinfo {
	__u64 process;
	struct d3dkmthandle adapter;
	enum d3dkmt_memory_segment_group memory_segment_group;
	__u64 budget;
	__u64 current_usage;
	__u64 current_reservation;
	__u64 available_for_reservation;
	__u32 physical_adapter_index;
};
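
/*
 * Illustrative sketch (not part of the UAPI): querying the local video memory
 * budget and current usage with LX_DXQUERYVIDEOMEMORYINFO. "process" is left
 * at zero, which is assumed to mean the calling process; error handling is
 * omitted.
 *
 *	struct d3dkmt_queryvideomemoryinfo args = { 0 };
 *
 *	args.adapter = adapter;
 *	args.memory_segment_group = _D3DKMT_MEMORY_SEGMENT_GROUP_LOCAL;
 *	if (ioctl(fd, LX_DXQUERYVIDEOMEMORYINFO, &args) < 0)
 *		return -1;
 *	budget = args.budget;
 *	usage = args.current_usage;
 */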

struct d3dkmt_adaptertype {
	union {
		struct {
			__u32 render_supported:1;
			__u32 display_supported:1;
			__u32 software_device:1;
			__u32 post_device:1;
			__u32 hybrid_discrete:1;
			__u32 hybrid_integrated:1;
			__u32 indirect_display_device:1;
			__u32 paravirtualized:1;
			__u32 acg_supported:1;
			__u32 support_set_timings_from_vidpn:1;
			__u32 detachable:1;
			__u32 compute_only:1;
			__u32 prototype:1;
			__u32 reserved:19;
		};
		__u32 value;
	};
};

enum kmtqueryadapterinfotype {
	_KMTQAITYPE_UMDRIVERPRIVATE = 0,
	_KMTQAITYPE_ADAPTERTYPE = 15,
	_KMTQAITYPE_ADAPTERTYPE_RENDER = 57
};

struct d3dkmt_queryadapterinfo {
	struct d3dkmthandle adapter;
	enum kmtqueryadapterinfotype type;
#ifdef __KERNEL__
	void *private_data;
#else
	__u64 private_data;
#endif
	__u32 private_data_size;
};
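
/*
 * Illustrative sketch (not part of the UAPI): querying the adapter type with
 * LX_DXQUERYADAPTERINFO. The caller supplies the output buffer; for
 * _KMTQAITYPE_ADAPTERTYPE its layout is assumed to be struct
 * d3dkmt_adaptertype, defined above. Error handling is omitted.
 *
 *	struct d3dkmt_adaptertype adapter_type = { 0 };
 *	struct d3dkmt_queryadapterinfo args = { 0 };
 *
 *	args.adapter = adapter;
 *	args.type = _KMTQAITYPE_ADAPTERTYPE;
 *	args.private_data = (__u64)(uintptr_t)&adapter_type;
 *	args.private_data_size = sizeof(adapter_type);
 *	if (ioctl(fd, LX_DXQUERYADAPTERINFO, &args) < 0)
 *		return -1;
 *	is_compute_only = adapter_type.compute_only;
 */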

#pragma pack(push, 1)

struct dxgk_gpuclockdata_flags {
	union {
		struct {
			__u32 context_management_processor:1;
			__u32 reserved:31;
		};
		__u32 value;
	};
};

struct dxgk_gpuclockdata {
	__u64 gpu_frequency;
	__u64 gpu_clock_counter;
	__u64 cpu_clock_counter;
	struct dxgk_gpuclockdata_flags flags;
} __packed;

struct d3dkmt_queryclockcalibration {
	struct d3dkmthandle adapter;
	__u32 node_ordinal;
	__u32 physical_adapter_index;
	struct dxgk_gpuclockdata clock_data;
};

#pragma pack(pop)

struct d3dkmt_flushheaptransitions {
	struct d3dkmthandle adapter;
};

struct d3dddi_openallocationinfo2 {
	struct d3dkmthandle allocation;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	__u32 priv_drv_data_size;
	__u64 gpu_va;
	__u64 reserved[6];
};

struct d3dddi_updateallocproperty_flags {
	union {
		struct {
			__u32 accessed_physically:1;
			__u32 reserved:31;
		};
		__u32 value;
	};
};

struct d3dddi_segmentpreference {
	union {
		struct {
			__u32 segment_id0:5;
			__u32 direction0:1;
			__u32 segment_id1:5;
			__u32 direction1:1;
			__u32 segment_id2:5;
			__u32 direction2:1;
			__u32 segment_id3:5;
			__u32 direction3:1;
			__u32 segment_id4:5;
			__u32 direction4:1;
			__u32 reserved:2;
		};
		__u32 value;
	};
};

struct d3dddi_updateallocproperty {
	struct d3dkmthandle paging_queue;
	struct d3dkmthandle allocation;
	__u32 supported_segment_set;
	struct d3dddi_segmentpreference preferred_segment;
	struct d3dddi_updateallocproperty_flags flags;
	__u64 paging_fence_value;
	union {
		struct {
			__u32 set_accessed_physically:1;
			__u32 set_supported_segmentSet:1;
			__u32 set_preferred_segment:1;
			__u32 reserved:29;
		};
		__u32 property_mask_value;
	};
};

enum d3dkmt_offer_priority {
	_D3DKMT_OFFER_PRIORITY_LOW = 1,
	_D3DKMT_OFFER_PRIORITY_NORMAL = 2,
	_D3DKMT_OFFER_PRIORITY_HIGH = 3,
	_D3DKMT_OFFER_PRIORITY_AUTO = 4,
};

struct d3dkmt_offer_flags {
	union {
		struct {
			__u32 offer_immediately:1;
			__u32 allow_decommit:1;
			__u32 reserved:30;
		};
		__u32 value;
	};
};

struct d3dkmt_offerallocations {
	struct d3dkmthandle device;
	__u32 reserved;
#ifdef __KERNEL__
	struct d3dkmthandle *resources;
	const struct d3dkmthandle *allocations;
#else
	__u64 resources;
	__u64 allocations;
#endif
	__u32 allocation_count;
	enum d3dkmt_offer_priority priority;
	struct d3dkmt_offer_flags flags;
	__u32 reserved1;
};

enum d3dddi_reclaim_result {
	_D3DDDI_RECLAIM_RESULT_OK = 0,
	_D3DDDI_RECLAIM_RESULT_DISCARDED = 1,
	_D3DDDI_RECLAIM_RESULT_NOT_COMMITTED = 2,
};

struct d3dkmt_reclaimallocations2 {
	struct d3dkmthandle paging_queue;
	__u32 allocation_count;
#ifdef __KERNEL__
	struct d3dkmthandle *resources;
	struct d3dkmthandle *allocations;
#else
	__u64 resources;
	__u64 allocations;
#endif
	union {
#ifdef __KERNEL__
		__u32 *discarded;
		enum d3dddi_reclaim_result *results;
#else
		__u64 discarded;
		__u64 results;
#endif
	};
	__u64 paging_fence_value;
};

struct d3dkmt_changevideomemoryreservation {
	__u64 process;
	struct d3dkmthandle adapter;
	enum d3dkmt_memory_segment_group memory_segment_group;
	__u64 reservation;
	__u32 physical_adapter_index;
};

struct d3dkmt_createhwqueue {
	struct d3dkmthandle context;
	struct d3dddi_createhwqueueflags flags;
	__u32 priv_drv_data_size;
	__u32 reserved;
#ifdef __KERNEL__
	void *priv_drv_data;
#else
	__u64 priv_drv_data;
#endif
	struct d3dkmthandle queue;
	struct d3dkmthandle queue_progress_fence;
#ifdef __KERNEL__
	void *queue_progress_fence_cpu_va;
#else
	__u64 queue_progress_fence_cpu_va;
#endif
	__u64 queue_progress_fence_gpu_va;
};
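
/*
 * Illustrative sketch (not part of the UAPI): creating a hardware queue on an
 * existing context with LX_DXCREATEHWQUEUE. Any private driver data is opaque
 * to dxgkrnl and is omitted here. The returned queue progress fence is
 * assumed to track completion of work submitted to the queue.
 *
 *	struct d3dkmt_createhwqueue args = { 0 };
 *
 *	args.context = context;
 *	if (ioctl(fd, LX_DXCREATEHWQUEUE, &args) < 0)
 *		return -1;
 *	hwqueue = args.queue;
 *	progress_fence = args.queue_progress_fence;
 */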

struct d3dkmt_destroyhwqueue {
	struct d3dkmthandle queue;
};

struct d3dkmt_submitwaitforsyncobjectstohwqueue {
	struct d3dkmthandle hwqueue;
	__u32 object_count;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
	__u64 *fence_values;
#else
	__u64 objects;
	__u64 fence_values;
#endif
};

struct d3dkmt_submitsignalsyncobjectstohwqueue {
	struct d3dddicb_signalflags flags;
	__u32 hwqueue_count;
#ifdef __KERNEL__
	struct d3dkmthandle *hwqueues;
#else
	__u64 hwqueues;
#endif
	__u32 object_count;
	__u32 reserved;
#ifdef __KERNEL__
	struct d3dkmthandle *objects;
	__u64 *fence_values;
#else
	__u64 objects;
	__u64 fence_values;
#endif
};

struct d3dkmt_opensyncobjectfromnthandle2 {
	__u64 nt_handle;
	struct d3dkmthandle device;
	struct d3dddi_synchronizationobject_flags flags;
	struct d3dkmthandle sync_object;
	__u32 reserved1;
	union {
		struct {
#ifdef __KERNEL__
			void *fence_value_cpu_va;
#else
			__u64 fence_value_cpu_va;
#endif
			__u64 fence_value_gpu_va;
			__u32 engine_affinity;
		} monitored_fence;
		__u64 reserved[8];
	};
};

struct d3dkmt_openresourcefromnthandle {
	struct d3dkmthandle device;
	__u32 reserved;
	__u64 nt_handle;
	__u32 allocation_count;
	__u32 reserved1;
#ifdef __KERNEL__
	struct d3dddi_openallocationinfo2 *open_alloc_info;
#else
	__u64 open_alloc_info;
#endif
	int private_runtime_data_size;
	__u32 reserved2;
#ifdef __KERNEL__
	void *private_runtime_data;
#else
	__u64 private_runtime_data;
#endif
	__u32 resource_priv_drv_data_size;
	__u32 reserved3;
#ifdef __KERNEL__
	void *resource_priv_drv_data;
#else
	__u64 resource_priv_drv_data;
#endif
	__u32 total_priv_drv_data_size;
#ifdef __KERNEL__
	void *total_priv_drv_data;
#else
	__u64 total_priv_drv_data;
#endif
	struct d3dkmthandle resource;
	struct d3dkmthandle keyed_mutex;
#ifdef __KERNEL__
	void *keyed_mutex_private_data;
#else
	__u64 keyed_mutex_private_data;
#endif
	__u32 keyed_mutex_private_data_size;
	struct d3dkmthandle sync_object;
};

struct d3dkmt_queryresourceinfofromnthandle {
	struct d3dkmthandle device;
	__u32 reserved;
	__u64 nt_handle;
#ifdef __KERNEL__
	void *private_runtime_data;
#else
	__u64 private_runtime_data;
#endif
	__u32 private_runtime_data_size;
	__u32 total_priv_drv_data_size;
	__u32 resource_priv_drv_data_size;
	__u32 allocation_count;
};

struct d3dkmt_shareobjects {
	__u32 object_count;
	__u32 reserved;
#ifdef __KERNEL__
	const struct d3dkmthandle *objects;
	void *object_attr; /* security attributes */
#else
	__u64 objects;
	__u64 object_attr;
#endif
	__u32 desired_access;
	__u32 reserved1;
#ifdef __KERNEL__
	__u64 *shared_handle; /* output file descriptors */
#else
	__u64 shared_handle;
#endif
};

union d3dkmt_enumadapters_filter {
	struct {
		__u64 include_compute_only:1;
		__u64 include_display_only:1;
		__u64 reserved:62;
	};
	__u64 value;
};

struct d3dkmt_enumadapters3 {
	union d3dkmt_enumadapters_filter filter;
	__u32 adapter_count;
	__u32 reserved;
#ifdef __KERNEL__
	struct d3dkmt_adapterinfo *adapters;
#else
	__u64 adapters;
#endif
};
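
/*
 * Illustrative sketch (not part of the UAPI): enumerating adapters with
 * LX_DXENUMADAPTERS3. Each returned entry carries an adapter handle and the
 * adapter LUID (the LUID can also be passed to LX_DXOPENADAPTERFROMLUID).
 * "fd" is assumed to be an open file descriptor for the dxg character
 * device; error handling is omitted.
 *
 *	struct d3dkmt_adapterinfo infos[D3DKMT_ADAPTERS_MAX];
 *	struct d3dkmt_enumadapters3 args = { 0 };
 *
 *	args.adapter_count = D3DKMT_ADAPTERS_MAX;
 *	args.adapters = (__u64)(uintptr_t)infos;
 *	if (ioctl(fd, LX_DXENUMADAPTERS3, &args) < 0 ||
 *	    args.adapter_count == 0)
 *		return -1;
 *	adapter = infos[0].adapter_handle;
 */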

enum d3dkmt_querystatistics_type {
	_D3DKMT_QUERYSTATISTICS_ADAPTER = 0,
	_D3DKMT_QUERYSTATISTICS_PROCESS = 1,
	_D3DKMT_QUERYSTATISTICS_PROCESS_ADAPTER = 2,
	_D3DKMT_QUERYSTATISTICS_SEGMENT = 3,
	_D3DKMT_QUERYSTATISTICS_PROCESS_SEGMENT = 4,
	_D3DKMT_QUERYSTATISTICS_NODE = 5,
	_D3DKMT_QUERYSTATISTICS_PROCESS_NODE = 6,
	_D3DKMT_QUERYSTATISTICS_VIDPNSOURCE = 7,
	_D3DKMT_QUERYSTATISTICS_PROCESS_VIDPNSOURCE = 8,
	_D3DKMT_QUERYSTATISTICS_PROCESS_SEGMENT_GROUP = 9,
	_D3DKMT_QUERYSTATISTICS_PHYSICAL_ADAPTER = 10,
};

struct d3dkmt_querystatistics_result {
	char size[0x308];
};

struct d3dkmt_querystatistics {
	union {
		struct {
			enum d3dkmt_querystatistics_type type;
			struct winluid adapter_luid;
			__u64 process;
			struct d3dkmt_querystatistics_result result;
		};
		char size[0x328];
	};
};

struct d3dkmt_shareobjectwithhost {
	struct d3dkmthandle device_handle;
	struct d3dkmthandle object_handle;
	__u64 reserved;
	__u64 object_vail_nt_handle;
};

struct d3dkmt_createsyncfile {
	struct d3dkmthandle device;
	struct d3dkmthandle monitored_fence;
	__u64 fence_value;
	__u64 sync_file_handle; /* out */
};

struct d3dkmt_waitsyncfile {
	__u64 sync_file_handle;
	struct d3dkmthandle context;
	__u32 reserved;
};

struct d3dkmt_opensyncobjectfromsyncfile {
	__u64 sync_file_handle;
	struct d3dkmthandle device;
	struct d3dkmthandle syncobj; /* out */
	__u64 fence_value; /* out */
#ifdef __KERNEL__
	void *fence_value_cpu_va; /* out */
#else
	__u64 fence_value_cpu_va; /* out */
#endif
	__u64 fence_value_gpu_va; /* out */
};

struct d3dkmt_enumprocesses {
	struct winluid adapter_luid;
#ifdef __KERNEL__
	__u32 *buffer;
#else
	__u64 buffer;
#endif
	__u64 buffer_count;
};

enum dxgk_feature_id {
	_DXGK_FEATURE_HWSCH = 0,
	_DXGK_FEATURE_PAGE_BASED_MEMORY_MANAGER = 32,
	_DXGK_FEATURE_KERNEL_MODE_TESTING = 33,
	_DXGK_FEATURE_MAX
};

struct dxgk_isfeatureenabled_result {
	__u16 version;
	union {
		struct {
			__u16 enabled : 1;
			__u16 known_feature : 1;
			__u16 supported_by_driver : 1;
			__u16 supported_on_config : 1;
			__u16 reserved : 12;
		};
		__u16 value;
	};
};

struct d3dkmt_isfeatureenabled {
	struct d3dkmthandle adapter;
	enum dxgk_feature_id feature_id;
	struct dxgk_isfeatureenabled_result result;
};
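
/*
 * Illustrative sketch (not part of the UAPI): asking the driver whether a
 * feature is enabled with LX_ISFEATUREENABLED. The result carries separate
 * bits for whether the feature is known, supported by the driver, supported
 * on the current configuration, and enabled. Error handling is omitted.
 *
 *	struct d3dkmt_isfeatureenabled args = { 0 };
 *
 *	args.adapter = adapter;
 *	args.feature_id = _DXGK_FEATURE_HWSCH;
 *	if (ioctl(fd, LX_ISFEATUREENABLED, &args) < 0)
 *		return -1;
 *	hwsch_enabled = args.result.enabled;
 */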

struct d3dkmt_invalidatecache {
	struct d3dkmthandle device;
	struct d3dkmthandle allocation;
	__u64 offset;
	__u64 length;
};

/*
 * Dxgkrnl Graphics Port Driver ioctl definitions
 *
 */

#define LX_DXOPENADAPTERFROMLUID \
	_IOWR(0x47, 0x01, struct d3dkmt_openadapterfromluid)
#define LX_DXCREATEDEVICE \
	_IOWR(0x47, 0x02, struct d3dkmt_createdevice)
#define LX_DXCREATECONTEXTVIRTUAL \
	_IOWR(0x47, 0x04, struct d3dkmt_createcontextvirtual)
#define LX_DXDESTROYCONTEXT \
	_IOWR(0x47, 0x05, struct d3dkmt_destroycontext)
#define LX_DXCREATEALLOCATION \
	_IOWR(0x47, 0x06, struct d3dkmt_createallocation)
#define LX_DXCREATEPAGINGQUEUE \
	_IOWR(0x47, 0x07, struct d3dkmt_createpagingqueue)
#define LX_DXRESERVEGPUVIRTUALADDRESS \
	_IOWR(0x47, 0x08, struct d3dddi_reservegpuvirtualaddress)
#define LX_DXQUERYADAPTERINFO \
	_IOWR(0x47, 0x09, struct d3dkmt_queryadapterinfo)
#define LX_DXQUERYVIDEOMEMORYINFO \
	_IOWR(0x47, 0x0a, struct d3dkmt_queryvideomemoryinfo)
#define LX_DXMAKERESIDENT \
	_IOWR(0x47, 0x0b, struct d3dddi_makeresident)
#define LX_DXMAPGPUVIRTUALADDRESS \
	_IOWR(0x47, 0x0c, struct d3dddi_mapgpuvirtualaddress)
#define LX_DXESCAPE \
	_IOWR(0x47, 0x0d, struct d3dkmt_escape)
#define LX_DXGETDEVICESTATE \
	_IOWR(0x47, 0x0e, struct d3dkmt_getdevicestate)
#define LX_DXSUBMITCOMMAND \
	_IOWR(0x47, 0x0f, struct d3dkmt_submitcommand)
#define LX_DXCREATESYNCHRONIZATIONOBJECT \
	_IOWR(0x47, 0x10, struct d3dkmt_createsynchronizationobject2)
#define LX_DXSIGNALSYNCHRONIZATIONOBJECT \
	_IOWR(0x47, 0x11, struct d3dkmt_signalsynchronizationobject2)
#define LX_DXWAITFORSYNCHRONIZATIONOBJECT \
	_IOWR(0x47, 0x12, struct d3dkmt_waitforsynchronizationobject2)
#define LX_DXDESTROYALLOCATION2 \
	_IOWR(0x47, 0x13, struct d3dkmt_destroyallocation2)
#define LX_DXENUMADAPTERS2 \
	_IOWR(0x47, 0x14, struct d3dkmt_enumadapters2)
#define LX_DXCLOSEADAPTER \
	_IOWR(0x47, 0x15, struct d3dkmt_closeadapter)
#define LX_DXCHANGEVIDEOMEMORYRESERVATION \
	_IOWR(0x47, 0x16, struct d3dkmt_changevideomemoryreservation)
#define LX_DXCREATEHWQUEUE \
	_IOWR(0x47, 0x18, struct d3dkmt_createhwqueue)
#define LX_DXDESTROYHWQUEUE \
	_IOWR(0x47, 0x1b, struct d3dkmt_destroyhwqueue)
#define LX_DXDESTROYPAGINGQUEUE \
	_IOWR(0x47, 0x1c, struct d3dddi_destroypagingqueue)
#define LX_DXDESTROYDEVICE \
	_IOWR(0x47, 0x19, struct d3dkmt_destroydevice)
#define LX_DXDESTROYSYNCHRONIZATIONOBJECT \
	_IOWR(0x47, 0x1d, struct d3dkmt_destroysynchronizationobject)
#define LX_DXEVICT \
	_IOWR(0x47, 0x1e, struct d3dkmt_evict)
#define LX_DXFLUSHHEAPTRANSITIONS \
	_IOWR(0x47, 0x1f, struct d3dkmt_flushheaptransitions)
#define LX_DXFREEGPUVIRTUALADDRESS \
	_IOWR(0x47, 0x20, struct d3dkmt_freegpuvirtualaddress)
#define LX_DXGETCONTEXTINPROCESSSCHEDULINGPRIORITY \
	_IOWR(0x47, 0x21, struct d3dkmt_getcontextinprocessschedulingpriority)
#define LX_DXGETCONTEXTSCHEDULINGPRIORITY \
	_IOWR(0x47, 0x22, struct d3dkmt_getcontextschedulingpriority)
#define LX_DXINVALIDATECACHE \
	_IOWR(0x47, 0x24, struct d3dkmt_invalidatecache)
#define LX_DXLOCK2 \
	_IOWR(0x47, 0x25, struct d3dkmt_lock2)
#define LX_DXMARKDEVICEASERROR \
	_IOWR(0x47, 0x26, struct d3dkmt_markdeviceaserror)
#define LX_DXOFFERALLOCATIONS \
	_IOWR(0x47, 0x27, struct d3dkmt_offerallocations)
#define LX_DXQUERYALLOCATIONRESIDENCY \
	_IOWR(0x47, 0x2a, struct d3dkmt_queryallocationresidency)
#define LX_DXRECLAIMALLOCATIONS2 \
	_IOWR(0x47, 0x2c, struct d3dkmt_reclaimallocations2)
#define LX_DXSETALLOCATIONPRIORITY \
	_IOWR(0x47, 0x2e, struct d3dkmt_setallocationpriority)
#define LX_DXSETCONTEXTINPROCESSSCHEDULINGPRIORITY \
	_IOWR(0x47, 0x2f, struct d3dkmt_setcontextinprocessschedulingpriority)
#define LX_DXSETCONTEXTSCHEDULINGPRIORITY \
	_IOWR(0x47, 0x30, struct d3dkmt_setcontextschedulingpriority)
#define LX_DXSIGNALSYNCHRONIZATIONOBJECTFROMCPU \
	_IOWR(0x47, 0x31, struct d3dkmt_signalsynchronizationobjectfromcpu)
#define LX_DXSIGNALSYNCHRONIZATIONOBJECTFROMGPU \
	_IOWR(0x47, 0x32, struct d3dkmt_signalsynchronizationobjectfromgpu)
#define LX_DXSIGNALSYNCHRONIZATIONOBJECTFROMGPU2 \
	_IOWR(0x47, 0x33, struct d3dkmt_signalsynchronizationobjectfromgpu2)
#define LX_DXSUBMITCOMMANDTOHWQUEUE \
	_IOWR(0x47, 0x34, struct d3dkmt_submitcommandtohwqueue)
#define LX_DXSUBMITSIGNALSYNCOBJECTSTOHWQUEUE \
	_IOWR(0x47, 0x35, struct d3dkmt_submitsignalsyncobjectstohwqueue)
#define LX_DXSUBMITWAITFORSYNCOBJECTSTOHWQUEUE \
	_IOWR(0x47, 0x36, struct d3dkmt_submitwaitforsyncobjectstohwqueue)
#define LX_DXUNLOCK2 \
	_IOWR(0x47, 0x37, struct d3dkmt_unlock2)
#define LX_DXUPDATEALLOCPROPERTY \
	_IOWR(0x47, 0x38, struct d3dddi_updateallocproperty)
#define LX_DXUPDATEGPUVIRTUALADDRESS \
	_IOWR(0x47, 0x39, struct d3dkmt_updategpuvirtualaddress)
#define LX_DXWAITFORSYNCHRONIZATIONOBJECTFROMCPU \
	_IOWR(0x47, 0x3a, struct d3dkmt_waitforsynchronizationobjectfromcpu)
#define LX_DXWAITFORSYNCHRONIZATIONOBJECTFROMGPU \
	_IOWR(0x47, 0x3b, struct d3dkmt_waitforsynchronizationobjectfromgpu)
#define LX_DXGETALLOCATIONPRIORITY \
	_IOWR(0x47, 0x3c, struct d3dkmt_getallocationpriority)
#define LX_DXQUERYCLOCKCALIBRATION \
	_IOWR(0x47, 0x3d, struct d3dkmt_queryclockcalibration)
#define LX_DXENUMADAPTERS3 \
	_IOWR(0x47, 0x3e, struct d3dkmt_enumadapters3)
#define LX_DXSHAREOBJECTS \
	_IOWR(0x47, 0x3f, struct d3dkmt_shareobjects)
#define LX_DXOPENSYNCOBJECTFROMNTHANDLE2 \
	_IOWR(0x47, 0x40, struct d3dkmt_opensyncobjectfromnthandle2)
#define LX_DXQUERYRESOURCEINFOFROMNTHANDLE \
	_IOWR(0x47, 0x41, struct d3dkmt_queryresourceinfofromnthandle)
#define LX_DXOPENRESOURCEFROMNTHANDLE \
	_IOWR(0x47, 0x42, struct d3dkmt_openresourcefromnthandle)
#define LX_DXQUERYSTATISTICS \
	_IOWR(0x47, 0x43, struct d3dkmt_querystatistics)
#define LX_DXSHAREOBJECTWITHHOST \
	_IOWR(0x47, 0x44, struct d3dkmt_shareobjectwithhost)
#define LX_DXCREATESYNCFILE \
	_IOWR(0x47, 0x45, struct d3dkmt_createsyncfile)
#define LX_DXWAITSYNCFILE \
	_IOWR(0x47, 0x46, struct d3dkmt_waitsyncfile)
#define LX_DXOPENSYNCOBJECTFROMSYNCFILE \
	_IOWR(0x47, 0x47, struct d3dkmt_opensyncobjectfromsyncfile)
#define LX_DXENUMPROCESSES \
	_IOWR(0x47, 0x48, struct d3dkmt_enumprocesses)
#define LX_ISFEATUREENABLED \
	_IOWR(0x47, 0x49, struct d3dkmt_isfeatureenabled)

#endif /* _D3DKMTHK_H */