/aosp_15_r20/external/igt-gpu-tools/tests/

Makefile.sources
  103 gem_bad_reloc_SOURCES = i915/gem_bad_reloc.c
  106 gem_basic_SOURCES = i915/gem_basic.c
  109 gem_busy_SOURCES = i915/gem_busy.c
  112 gem_caching_SOURCES = i915/gem_caching.c
  115 gem_close_SOURCES = i915/gem_close.c
  118 gem_close_race_SOURCES = i915/gem_close_race.c
  121 gem_concurrent_blit_SOURCES = i915/gem_concurrent_blit.c
  124 gem_cpu_reloc_SOURCES = i915/gem_cpu_reloc.c
  127 gem_create_SOURCES = i915/gem_create.c
  130 gem_cs_prefetch_SOURCES = i915/gem_cs_prefetch.c
  [all …]

prime_vgem.c
  33 static void test_read(int vgem, int i915) in test_read() argument
  46 handle = prime_fd_to_handle(i915, dmabuf); in test_read()
  57 gem_read(i915, handle, 4096*i, &tmp, sizeof(tmp)); in test_read()
  60 gem_close(i915, handle); in test_read()
  63 static void test_fence_read(int i915, int vgem) in test_fence_read() argument
  81 handle = prime_fd_to_handle(i915, dmabuf); in test_fence_read()
  89 gem_read(i915, handle, 4096*i, &tmp, sizeof(tmp)); in test_fence_read()
  96 gem_read(i915, handle, 4096*i, &tmp, sizeof(tmp)); in test_fence_read()
  99 gem_close(i915, handle); in test_fence_read()
  120 static void test_fence_mmap(int i915, int vgem) in test_fence_mmap() argument
  [all …]

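The hits above come from the vgem/i915 dma-buf sharing tests. As a rough illustration of the PRIME flow they exercise, here is a sketch that exports a buffer from one DRM device and imports it into another using the generic libdrm wrappers rather than the IGT helpers; the device paths, buffer size, and use of a dumb buffer are assumptions, and error handling is minimal.

/* Sketch of the PRIME handle <-> fd round trip: allocate on one device,
 * export as a dma-buf fd, import on another device.  Paths are assumed. */
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <xf86drm.h>   /* drmIoctl, drmPrime*, pulls in drm.h/drm_mode.h */

int main(void)
{
	int vgem = open("/dev/dri/card1", O_RDWR);  /* assumed to be vgem */
	int i915 = open("/dev/dri/card0", O_RDWR);  /* assumed to be i915 */
	struct drm_mode_create_dumb create = {
		.width = 1024, .height = 1, .bpp = 32,
	};
	uint32_t imported;
	int dmabuf;

	if (vgem < 0 || i915 < 0)
		return 1;

	/* Allocate a buffer on the exporting device. */
	if (drmIoctl(vgem, DRM_IOCTL_MODE_CREATE_DUMB, &create))
		return 1;

	/* Export it as a dma-buf fd, then import it on the other device. */
	if (drmPrimeHandleToFD(vgem, create.handle, DRM_CLOEXEC, &dmabuf))
		return 1;
	if (drmPrimeFDToHandle(i915, dmabuf, &imported))
		return 1;

	printf("vgem handle %u -> dmabuf fd %d -> i915 handle %u\n",
	       create.handle, dmabuf, imported);
	return 0;
}
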
/aosp_15_r20/external/igt-gpu-tools/tests/i915/

gem_ctx_shared.c
  61 static void create_shared_gtt(int i915, unsigned int flags) in create_shared_gtt() argument
  66 .handle = gem_create(i915, 4096), in create_shared_gtt()
  74 gem_write(i915, obj.handle, 0, &bbe, sizeof(bbe)); in create_shared_gtt()
  75 gem_execbuf(i915, &execbuf); in create_shared_gtt()
  76 gem_sync(i915, obj.handle); in create_shared_gtt()
  78 child = flags & DETACHED ? gem_context_create(i915) : 0; in create_shared_gtt()
  81 child = gem_context_clone(i915, in create_shared_gtt()
  85 gem_execbuf(i915, &execbuf); in create_shared_gtt()
  88 gem_context_destroy(i915, parent); in create_shared_gtt()
  89 gem_execbuf(i915, &execbuf); in create_shared_gtt()
  [all …]

gem_exec_balancer.c
  28 #include "i915/gem_ring.h"
  55 static bool has_class_instance(int i915, uint16_t class, uint16_t instance) in has_class_instance() argument
  69 list_engines(int i915, uint32_t class_mask, unsigned int *out) in list_engines() argument
  86 if (!has_class_instance(i915, class, instance)) in list_engines()
  112 static int __set_engines(int i915, uint32_t ctx, in __set_engines() argument
  128 return __gem_context_set_param(i915, &p); in __set_engines()
  131 static void set_engines(int i915, uint32_t ctx, in set_engines() argument
  135 igt_assert_eq(__set_engines(i915, ctx, ci, count), 0); in set_engines()
  138 static int __set_load_balancer(int i915, uint32_t ctx, in __set_load_balancer() argument
  168 return __gem_context_set_param(i915, &p); in __set_load_balancer()
  [all …]

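The balancer test builds explicit per-context engine maps and installs them through I915_CONTEXT_PARAM_ENGINES. The sketch below shows that set-param step on its own, using the raw ioctl that the IGT wrappers above sit on; it assumes an already-open i915 fd, an existing context id, and an i915_drm.h recent enough to define these uapi bits.

/* Give a context an explicit two-engine map via I915_CONTEXT_PARAM_ENGINES.
 * Returns 0 on success, -1/errno if the kernel lacks engine maps. */
#include <stdint.h>
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

static int set_two_engines(int i915, uint32_t ctx)
{
	I915_DEFINE_CONTEXT_PARAM_ENGINES(engines, 2) = {
		.engines = {
			{ I915_ENGINE_CLASS_RENDER, 0 },
			{ I915_ENGINE_CLASS_COPY,   0 },
		},
	};
	struct drm_i915_gem_context_param p = {
		.ctx_id = ctx,
		.param = I915_CONTEXT_PARAM_ENGINES,
		.size = sizeof(engines),
		.value = (uintptr_t)&engines,
	};

	return ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_SETPARAM, &p);
}
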
gem_vm_create.c
  26 #include "i915/gem_vm.h"
  28 static int vm_create_ioctl(int i915, struct drm_i915_gem_vm_control *ctl) in vm_create_ioctl() argument
  31 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_VM_CREATE, ctl)) { in vm_create_ioctl()
  39 static int vm_destroy_ioctl(int i915, struct drm_i915_gem_vm_control *ctl) in vm_destroy_ioctl() argument
  42 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_VM_DESTROY, ctl)) { in vm_destroy_ioctl()
  50 static int ctx_create_ioctl(int i915, in ctx_create_ioctl() argument
  54 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT, arg)) { in ctx_create_ioctl()
  62 static bool has_vm(int i915) in has_vm() argument
  67 err = vm_create_ioctl(i915, &ctl); in has_vm()
  74 gem_vm_destroy(i915, ctl.vm_id); in has_vm()
  [all …]

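The vm_create_ioctl()/vm_destroy_ioctl() wrappers above are thin shims over the i915 VM uapi. A stripped-down probe along the same lines as has_vm(), outside the IGT framework, could look like this; the render-node path is an assumption and error handling is minimal.

/* Probe for ppGTT VM support: create a VM and destroy it again. */
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

int main(void)
{
	struct drm_i915_gem_vm_control ctl = {};
	int i915 = open("/dev/dri/renderD128", O_RDWR); /* assumed i915 node */

	if (i915 < 0)
		return 1;

	if (ioctl(i915, DRM_IOCTL_I915_GEM_VM_CREATE, &ctl) == 0) {
		printf("VM create supported, vm_id=%u\n", ctl.vm_id);
		ioctl(i915, DRM_IOCTL_I915_GEM_VM_DESTROY, &ctl);
	} else {
		printf("VM create not supported on this kernel\n");
	}
	return 0;
}
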
gem_ctx_clone.c
  26 #include "i915/gem_vm.h"
  29 static int ctx_create_ioctl(int i915, struct drm_i915_gem_context_create_ext *arg) in ctx_create_ioctl() argument
  34 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT, arg)) { in ctx_create_ioctl()
  43 static bool has_ctx_clone(int i915) in has_ctx_clone() argument
  53 return ctx_create_ioctl(i915, &create) == -ENOENT; in has_ctx_clone()
  56 static void invalid_clone(int i915) in invalid_clone() argument
  66 igt_assert_eq(ctx_create_ioctl(i915, &create), 0); in invalid_clone()
  67 gem_context_destroy(i915, create.ctx_id); in invalid_clone()
  70 igt_assert_eq(ctx_create_ioctl(i915, &create), -EINVAL); in invalid_clone()
  74 igt_assert_eq(ctx_create_ioctl(i915, &create), -EFAULT); in invalid_clone()
  [all …]

gem_ctx_engines.c
  41 #include "i915/gem_context.h"
  47 static bool has_context_engines(int i915) in has_context_engines() argument
  53 return __gem_context_set_param(i915, &param) == 0; in has_context_engines()
  56 static void invalid_engines(int i915) in invalid_engines() argument
  60 .ctx_id = gem_context_create(i915), in invalid_engines()
  68 igt_assert_eq(__gem_context_set_param(i915, &param), 0); in invalid_engines()
  71 igt_assert_eq(__gem_context_set_param(i915, &param), -EINVAL); in invalid_engines()
  74 igt_assert_eq(__gem_context_set_param(i915, &param), -EINVAL); in invalid_engines()
  77 igt_assert_eq(__gem_context_set_param(i915, &param), -EINVAL); in invalid_engines()
  80 igt_assert_eq(__gem_context_set_param(i915, &param), 0); in invalid_engines()
  [all …]

gem_exec_schedule.c
  36 #include "i915/gem_ring.h"
  340 static uint32_t __batch_create(int i915, uint32_t offset) in __batch_create() argument
  345 handle = gem_create(i915, ALIGN(offset + 4, 4096)); in __batch_create()
  346 gem_write(i915, handle, offset, &bbe, sizeof(bbe)); in __batch_create()
  351 static uint32_t batch_create(int i915) in batch_create() argument
  353 return __batch_create(i915, 0); in batch_create()
  356 static void semaphore_userlock(int i915) in semaphore_userlock() argument
  359 .handle = batch_create(i915), in semaphore_userlock()
  365 igt_require(gem_scheduler_has_semaphores(i915)); in semaphore_userlock()
  374 scratch = gem_create(i915, 4096); in semaphore_userlock()
  [all …]

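The __batch_create() hits show the usual IGT idiom: allocate a GEM object and write a single MI_BATCH_BUFFER_END so it can be submitted as a no-op batch. A hedged sketch of that pattern in IGT terms, using the same helpers that appear in the snippets (not a standalone test binary):

/* Create a 4 KiB object, write MI_BATCH_BUFFER_END into it, submit it
 * once and wait for it to retire. */
#include "igt.h"

static void submit_noop(int i915)
{
	const uint32_t bbe = MI_BATCH_BUFFER_END;
	struct drm_i915_gem_exec_object2 obj = {
		.handle = gem_create(i915, 4096),
	};
	struct drm_i915_gem_execbuffer2 execbuf = {
		.buffers_ptr = to_user_pointer(&obj),
		.buffer_count = 1,
	};

	gem_write(i915, obj.handle, 0, &bbe, sizeof(bbe));
	gem_execbuf(i915, &execbuf);
	gem_sync(i915, obj.handle);   /* wait for the nop to retire */
	gem_close(i915, obj.handle);
}
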
gem_ctx_param.c
  31 #include "i915/gem_vm.h"
  38 static void set_priority(int i915) in set_priority() argument
  82 int fd = gem_reopen_driver(i915); in set_priority()
  134 static uint32_t __batch_create(int i915, uint32_t offset) in __batch_create() argument
  139 handle = gem_create(i915, ALIGN(offset + 4, 4096)); in __batch_create()
  140 gem_write(i915, handle, offset, &bbe, sizeof(bbe)); in __batch_create()
  145 static uint32_t batch_create(int i915) in batch_create() argument
  147 return __batch_create(i915, 0); in batch_create()
  150 static void test_vm(int i915) in test_vm() argument
  154 .handle = batch_create(i915), in test_vm()
  [all …]

gem_ctx_create.c
  63 static int create_ext_ioctl(int i915, in create_ext_ioctl() argument
  69 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT, arg)) { in create_ext_ioctl()
  333 static void basic_ext_param(int i915) in basic_ext_param() argument
  343 igt_require(create_ext_ioctl(i915, &create) == 0); in basic_ext_param()
  344 gem_context_destroy(i915, create.ctx_id); in basic_ext_param()
  347 igt_assert_eq(create_ext_ioctl(i915, &create), -EFAULT); in basic_ext_param()
  350 igt_assert_eq(create_ext_ioctl(i915, &create), -EINVAL); in basic_ext_param()
  353 if (create_ext_ioctl(i915, &create) != -ENODEV) { in basic_ext_param()
  354 gem_context_destroy(i915, create.ctx_id); in basic_ext_param()
  357 igt_assert_eq(create_ext_ioctl(i915, &create), -EFAULT); in basic_ext_param()
  [all …]

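create_ext_ioctl() wraps DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT. For reference, a minimal round trip through that ioctl without any extension chain might look like the sketch below; it assumes an already-open i915 fd and a recent i915_drm.h, and it is not the test's own code.

/* Create a context through the extended ioctl (no extensions), then
 * destroy it again.  Returns 0 on success, -1/errno on failure. */
#include <string.h>
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

static int create_and_destroy_ctx(int i915)
{
	struct drm_i915_gem_context_create_ext create;
	struct drm_i915_gem_context_destroy destroy;

	memset(&create, 0, sizeof(create));
	if (ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT, &create))
		return -1;

	memset(&destroy, 0, sizeof(destroy));
	destroy.ctx_id = create.ctx_id;
	return ioctl(i915, DRM_IOCTL_I915_GEM_CONTEXT_DESTROY, &destroy);
}
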
/aosp_15_r20/external/mesa3d/src/gallium/drivers/i915/

i915_state_emit.c
  44 validate_flush(struct i915_context *i915, unsigned *batch_space) in validate_flush() argument
  46 *batch_space = i915->flush_dirty ? 1 : 0; in validate_flush()
  50 emit_flush(struct i915_context *i915) in emit_flush() argument
  58 if (i915->flush_dirty & I915_FLUSH_CACHE) in emit_flush()
  60 else if (i915->flush_dirty & I915_PIPELINE_FLUSH) in emit_flush()
  90 emit_invariant(struct i915_context *i915) in emit_invariant() argument
  93 i915->batch, invariant_state, in emit_invariant()
  98 validate_immediate(struct i915_context *i915, unsigned *batch_space) in validate_immediate() argument
  104 i915->immediate_dirty; in validate_immediate()
  106 if (i915->immediate_dirty & (1 << I915_IMMEDIATE_S0) && i915->vbo) in validate_immediate()
  [all …]

i915_context.c
  62 struct i915_context *i915 = i915_context(pipe); in i915_draw_vbo() local
  63 struct draw_context *draw = i915->draw; in i915_draw_vbo()
  73 i915->dirty &= ~I915_NEW_VS_CONSTANTS; in i915_draw_vbo()
  75 if (i915->dirty) in i915_draw_vbo()
  76 i915_update_derived(i915); in i915_draw_vbo()
  81 for (i = 0; i < i915->nr_vertex_buffers; i++) { in i915_draw_vbo()
  82 const void *buf = i915->vertex_buffers[i].is_user_buffer in i915_draw_vbo()
  83 ? i915->vertex_buffers[i].buffer.user in i915_draw_vbo()
  86 if (!i915->vertex_buffers[i].buffer.resource) in i915_draw_vbo()
  88 buf = i915_buffer(i915->vertex_buffers[i].buffer.resource)->data; in i915_draw_vbo()
  [all …]

i915_state.c
  49 /* The i915 (and related graphics cores) do not support GL_CLAMP. The
  232 struct i915_context *i915 = i915_context(pipe); in i915_bind_blend_state() local
  234 if (i915->blend == blend) in i915_bind_blend_state()
  237 i915->blend = (struct i915_blend_state *)blend; in i915_bind_blend_state()
  239 i915->dirty |= I915_NEW_BLEND; in i915_bind_blend_state()
  252 struct i915_context *i915 = i915_context(pipe); in i915_set_blend_color() local
  257 i915->blend_color = *blend_color; in i915_set_blend_color()
  259 i915->dirty |= I915_NEW_BLEND; in i915_set_blend_color()
  266 struct i915_context *i915 = i915_context(pipe); in i915_set_stencil_ref() local
  268 i915->stencil_ref = stencil_ref; in i915_set_stencil_ref()
  [all …]

i915_state_dynamic.c
  38 * i915 indirect state mechanism.
  49 set_dynamic(struct i915_context *i915, unsigned offset, const unsigned state) in set_dynamic() argument
  51 if (i915->current.dynamic[offset] == state) in set_dynamic()
  54 i915->current.dynamic[offset] = state; in set_dynamic()
  55 i915->dynamic_dirty |= 1 << offset; in set_dynamic()
  56 i915->hardware_dirty |= I915_HW_DYNAMIC; in set_dynamic()
  60 set_dynamic_array(struct i915_context *i915, unsigned offset, in set_dynamic_array() argument
  65 if (!memcmp(src, &i915->current.dynamic[offset], dwords * 4)) in set_dynamic_array()
  69 i915->current.dynamic[offset + i] = src[i]; in set_dynamic_array()
  70 i915->dynamic_dirty |= 1 << (offset + i); in set_dynamic_array()
  [all …]

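set_dynamic() and set_dynamic_array() follow the classic state-caching pattern that the Gallium i915 driver uses throughout: compare against the cached value, store it, and mark a dirty bit so only changed state is re-emitted. A generic sketch of that pattern is below; the struct and names are hypothetical placeholders, not the Mesa driver's.

/* Generic "compare, cache, mark dirty" pattern.  Types and field names
 * are hypothetical. */
#include <stdbool.h>
#include <stdint.h>

#define HW_DYNAMIC_DIRTY (1u << 0)

struct state_cache {
	uint32_t dynamic[32];    /* cached dword values */
	uint32_t dynamic_dirty;  /* one bit per cached slot */
	uint32_t hardware_dirty; /* coarse per-group dirty flags */
};

static bool set_cached(struct state_cache *c, unsigned offset, uint32_t value)
{
	if (c->dynamic[offset] == value)
		return false;                  /* unchanged: nothing to re-emit */

	c->dynamic[offset] = value;
	c->dynamic_dirty |= 1u << offset;      /* re-emit just this dword */
	c->hardware_dirty |= HW_DYNAMIC_DIRTY; /* and revisit the group */
	return true;
}
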
i915_surface.c
  51 i915_util_blitter_save_states(struct i915_context *i915) in i915_util_blitter_save_states() argument
  53 util_blitter_save_blend(i915->blitter, (void *)i915->blend); in i915_util_blitter_save_states()
  54 util_blitter_save_depth_stencil_alpha(i915->blitter, in i915_util_blitter_save_states()
  55 (void *)i915->depth_stencil); in i915_util_blitter_save_states()
  56 util_blitter_save_stencil_ref(i915->blitter, &i915->stencil_ref); in i915_util_blitter_save_states()
  57 util_blitter_save_rasterizer(i915->blitter, (void *)i915->rasterizer); in i915_util_blitter_save_states()
  58 util_blitter_save_fragment_shader(i915->blitter, i915->fs); in i915_util_blitter_save_states()
  59 util_blitter_save_vertex_shader(i915->blitter, i915->vs); in i915_util_blitter_save_states()
  60 util_blitter_save_viewport(i915->blitter, &i915->viewport); in i915_util_blitter_save_states()
  61 util_blitter_save_scissor(i915->blitter, &i915->scissor); in i915_util_blitter_save_states()
  [all …]

i915_prim_vbuf.c
  53 * Primitive renderer for i915.
  58 struct i915_context *i915; member
  97 * hw_offset to i915->vbo_offset and vbo to i915->vbo.
  106 struct i915_context *i915 = i915_render->i915; in i915_vbuf_update_vbo_state() local
  108 if (i915->vbo != i915_render->vbo || in i915_vbuf_update_vbo_state()
  109 i915->vbo_offset != i915_render->vbo_hw_offset) { in i915_vbuf_update_vbo_state()
  110 i915->vbo = i915_render->vbo; in i915_vbuf_update_vbo_state()
  111 i915->vbo_offset = i915_render->vbo_hw_offset; in i915_vbuf_update_vbo_state()
  112 i915->dirty |= I915_NEW_VBO; in i915_vbuf_update_vbo_state()
  127 struct i915_context *i915 = i915_render->i915; in i915_vbuf_render_get_vertex_info() local
  [all …]

i915_state_immediate.c
  42 set_immediate(struct i915_context *i915, unsigned offset, const unsigned state) in set_immediate() argument
  44 if (i915->current.immediate[offset] == state) in set_immediate()
  47 i915->current.immediate[offset] = state; in set_immediate()
  48 i915->immediate_dirty |= 1 << offset; in set_immediate()
  49 i915->hardware_dirty |= I915_HW_IMMEDIATE; in set_immediate()
  56 upload_S0S1(struct i915_context *i915) in upload_S0S1() argument
  62 LIS0 = i915->vbo_offset; in upload_S0S1()
  65 if (i915->dirty & I915_NEW_VBO) { in upload_S0S1()
  66 i915->immediate_dirty |= 1 << I915_IMMEDIATE_S0; in upload_S0S1()
  67 i915->hardware_dirty |= I915_HW_IMMEDIATE; in upload_S0S1()
  [all …]

i915_state_static.c
  79 update_framebuffer(struct i915_context *i915) in update_framebuffer() argument
  81 struct pipe_surface *cbuf_surface = i915->framebuffer.cbufs[0]; in update_framebuffer()
  82 struct pipe_surface *depth_surface = i915->framebuffer.zsbuf; in update_framebuffer()
  93 i915->current.cbuf_bo = tex->buffer; in update_framebuffer()
  94 i915->current.cbuf_flags = surf->buf_info; in update_framebuffer()
  95 i915->current.cbuf_offset = 0; in update_framebuffer()
  102 if (y + i915->framebuffer.height >= (1 << (I915_MAX_TEXTURE_2D_LEVELS - 1))) { in update_framebuffer()
  106 i915->current.cbuf_offset = y1 * tex->stride; in update_framebuffer()
  109 i915->current.cbuf_bo = NULL; in update_framebuffer()
  112 i915->static_dirty |= I915_DST_BUF_COLOR; in update_framebuffer()
  [all …]

i915_state_derived.c
  44 calculate_vertex_layout(struct i915_context *i915) in calculate_vertex_layout() argument
  46 const struct i915_fragment_shader *fs = i915->fs; in calculate_vertex_layout()
  84 src = draw_find_shader_output(i915->draw, TGSI_SEMANTIC_POSITION, 0); in calculate_vertex_layout()
  96 if (i915->rasterizer->templ.point_size_per_vertex) { in calculate_vertex_layout()
  97 src = draw_find_shader_output(i915->draw, TGSI_SEMANTIC_PSIZE, 0); in calculate_vertex_layout()
  106 src = draw_find_shader_output(i915->draw, TGSI_SEMANTIC_COLOR, 0); in calculate_vertex_layout()
  113 src = draw_find_shader_output(i915->draw, TGSI_SEMANTIC_COLOR, 1); in calculate_vertex_layout()
  120 src = draw_find_shader_output(i915->draw, TGSI_SEMANTIC_FOG, 0); in calculate_vertex_layout()
  129 src = draw_find_shader_output(i915->draw, fs->texcoords[i].semantic, in calculate_vertex_layout()
  151 if (memcmp(&i915->current.vertex_info, &vinfo, sizeof(vinfo))) { in calculate_vertex_layout()
  [all …]

/aosp_15_r20/external/minigbm/

i915.c
  69 static void i915_info_from_device_id(struct i915_device *i915) in i915_info_from_device_id() argument
  125 i915->graphics_version = 4; in i915_info_from_device_id()
  126 i915->is_xelpd = false; in i915_info_from_device_id()
  127 i915->is_mtl = false; in i915_info_from_device_id()
  130 if (gen3_ids[i] == i915->device_id) in i915_info_from_device_id()
  131 i915->graphics_version = 3; in i915_info_from_device_id()
  135 if (gen4_ids[i] == i915->device_id) in i915_info_from_device_id()
  136 i915->graphics_version = 4; in i915_info_from_device_id()
  140 if (gen5_ids[i] == i915->device_id) in i915_info_from_device_id()
  141 i915->graphics_version = 5; in i915_info_from_device_id()
  [all …]

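i915_info_from_device_id() resolves the PCI device id against per-generation id tables, starting from a default graphics version and overriding it on a match. A generic sketch of that lookup pattern follows; the struct, field names, and id values are illustrative placeholders, not minigbm's tables.

/* Hypothetical sketch of an id-table lookup: default the generation,
 * then override it if the PCI id matches a known table. */
#include <stddef.h>
#include <stdint.h>

struct gpu_info {
	uint16_t device_id;
	int graphics_version;
};

static const uint16_t gen3_ids[] = { 0x1111, 0x2222 };  /* placeholder ids */
static const uint16_t gen5_ids[] = { 0x3333, 0x4444 };  /* placeholder ids */

static void info_from_device_id(struct gpu_info *info)
{
	info->graphics_version = 4;  /* conservative default */

	for (size_t i = 0; i < sizeof(gen3_ids) / sizeof(gen3_ids[0]); i++)
		if (gen3_ids[i] == info->device_id)
			info->graphics_version = 3;

	for (size_t i = 0; i < sizeof(gen5_ids) / sizeof(gen5_ids[0]); i++)
		if (gen5_ids[i] == info->device_id)
			info->graphics_version = 5;
}
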
/aosp_15_r20/external/mesa3d/src/intel/tools/

intel_noop_drm_shim.c
  54 static struct i915_device i915 = {}; variable
  200 if (i915.devinfo.ver >= 8 && i915.devinfo.platform != INTEL_PLATFORM_CHV) in i915_ioctl_gem_context_getparam()
  218 *gp->value = i915.device_id; in i915_ioctl_get_param()
  224 *gp->value = i915.devinfo.timestamp_frequency; in i915_ioctl_get_param()
  227 if (i915.devinfo.ver < 6) in i915_ioctl_get_param()
  229 else if (i915.devinfo.ver <= 7) in i915_ioctl_get_param()
  268 /* Most recent version in drivers/gpu/drm/i915/i915_cmd_parser.c */ in i915_ioctl_get_param()
  277 for (uint32_t s = 0; s < i915.devinfo.num_slices; s++) in i915_ioctl_get_param()
  278 *gp->value += i915.devinfo.num_subslices[s]; in i915_ioctl_get_param()
  282 for (uint32_t s = 0; s < i915.devinfo.num_slices; s++) in i915_ioctl_get_param()
  [all …]

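The no-op shim above answers DRM_IOCTL_I915_GETPARAM queries (chipset id, timestamp frequency, subslice totals) from a static devinfo. For reference, a real client issues the same query roughly as in the sketch below; it assumes an already-open i915 fd.

/* Query the PCI device id via DRM_IOCTL_I915_GETPARAM.  Returns 0 on
 * success with *device_id filled in, -1/errno on failure. */
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

static int query_chipset_id(int i915, int *device_id)
{
	struct drm_i915_getparam gp = {
		.param = I915_PARAM_CHIPSET_ID,
		.value = device_id,
	};

	return ioctl(i915, DRM_IOCTL_I915_GETPARAM, &gp);
}
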
/aosp_15_r20/external/igt-gpu-tools/tests/amdgpu/

amd_prime.c
  171 static void i915_to_amd(int i915, int amd, amdgpu_device_handle device) in i915_to_amd() argument
  182 for_each_physical_engine(i915, engine) in i915_to_amd()
  187 obj[1].handle = gem_create(i915, 4096); in i915_to_amd()
  188 gem_write(i915, obj[1].handle, 0, &bbe, sizeof(bbe)); in i915_to_amd()
  194 plug(i915, &c); in i915_to_amd()
  199 execbuf.rsvd1 = gem_context_create(i915); in i915_to_amd()
  203 gem_execbuf(i915, &execbuf); in i915_to_amd()
  206 gem_context_destroy(i915, execbuf.rsvd1); in i915_to_amd()
  209 if (!gem_uses_full_ppgtt(i915)) in i915_to_amd()
  233 dmabuf = prime_handle_to_fd(i915, obj[1].handle); in i915_to_amd()
  [all …]

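i915_to_amd() exports an i915 GEM object as a dma-buf and hands it to amdgpu. A trimmed sketch of that cross-driver hand-off is below, combining the IGT prime helper with the libdrm amdgpu import call; it assumes both devices are already open and initialized, and error handling is reduced to an assert.

/* Export an i915 GEM object as a dma-buf and import it into amdgpu. */
#include <unistd.h>
#include <amdgpu.h>
#include "igt.h"

static amdgpu_bo_handle share_to_amd(int i915, uint32_t handle,
				     amdgpu_device_handle device)
{
	struct amdgpu_bo_import_result result = {};
	int dmabuf;

	dmabuf = prime_handle_to_fd(i915, handle);
	igt_assert_eq(amdgpu_bo_import(device, amdgpu_bo_handle_type_dma_buf_fd,
				       dmabuf, &result), 0);
	close(dmabuf);

	return result.buf_handle;
}
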
/aosp_15_r20/external/igt-gpu-tools/lib/i915/

gem_context.c
  30 #include "i915/gem_context.h"
  48 * @fd: open i915 drm file descriptor
  67 * @fd: open i915 drm file descriptor
  96 * @fd: open i915 drm file descriptor
  130 * @fd: open i915 drm file descriptor
  131 * @ctx_id: i915 context id
  155 * @fd: open i915 drm file descriptor
  156 * @p: i915 context parameter
  181 * @fd: open i915 drm file descriptor
  182 * @p: i915 context parameter
  [all …]

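The doc comments above describe the context helpers themselves. As a usage sketch, a minimal IGT-style snippet that creates a context, raises its priority through gem_context_set_param(), and destroys it could look like this; the helper names come from the library, the priority value is arbitrary, and this is not an actual subtest.

/* Create a context, raise its priority, destroy it. */
#include "igt.h"
#include "i915/gem_context.h"

static void bump_context_priority(int i915)
{
	uint32_t ctx = gem_context_create(i915);
	struct drm_i915_gem_context_param p = {
		.ctx_id = ctx,
		.param = I915_CONTEXT_PARAM_PRIORITY,
		.value = 512,   /* arbitrary, within the -1023..1023 range */
	};

	gem_context_set_param(i915, &p);
	gem_context_destroy(i915, ctx);
}
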
gem_vm.c
  30 #include "i915/gem_vm.h"
  43 * @i915: open i915 drm file descriptor
  47 bool gem_has_vm(int i915) in gem_has_vm() argument
  51 __gem_vm_create(i915, &vm_id); in gem_has_vm()
  53 gem_vm_destroy(i915, vm_id); in gem_has_vm()
  60 * @i915: open i915 drm file descriptor
  65 void gem_require_vm(int i915) in gem_require_vm() argument
  67 igt_require(gem_has_vm(i915)); in gem_require_vm()
  70 int __gem_vm_create(int i915, uint32_t *vm_id) in __gem_vm_create() argument
  75 if (igt_ioctl(i915, DRM_IOCTL_I915_GEM_VM_CREATE, &ctl) == 0) { in __gem_vm_create()
  [all …]

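Correspondingly, a usage sketch for the VM helpers documented above: skip via gem_require_vm() when the kernel lacks the uapi, otherwise create and destroy one VM. Helper names are taken from the snippets; this is not a standalone test.

/* Skip unless ppGTT VMs are exposed, then create and destroy one. */
#include "igt.h"
#include "i915/gem_vm.h"

static void vm_roundtrip(int i915)
{
	uint32_t vm_id;

	gem_require_vm(i915);   /* igt_skip() without VM support */
	igt_assert_eq(__gem_vm_create(i915, &vm_id), 0);
	gem_vm_destroy(i915, vm_id);
}
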
gem_submission.c
  31 #include "i915/gem_engine_topology.h"
  40 #include "i915/gem_submission.h"
  69 * @fd: open i915 drm file descriptor
  104 * @fd: open i915 drm file descriptor
  128 * @fd: open i915 drm file descriptor
  140 * @fd: open i915 drm file descriptor
  152 * @fd: open i915 drm file descriptor
  164 * @fd: re-open the i915 drm file descriptor
  180 static bool is_wedged(int i915) in is_wedged() argument
  183 if (ioctl(i915, DRM_IOCTL_I915_GEM_THROTTLE)) in is_wedged()
  [all …]

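is_wedged() above relies on DRM_IOCTL_I915_GEM_THROTTLE failing with EIO once the driver has declared the GPU wedged. The same check outside IGT looks roughly like the sketch below; it assumes an already-open i915 fd.

/* The throttle ioctl fails with EIO once i915 marks the GPU as wedged. */
#include <errno.h>
#include <stdbool.h>
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

static bool gpu_is_wedged(int i915)
{
	if (ioctl(i915, DRM_IOCTL_I915_GEM_THROTTLE) == 0)
		return false;

	return errno == EIO;
}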