/aosp_15_r20/external/mesa3d/src/intel/vulkan/
genX_cmd_draw.c:

      39  cmd_buffer_alloc_gfx_push_constants(struct anv_cmd_buffer *cmd_buffer)
      42     anv_pipeline_to_graphics(cmd_buffer->state.gfx.base.pipeline);
      54     if (stages == cmd_buffer->state.gfx.push_constant_stages)
      59     const struct intel_device_info *devinfo = cmd_buffer->device->info;
      79     anv_batch_emit(&cmd_buffer->batch,
      88     anv_batch_emit(&cmd_buffer->batch,
     102     anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_CONSTANT_ALL), c) {
     105        c.MOCS = anv_mocs(cmd_buffer->device, NULL, 0);
     109     cmd_buffer->state.gfx.push_constant_stages = stages;
     120     cmd_buffer->state.push_constants_dirty |= stages;
     [all …]
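The pattern visible in cmd_buffer_alloc_gfx_push_constants — compare the requested stage set against a cached one, skip the hardware re-programming when they match, and mark the stages dirty when they do not — is a common redundant-state elimination in command-buffer recording. A minimal standalone sketch of the idea; all names here are hypothetical, not the actual anv API:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stage bits, mirroring a VkShaderStageFlags-style mask. */
    enum { STAGE_VS = 1 << 0, STAGE_FS = 1 << 1, STAGE_CS = 1 << 2 };

    struct cmd_state {
       uint32_t push_constant_stages;  /* stages currently programmed */
       uint32_t push_constants_dirty;  /* stages whose data must be re-uploaded */
    };

    /* Re-program push-constant ranges only when the active stage set changed. */
    static void alloc_push_constants(struct cmd_state *state, uint32_t stages)
    {
       if (stages == state->push_constant_stages)
          return;  /* hardware layout already matches; nothing to emit */

       printf("re-emitting push constant layout for stages 0x%x\n", stages);
       state->push_constant_stages = stages;
       /* Any stage whose range may have moved must re-upload its data. */
       state->push_constants_dirty |= stages;
    }

    int main(void)
    {
       struct cmd_state state = {0};
       alloc_push_constants(&state, STAGE_VS | STAGE_FS);  /* emits */
       alloc_push_constants(&state, STAGE_VS | STAGE_FS);  /* skipped */
       alloc_push_constants(&state, STAGE_CS);             /* emits again */
       return 0;
    }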
anv_cmd_buffer.c:

      45  anv_cmd_state_init(struct anv_cmd_buffer *cmd_buffer)
      47     struct anv_cmd_state *state = &cmd_buffer->state;
      58     cmd_buffer->device->gfx_dirty_state,
      63  anv_cmd_pipeline_state_finish(struct anv_cmd_buffer *cmd_buffer,
      70  anv_cmd_state_finish(struct anv_cmd_buffer *cmd_buffer)
      72     struct anv_cmd_state *state = &cmd_buffer->state;
      74     anv_cmd_pipeline_state_finish(cmd_buffer, &state->gfx.base);
      75     anv_cmd_pipeline_state_finish(cmd_buffer, &state->compute.base);
      79  anv_cmd_state_reset(struct anv_cmd_buffer *cmd_buffer)
      81     anv_cmd_state_finish(cmd_buffer);
     [all …]
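The init/finish/reset trio above follows the usual lifecycle convention in which reset is implemented as finish followed by init, so a recycled command buffer starts from exactly the same state as a freshly created one. A small self-contained sketch of that convention, with hypothetical types rather than the Mesa structs:

    #include <stdlib.h>
    #include <string.h>

    struct cmd_state {
       void *push_data;   /* per-recording allocation, released on finish */
    };

    static void cmd_state_init(struct cmd_state *state)
    {
       memset(state, 0, sizeof(*state));
       state->push_data = malloc(256);
    }

    static void cmd_state_finish(struct cmd_state *state)
    {
       free(state->push_data);
       state->push_data = NULL;
    }

    /* Reset == finish + init: recycling a buffer yields a pristine state. */
    static void cmd_state_reset(struct cmd_state *state)
    {
       cmd_state_finish(state);
       cmd_state_init(state);
    }

    int main(void)
    {
       struct cmd_state state;
       cmd_state_init(&state);
       cmd_state_reset(&state);   /* safe to call any number of times */
       cmd_state_finish(&state);
       return 0;
    }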
genX_cmd_draw_generated_indirect.h:

      45  genX(cmd_buffer_emit_generate_draws)(struct anv_cmd_buffer *cmd_buffer,
      59     struct anv_device *device = cmd_buffer->device;
      68     anv_pipeline_to_graphics(cmd_buffer->state.gfx.base.pipeline);
      70     const bool use_tbimr = cmd_buffer->state.gfx.dyn_state.use_tbimr;
      88     (cmd_buffer->state.conditional_render_enabled ?
     115  genX(cmd_buffer_emit_indirect_generated_draws_init)(struct anv_cmd_buffer *cmd_buffer)
     117     anv_batch_emit_ensure_space(&cmd_buffer->generation.batch, 4);
     119     trace_intel_begin_generate_draws(&cmd_buffer->trace);
     121     anv_batch_emit(&cmd_buffer->batch, GENX(MI_BATCH_BUFFER_START), bbs) {
     124     anv_batch_current_address(&cmd_buffer->generation.batch);
     [all …]
genX_cmd_compute.c:

      41  genX(cmd_buffer_ensure_cfe_state)(struct anv_cmd_buffer *cmd_buffer,
      45     assert(cmd_buffer->state.current_pipeline == GPGPU);
      47     struct anv_cmd_compute_state *comp_state = &cmd_buffer->state.compute;
      52     const struct intel_device_info *devinfo = cmd_buffer->device->info;
      53     anv_batch_emit(&cmd_buffer->batch, GENX(CFE_STATE), cfe) {
      58     (cmd_buffer->vk.pool->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) ?
      59     &cmd_buffer->device->protected_scratch_pool :
      60     &cmd_buffer->device->scratch_pool;
      62     anv_scratch_pool_alloc(cmd_buffer->device, scratch_pool,
      65     anv_reloc_list_add_bo(cmd_buffer->batch.relocs, scratch_bo);
     [all …]
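cmd_buffer_ensure_cfe_state illustrates an "ensure" helper: hardware state is only re-emitted when a request exceeds what was last programmed, so repeated dispatches with equal or smaller requirements cost nothing. A standalone sketch of that grow-only pattern, under hypothetical names:

    #include <stdint.h>
    #include <stdio.h>

    struct compute_state {
       uint32_t scratch_size;   /* scratch space currently programmed */
    };

    /* Grow-only: re-emit hardware state only when more scratch is needed. */
    static void ensure_scratch(struct compute_state *cs, uint32_t needed)
    {
       if (needed <= cs->scratch_size)
          return;  /* current programming already covers this dispatch */

       printf("emitting state for %u bytes of scratch\n", needed);
       cs->scratch_size = needed;
    }

    int main(void)
    {
       struct compute_state cs = {0};
       ensure_scratch(&cs, 4096);   /* emits */
       ensure_scratch(&cs, 1024);   /* skipped, already covered */
       ensure_scratch(&cs, 8192);   /* emits again */
       return 0;
    }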
genX_cmd_buffer.c:

      40  static void genX(flush_pipeline_select)(struct anv_cmd_buffer *cmd_buffer,
      81  fill_state_base_addr(struct anv_cmd_buffer *cmd_buffer,
      84     struct anv_device *device = cmd_buffer->device;
      91     if (cmd_buffer->state.pending_db_mode ==
      93     cmd_buffer->state.pending_db_mode =
      94     cmd_buffer->device->vk.enabled_extensions.EXT_descriptor_buffer ?
     113     cmd_buffer->vk.pool->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT ?
     129     anv_cmd_buffer_surface_base_address(cmd_buffer);
     166     if (cmd_buffer->state.pending_db_mode == ANV_CMD_DESCRIPTOR_BUFFER_MODE_BUFFER) {
     178     cmd_buffer->state.descriptor_buffers.surfaces_address != 0 ?
     [all …]
anv_batch_chain.c:

     251  anv_batch_bo_create(struct anv_cmd_buffer *cmd_buffer,
     257     struct anv_batch_bo *bbo = vk_zalloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
     260     return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);
     262     result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool,
     267     const bool uses_relocs = cmd_buffer->device->physical->uses_relocs;
     268     result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->vk.pool->alloc, uses_relocs);
     277     anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);
     279     vk_free(&cmd_buffer->vk.pool->alloc, bbo);
     285  anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,
     291     struct anv_batch_bo *bbo = vk_alloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
     [all …]
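anv_batch_bo_create shows the classic goto-based unwind for multi-step allocation: each failure jumps to a label that releases only what was successfully acquired, in reverse order. A self-contained sketch of that shape, with plain malloc standing in for the pool and reloc-list allocations:

    #include <stdlib.h>

    struct batch_bo { void *bo; void *relocs; };

    static struct batch_bo *batch_bo_create(void)
    {
       struct batch_bo *bbo = calloc(1, sizeof(*bbo));
       if (bbo == NULL)
          return NULL;

       bbo->bo = malloc(4096);          /* stand-in for the BO pool alloc */
       if (bbo->bo == NULL)
          goto fail_alloc;

       bbo->relocs = malloc(256);       /* stand-in for the reloc list init */
       if (bbo->relocs == NULL)
          goto fail_bo_alloc;

       return bbo;

     fail_bo_alloc:
       free(bbo->bo);                   /* undo step 2 */
     fail_alloc:
       free(bbo);                       /* undo step 1 */
       return NULL;
    }

    int main(void)
    {
       struct batch_bo *bbo = batch_bo_create();
       if (bbo) {
          free(bbo->relocs);
          free(bbo->bo);
          free(bbo);
       }
       return 0;
    }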
genX_query.c:

      64  emit_query_mi_flush_availability(struct anv_cmd_buffer *cmd_buffer,
      68     anv_batch_emit(&cmd_buffer->batch, GENX(MI_FLUSH_DW), flush) {
     369  khr_perf_query_ensure_relocs(struct anv_cmd_buffer *cmd_buffer)
     371     if (anv_batch_has_error(&cmd_buffer->batch))
     374     if (cmd_buffer->self_mod_locations)
     377     struct anv_device *device = cmd_buffer->device;
     380     cmd_buffer->self_mod_locations =
     381        vk_alloc(&cmd_buffer->vk.pool->alloc,
     382                 pdevice->n_perf_query_commands * sizeof(*cmd_buffer->self_mod_locations), 8,
     385     if (!cmd_buffer->self_mod_locations) {
     [all …]
genX_blorp_exec.c:

    blorp_measure_start():
      42     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
      43     trace_intel_begin_blorp(&cmd_buffer->trace);
      44     anv_measure_snapshot(cmd_buffer,
    blorp_measure_end():
      52     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
      53     trace_intel_end_blorp(&cmd_buffer->trace,
    blorp_emit_dwords():
      67     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
      68     return anv_batch_emit_dwords(&cmd_buffer->batch, n);
    blorp_emit_reloc():
      75     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
      80     anv_reloc_list_add_bo(cmd_buffer->batch.relocs, anv_addr.bo);
    blorp_surface_reloc():
      88     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
     [all …]
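Every blorp callback above recovers the driver's command buffer from the batch's driver_batch field, the standard opaque-context-pointer idiom for layering a generic library over driver-specific state. A minimal sketch of that idiom, with hypothetical structs rather than blorp's actual types:

    #include <stdio.h>

    /* Library-side batch: carries an opaque pointer for the driver. */
    struct lib_batch {
       void *driver_batch;
    };

    /* Driver-side command buffer the callbacks need access to. */
    struct drv_cmd_buffer {
       int emitted_dwords;
    };

    /* Library callback: cast the opaque pointer back to the driver type. */
    static void lib_emit_dwords(struct lib_batch *batch, int n)
    {
       struct drv_cmd_buffer *cmd_buffer = batch->driver_batch;
       cmd_buffer->emitted_dwords += n;
    }

    int main(void)
    {
       struct drv_cmd_buffer cmd = { .emitted_dwords = 0 };
       struct lib_batch batch = { .driver_batch = &cmd };
       lib_emit_dwords(&batch, 8);
       printf("%d dwords emitted\n", cmd.emitted_dwords);
       return 0;
    }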
anv_blorp.c:

     143  anv_blorp_batch_init(struct anv_cmd_buffer *cmd_buffer,
     146     VkQueueFlags queue_flags = cmd_buffer->queue_family->queueFlags;
     162     blorp_batch_init(&cmd_buffer->device->blorp.context, batch, cmd_buffer, flags);
     172  get_usage_flag_for_cmd_buffer(const struct anv_cmd_buffer *cmd_buffer,
     177     switch (cmd_buffer->queue_family->engine_class) {
     201  get_blorp_surf_for_anv_address(struct anv_cmd_buffer *cmd_buffer,
     211     get_usage_flag_for_cmd_buffer(cmd_buffer, is_dest, protected);
     218     .mocs = anv_mocs(cmd_buffer->device, address.bo, usage),
     222     ok = isl_surf_init(&cmd_buffer->device->isl_dev, isl_surf,
     238  get_blorp_surf_for_anv_buffer(struct anv_cmd_buffer *cmd_buffer,
     [all …]
anv_measure.c:

      62  config_from_command_buffer(struct anv_cmd_buffer *cmd_buffer)
      64     return cmd_buffer->device->physical->measure_device.config;
      68  anv_measure_init(struct anv_cmd_buffer *cmd_buffer)
      70     struct intel_measure_config *config = config_from_command_buffer(cmd_buffer);
      71     struct anv_device *device = cmd_buffer->device;
      74     cmd_buffer->measure = NULL;
      85     vk_alloc(&cmd_buffer->vk.pool->alloc,
      90     cmd_buffer->measure = measure;
     107  anv_measure_start_snapshot(struct anv_cmd_buffer *cmd_buffer,
     112     struct anv_batch *batch = &cmd_buffer->batch;
     [all …]
genX_cmd_video.c:

      35     ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
      39     cmd_buffer->video.vid = vid;
      40     cmd_buffer->video.params = params;
      47     ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
      50     anv_batch_emit(&cmd_buffer->batch, GENX(MI_FLUSH_DW), flush) {
      63     cmd_buffer->video.params->rc_mode = rate_control_info->rateControlMode;
      65     cmd_buffer->video.params->rc_mode = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR;
      73     ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
      75     cmd_buffer->video.vid = NULL;
      76     cmd_buffer->video.params = NULL;
     [all …]
/aosp_15_r20/external/mesa3d/src/amd/vulkan/
radv_cmd_buffer.c:

      56  static void radv_handle_image_transition(struct radv_cmd_buffer *cmd_buffer, struct radv_image *ima…
      62  radv_bind_dynamic_state(struct radv_cmd_buffer *cmd_buffer, const struct radv_dynamic_state *src)
      64     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
      66     struct radv_dynamic_state *dest = &cmd_buffer->state.dynamic;
     253     cmd_buffer->state.dirty_dynamic |= dest_mask;
     258     cmd_buffer->state.dirty |= RADV_CMD_DIRTY_GUARDBAND;
     262     cmd_buffer->state.dirty |= RADV_CMD_DIRTY_RBPLUS;
     266     cmd_buffer->state.dirty |= RADV_CMD_DIRTY_FBFETCH_OUTPUT;
     271  radv_cmd_buffer_uses_mec(struct radv_cmd_buffer *cmd_buffer)
     273     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
     [all …]
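radv_bind_dynamic_state copies a source dynamic-state block into the command buffer and accumulates a dirty mask recording which state groups actually changed, so later flushes re-emit only the affected hardware state. A reduced sketch of that compare-and-mark pattern, with hypothetical fields and dirty bits:

    #include <stdint.h>
    #include <stdio.h>

    enum {
       DIRTY_VIEWPORT = 1u << 0,
       DIRTY_SCISSOR  = 1u << 1,
    };

    struct dynamic_state {
       float viewport_w, viewport_h;
       int   scissor_x, scissor_y;
    };

    /* Copy src over dest, returning a mask of the groups that changed. */
    static uint32_t bind_dynamic_state(struct dynamic_state *dest,
                                       const struct dynamic_state *src)
    {
       uint32_t dirty = 0;

       if (dest->viewport_w != src->viewport_w ||
           dest->viewport_h != src->viewport_h)
          dirty |= DIRTY_VIEWPORT;
       if (dest->scissor_x != src->scissor_x ||
           dest->scissor_y != src->scissor_y)
          dirty |= DIRTY_SCISSOR;

       *dest = *src;
       return dirty;
    }

    int main(void)
    {
       struct dynamic_state cur = {0}, next = { 1920.0f, 1080.0f, 0, 0 };
       uint32_t dirty = bind_dynamic_state(&cur, &next);
       printf("dirty mask: 0x%x\n", dirty);  /* viewport bit only */
       return 0;
    }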
radv_video_enc.c:

     255  radv_enc_set_emulation_prevention(struct radv_cmd_buffer *cmd_buffer, bool set)
     257     struct radv_enc_state *enc = &cmd_buffer->video.enc;
     280  radv_enc_output_one_byte(struct radv_cmd_buffer *cmd_buffer, unsigned char byte)
     282     struct radeon_cmdbuf *cs = cmd_buffer->cs;
     283     struct radv_enc_state *enc = &cmd_buffer->video.enc;
     296  radv_enc_emulation_prevention(struct radv_cmd_buffer *cmd_buffer, unsigned char byte)
     298     struct radv_enc_state *enc = &cmd_buffer->video.enc;
     301     radv_enc_output_one_byte(cmd_buffer, 0x03);
     310  radv_enc_code_fixed_bits(struct radv_cmd_buffer *cmd_buffer, unsigned int value, unsigned int num_b…
     312     struct radv_enc_state *enc = &cmd_buffer->video.enc;
     [all …]
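radv_enc_emulation_prevention implements the H.264/HEVC emulation-prevention rule: while writing a NAL unit, any byte <= 0x03 that would follow two consecutive zero bytes gets an 0x03 byte inserted first, so a start code (00 00 01) can never appear inside the payload. A standalone sketch of that rule, with a simple byte buffer standing in for the radeon command stream:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct bitstream {
       uint8_t out[64];
       size_t  len;
       int     num_zeros;   /* consecutive 0x00 bytes emitted so far */
    };

    static void put_byte(struct bitstream *bs, uint8_t byte)
    {
       bs->out[bs->len++] = byte;
    }

    /* Emit one payload byte, inserting 0x03 where the spec requires it. */
    static void emit_with_emulation_prevention(struct bitstream *bs, uint8_t byte)
    {
       if (bs->num_zeros >= 2 && byte <= 0x03) {
          put_byte(bs, 0x03);     /* emulation_prevention_three_byte */
          bs->num_zeros = 0;
       }
       put_byte(bs, byte);
       bs->num_zeros = (byte == 0x00) ? bs->num_zeros + 1 : 0;
    }

    int main(void)
    {
       struct bitstream bs = {0};
       const uint8_t payload[] = { 0x00, 0x00, 0x01, 0xab };  /* would alias a start code */
       for (size_t i = 0; i < sizeof(payload); i++)
          emit_with_emulation_prevention(&bs, payload[i]);
       for (size_t i = 0; i < bs.len; i++)
          printf("%02x ", bs.out[i]);    /* prints: 00 00 03 01 ab */
       printf("\n");
       return 0;
    }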
radv_query.c:

     992  radv_query_shader(struct radv_cmd_buffer *cmd_buffer, VkPipeline *pipeline, struct radeon_winsys_bo…
     997     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
    1004     vk_command_buffer_set_error(&cmd_buffer->vk, ret);
    1012     radv_meta_save(&saved_state, cmd_buffer,
    1022     …radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE, *pipel…
    1025     cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, device->meta_state.query.p_layout, 0, 2,
    1057     …vk_common_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer), device->meta_state.query.p_layou…
    1060     cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_INV_L2 | RADV_CMD_FLAG_INV_VCACHE;
    1063     cmd_buffer->state.flush_bits |= RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER;
    1065     radv_unaligned_dispatch(cmd_buffer, count, 1, 1);
     [all …]
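radv_query_shader brackets its internal compute dispatch with radv_meta_save and the matching radv_meta_restore (visible in radv_meta_buffer.c below), so driver-internal shaders can borrow the command buffer without clobbering the application's bindings. A minimal sketch of that save/restore discipline, using a hypothetical state struct:

    #include <stdio.h>

    struct cmd_state   { int bound_pipeline; };
    struct saved_state { int bound_pipeline; };

    static void meta_save(struct saved_state *save, const struct cmd_state *state)
    {
       save->bound_pipeline = state->bound_pipeline;   /* snapshot app state */
    }

    static void meta_restore(const struct saved_state *save, struct cmd_state *state)
    {
       state->bound_pipeline = save->bound_pipeline;   /* put it back */
    }

    int main(void)
    {
       struct cmd_state state = { .bound_pipeline = 7 };  /* the app's pipeline */
       struct saved_state save;

       meta_save(&save, &state);
       state.bound_pipeline = 42;   /* internal meta pipeline for the query resolve */
       /* ... internal dispatch would be recorded here ... */
       meta_restore(&save, &state);

       printf("pipeline after meta op: %d\n", state.bound_pipeline);  /* 7 */
       return 0;
    }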
/aosp_15_r20/external/mesa3d/src/broadcom/vulkan/
v3dv_cmd_buffer.c:

      71  cmd_buffer_init(struct v3dv_cmd_buffer *cmd_buffer,
      79     uint8_t *cmd_buffer_driver_start = ((uint8_t *) cmd_buffer) + base_size;
      80     memset(cmd_buffer_driver_start, 0, sizeof(*cmd_buffer) - base_size);
      82     cmd_buffer->device = device;
      84     list_inithead(&cmd_buffer->private_objs);
      85     list_inithead(&cmd_buffer->jobs);
      87     cmd_buffer->state.subpass_idx = -1;
      88     cmd_buffer->state.meta.subpass_idx = -1;
      90     cmd_buffer->status = V3DV_CMD_BUFFER_STATUS_INITIALIZED;
     100     struct v3dv_cmd_buffer *cmd_buffer;
     [all …]
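cmd_buffer_init zeroes only the driver-specific tail of the structure, because the leading common base (the vk_command_buffer) was already initialized by shared code and must not be wiped. A sketch of that offset-memset idiom, with a hypothetical layout:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    struct base_object { int handle; };        /* initialized by shared code */

    struct drv_cmd_buffer {
       struct base_object base;                /* must stay intact */
       int status;                             /* driver fields start here */
       int subpass_idx;
    };

    static void cmd_buffer_init(struct drv_cmd_buffer *cmd_buffer)
    {
       const size_t base_size = sizeof(struct base_object);
       uint8_t *driver_start = (uint8_t *)cmd_buffer + base_size;

       /* Zero everything after the base struct, leaving the base alone. */
       memset(driver_start, 0, sizeof(*cmd_buffer) - base_size);

       cmd_buffer->subpass_idx = -1;           /* "no subpass bound yet" */
    }

    int main(void)
    {
       struct drv_cmd_buffer cmd = { .base = { .handle = 99 }, .status = 5 };
       cmd_buffer_init(&cmd);
       printf("handle=%d status=%d subpass=%d\n",
              cmd.base.handle, cmd.status, cmd.subpass_idx);  /* 99 0 -1 */
       return 0;
    }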
v3dv_uniforms.c:

      85  check_push_constants_ubo(struct v3dv_cmd_buffer *cmd_buffer,
      88     if (!(cmd_buffer->state.dirty & V3DV_CMD_DIRTY_PUSH_CONSTANTS_UBO) ||
      92     if (cmd_buffer->push_constants_resource.bo == NULL) {
      93     cmd_buffer->push_constants_resource.bo =
      94        v3dv_bo_alloc(cmd_buffer->device, 4096, "push constants", true);
      96     v3dv_job_add_bo(cmd_buffer->state.job,
      97                     cmd_buffer->push_constants_resource.bo);
      99     if (!cmd_buffer->push_constants_resource.bo) {
     104     bool ok = v3dv_bo_map(cmd_buffer->device,
     105                           cmd_buffer->push_constants_resource.bo,
     [all …]
/aosp_15_r20/external/mesa3d/src/intel/vulkan_hasvk/
anv_cmd_buffer.c:

      45  anv_cmd_state_init(struct anv_cmd_buffer *cmd_buffer)
      47     struct anv_cmd_state *state = &cmd_buffer->state;
      57  anv_cmd_pipeline_state_finish(struct anv_cmd_buffer *cmd_buffer,
      62     anv_descriptor_set_layout_unref(cmd_buffer->device,
      64     vk_free(&cmd_buffer->vk.pool->alloc, pipe_state->push_descriptors[i]);
      70  anv_cmd_state_finish(struct anv_cmd_buffer *cmd_buffer)
      72     struct anv_cmd_state *state = &cmd_buffer->state;
      74     anv_cmd_pipeline_state_finish(cmd_buffer, &state->gfx.base);
      75     anv_cmd_pipeline_state_finish(cmd_buffer, &state->compute.base);
      79  anv_cmd_state_reset(struct anv_cmd_buffer *cmd_buffer)
     [all …]
genX_cmd_buffer.c:

      54  static void genX(flush_pipeline_select)(struct anv_cmd_buffer *cmd_buffer,
      82  is_render_queue_cmd_buffer(const struct anv_cmd_buffer *cmd_buffer)
      84     struct anv_queue_family *queue_family = cmd_buffer->queue_family;
      89  genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer)
      91     struct anv_device *device = cmd_buffer->device;
      97     cmd_buffer->state.descriptors_dirty |= ~0;
     106     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
     113     anv_batch_emit(&cmd_buffer->batch, GENX(STATE_BASE_ADDRESS), sba) {
     121     anv_cmd_buffer_surface_base_address(cmd_buffer);
     223     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
     [all …]
genX_blorp_exec.c:

    blorp_measure_start():
      42     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
      43     trace_intel_begin_blorp(&cmd_buffer->trace);
      44     anv_measure_snapshot(cmd_buffer,
    blorp_measure_end():
      52     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
      53     trace_intel_end_blorp(&cmd_buffer->trace,
    blorp_emit_dwords():
      67     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
      68     return anv_batch_emit_dwords(&cmd_buffer->batch, n);
    blorp_emit_reloc():
      75     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
      76     assert(cmd_buffer->batch.start <= location &&
      77            location < cmd_buffer->batch.end);
     [all …]
anv_batch_chain.c:

     348  anv_batch_bo_create(struct anv_cmd_buffer *cmd_buffer,
     354     struct anv_batch_bo *bbo = vk_zalloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
     357     return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);
     359     result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool,
     364     result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->vk.pool->alloc);
     373     anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);
     375     vk_free(&cmd_buffer->vk.pool->alloc, bbo);
     381  anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,
     387     struct anv_batch_bo *bbo = vk_alloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
     390     return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);
     [all …]
anv_measure.c:

      62  config_from_command_buffer(struct anv_cmd_buffer *cmd_buffer)
      64     return cmd_buffer->device->physical->measure_device.config;
      68  anv_measure_init(struct anv_cmd_buffer *cmd_buffer)
      70     struct intel_measure_config *config = config_from_command_buffer(cmd_buffer);
      71     struct anv_device *device = cmd_buffer->device;
      74     cmd_buffer->measure = NULL;
      85     vk_alloc(&cmd_buffer->vk.pool->alloc,
      99     cmd_buffer->measure = measure;
     103  anv_measure_start_snapshot(struct anv_cmd_buffer *cmd_buffer,
     108     struct anv_batch *batch = &cmd_buffer->batch;
     [all …]
genX_query.c:

     330  khr_perf_query_ensure_relocs(struct anv_cmd_buffer *cmd_buffer)
     332     if (anv_batch_has_error(&cmd_buffer->batch))
     335     if (cmd_buffer->self_mod_locations)
     338     struct anv_device *device = cmd_buffer->device;
     341     cmd_buffer->self_mod_locations =
     342        vk_alloc(&cmd_buffer->vk.pool->alloc,
     343                 pdevice->n_perf_query_commands * sizeof(*cmd_buffer->self_mod_locations), 8,
     346     if (!cmd_buffer->self_mod_locations) {
     347     anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);
     613  emit_ps_depth_count(struct anv_cmd_buffer *cmd_buffer,
     [all …]
/aosp_15_r20/external/mesa3d/src/imagination/vulkan/
pvr_cmd_buffer.c:

      92  static void pvr_cmd_buffer_free_sub_cmd(struct pvr_cmd_buffer *cmd_buffer,
     100     pvr_bo_free(cmd_buffer->device, sub_cmd->gfx.terminate_ctrl_stream);
     117     vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
     123     vk_free(&cmd_buffer->vk.pool->alloc, sub_cmd->event.wait.events);
     132     vk_free(&cmd_buffer->vk.pool->alloc, sub_cmd);
     135  static void pvr_cmd_buffer_free_sub_cmds(struct pvr_cmd_buffer *cmd_buffer)
     139     &cmd_buffer->sub_cmds,
     141     pvr_cmd_buffer_free_sub_cmd(cmd_buffer, sub_cmd);
     145  static void pvr_cmd_buffer_free_resources(struct pvr_cmd_buffer *cmd_buffer)
     147     vk_free(&cmd_buffer->vk.pool->alloc,
     [all …]
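pvr_cmd_buffer_free_sub_cmds walks the sub-command list with a safe iterator, the variant that caches the next pointer so the current node can be freed mid-walk (Mesa uses list_for_each_entry_safe on an intrusive list). A standalone sketch of the same discipline on a plain singly linked list:

    #include <stdlib.h>

    struct sub_cmd {
       struct sub_cmd *next;
       /* ... per-sub-command payload would live here ... */
    };

    /* Free every node: grab 'next' before freeing the current node. */
    static void free_sub_cmds(struct sub_cmd *head)
    {
       struct sub_cmd *cmd = head;
       while (cmd != NULL) {
          struct sub_cmd *next = cmd->next;  /* saved before free() */
          free(cmd);
          cmd = next;
       }
    }

    int main(void)
    {
       /* Build a three-node list. */
       struct sub_cmd *head = NULL;
       for (int i = 0; i < 3; i++) {
          struct sub_cmd *cmd = calloc(1, sizeof(*cmd));
          if (cmd == NULL)
             break;
          cmd->next = head;
          head = cmd;
       }
       free_sub_cmds(head);
       return 0;
    }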
/aosp_15_r20/external/mesa3d/src/amd/vulkan/meta/
radv_meta_copy.c:

      56  alloc_transfer_temp_bo(struct radv_cmd_buffer *cmd_buffer)
      58     if (cmd_buffer->transfer.copy_temp)
      61     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
      63     …radv_bo_create(device, &cmd_buffer->vk.base, RADV_SDMA_TRANSFER_TEMP_BYTES, 4096, RADEON_DOMAIN_VR…
      65     &cmd_buffer->transfer.copy_temp);
      68     vk_command_buffer_set_error(&cmd_buffer->vk, r);
      72     radv_cs_add_buffer(device->ws, cmd_buffer->cs, cmd_buffer->transfer.copy_temp);
      77  transfer_copy_buffer_image(struct radv_cmd_buffer *cmd_buffer, struct radv_buffer *buffer, struct r…
      80     const struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
      81     struct radeon_cmdbuf *cs = cmd_buffer->cs;
     [all …]
radv_meta_buffer.c:

     185  fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer, uint64_t va, uint64_t size, uint32_t data)
     187     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
     194     vk_command_buffer_set_error(&cmd_buffer->vk, result);
     198     …radv_meta_save(&saved_state, cmd_buffer, RADV_META_SAVE_COMPUTE_PIPELINE | RADV_META_SAVE_CONSTANT…
     200     …radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE, pipeli…
     210     …vk_common_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer), device->meta_state.buffer.fill_p…
     213     radv_unaligned_dispatch(cmd_buffer, DIV_ROUND_UP(size, 16), 1, 1);
     215     radv_meta_restore(&saved_state, cmd_buffer);
     219  copy_buffer_shader(struct radv_cmd_buffer *cmd_buffer, uint64_t src_va, uint64_t dst_va, uint64_t s…
     221     struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
     [all …]
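fill_buffer_shader dispatches DIV_ROUND_UP(size, 16) invocations, i.e. one per 16 bytes, so a size that is not a multiple of 16 still gets a final, partially-used invocation; the shader side must bounds-check the tail. A tiny sketch of that group-count arithmetic, with a hypothetical dispatch stub in place of the real packet emission:

    #include <stdint.h>
    #include <stdio.h>

    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /* Stand-in for a driver dispatch; real code would emit a DISPATCH packet. */
    static void dispatch(uint32_t x, uint32_t y, uint32_t z)
    {
       printf("dispatch %ux%ux%u\n", x, y, z);
    }

    static void fill_buffer(uint64_t size)
    {
       /* One invocation covers 16 bytes; round up so the tail is written too.
          Invocations whose offset lands past 'size' must write nothing. */
       dispatch((uint32_t)DIV_ROUND_UP(size, 16), 1, 1);
    }

    int main(void)
    {
       fill_buffer(4096);  /* exactly 256 invocations */
       fill_buffer(4100);  /* 257; the last one only covers 4 bytes */
       return 0;
    }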