/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_cmd_pool.h"

#include "nvk_device.h"
#include "nvk_entrypoints.h"
#include "nvk_physical_device.h"
#include "nvkmd/nvkmd.h"

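/* Allocate one pool-owned command memory block: an NVK_CMD_MEM_SIZE,
 * CPU-mapped GART allocation wrapped in a struct nvk_cmd_mem.
 */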
static VkResult
nvk_cmd_mem_create(struct nvk_cmd_pool *pool, bool force_gart,
                   struct nvk_cmd_mem **mem_out)
{
   struct nvk_device *dev = nvk_cmd_pool_device(pool);
   struct nvk_cmd_mem *mem;
   VkResult result;

   mem = vk_zalloc(&pool->vk.alloc, sizeof(*mem), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (mem == NULL)
      return vk_error(pool, VK_ERROR_OUT_OF_HOST_MEMORY);

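   /* All command memory is currently placed in GART, so when the caller
    * requires GART we only need the assert below to hold.
    */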
   uint32_t flags = NVKMD_MEM_GART;
   if (force_gart)
      assert(flags & NVKMD_MEM_GART);
   result = nvkmd_dev_alloc_mapped_mem(dev->nvkmd, &pool->vk.base,
                                       NVK_CMD_MEM_SIZE, 0,
                                       flags, NVKMD_MEM_MAP_WR,
                                       &mem->mem);
   if (result != VK_SUCCESS) {
      vk_free(&pool->vk.alloc, mem);
      return result;
   }

   *mem_out = mem;
   return VK_SUCCESS;
}

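/* Unreference the nvkmd memory and free the struct nvk_cmd_mem wrapper. */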
static void
nvk_cmd_mem_destroy(struct nvk_cmd_pool *pool, struct nvk_cmd_mem *mem)
{
   nvkmd_mem_unref(mem->mem);
   vk_free(&pool->vk.alloc, mem);
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateCommandPool(VkDevice _device,
                      const VkCommandPoolCreateInfo *pCreateInfo,
                      const VkAllocationCallbacks *pAllocator,
                      VkCommandPool *pCmdPool)
{
   VK_FROM_HANDLE(nvk_device, device, _device);
   struct nvk_cmd_pool *pool;

   pool = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*pool), 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   VkResult result = vk_command_pool_init(&device->vk, &pool->vk,
                                          pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free2(&device->vk.alloc, pAllocator, pool);
      return result;
   }

   list_inithead(&pool->free_mem);
   list_inithead(&pool->free_gart_mem);

   *pCmdPool = nvk_cmd_pool_to_handle(pool);

   return VK_SUCCESS;
}

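/* Destroy all command memory cached on the pool's free lists. */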
static void
nvk_cmd_pool_destroy_mem(struct nvk_cmd_pool *pool)
{
   list_for_each_entry_safe(struct nvk_cmd_mem, mem, &pool->free_mem, link)
      nvk_cmd_mem_destroy(pool, mem);

   list_inithead(&pool->free_mem);

   list_for_each_entry_safe(struct nvk_cmd_mem, mem, &pool->free_gart_mem, link)
      nvk_cmd_mem_destroy(pool, mem);

   list_inithead(&pool->free_gart_mem);
}

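/* Hand out a command memory block, reusing one from the matching free list
 * when possible and allocating a fresh block otherwise.
 */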
VkResult
nvk_cmd_pool_alloc_mem(struct nvk_cmd_pool *pool, bool force_gart,
                       struct nvk_cmd_mem **mem_out)
{
   struct nvk_cmd_mem *mem = NULL;
   if (force_gart) {
      if (!list_is_empty(&pool->free_gart_mem))
         mem = list_first_entry(&pool->free_gart_mem, struct nvk_cmd_mem, link);
   } else {
      if (!list_is_empty(&pool->free_mem))
         mem = list_first_entry(&pool->free_mem, struct nvk_cmd_mem, link);
   }
   if (mem) {
      list_del(&mem->link);
      *mem_out = mem;
      return VK_SUCCESS;
   }

   return nvk_cmd_mem_create(pool, force_gart, mem_out);
}

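/* Return a list of command memory blocks to the pool's free list for reuse. */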
void
nvk_cmd_pool_free_mem_list(struct nvk_cmd_pool *pool,
                           struct list_head *mem_list)
{
   list_splicetail(mem_list, &pool->free_mem);
   list_inithead(mem_list);
}

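/* Same as nvk_cmd_pool_free_mem_list(), but for GART-only memory. */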
void
nvk_cmd_pool_free_gart_mem_list(struct nvk_cmd_pool *pool,
                                struct list_head *mem_list)
{
   list_splicetail(mem_list, &pool->free_gart_mem);
   list_inithead(mem_list);
}

VKAPI_ATTR void VKAPI_CALL
nvk_DestroyCommandPool(VkDevice _device,
                       VkCommandPool commandPool,
                       const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(nvk_device, device, _device);
   VK_FROM_HANDLE(nvk_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   vk_command_pool_finish(&pool->vk);
   nvk_cmd_pool_destroy_mem(pool);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR void VKAPI_CALL
nvk_TrimCommandPool(VkDevice device,
                    VkCommandPool commandPool,
                    VkCommandPoolTrimFlags flags)
{
   VK_FROM_HANDLE(nvk_cmd_pool, pool, commandPool);

   vk_command_pool_trim(&pool->vk, flags);
   nvk_cmd_pool_destroy_mem(pool);
}
153