Lines matching full:va

(Identifier search hits for "va"; judging by the function names, the file is the Linux kernel's PowerVR driver, drivers/gpu/drm/imagination/pvr_vm.c.)

118 #define to_pvr_vm_gpuva(va) container_of_const(va, struct pvr_vm_gpuva, base)  argument
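
The hit above is the driver's downcast helper: every PowerVR mapping embeds a struct drm_gpuva, so container_of_const() recovers the wrapper from a pointer to the embedded base while preserving constness. A minimal sketch of the pattern; anything beyond the base member is an assumption, since the hit only confirms the embedding:

    #include <linux/container_of.h>
    #include <drm/drm_gpuvm.h>

    /* Driver-side wrapper around the DRM GPUVM core object. Only the
     * embedded @base is confirmed by the hit; further members omitted. */
    struct pvr_vm_gpuva {
            struct drm_gpuva base;
    };

    /* container_of_const() works on const and non-const pointers alike. */
    #define to_pvr_vm_gpuva(va) \
            container_of_const(va, struct pvr_vm_gpuva, base)
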
147 * @new_va: Prealloced VA mapping object (init in callback).
153 * @prev_va: Prealloced VA mapping object (init in callback).
160 * @next_va: Prealloced VA mapping object (init in callback).
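
These three kerneldoc hits describe drm_gpuvm's preallocation contract: a single map request can split an existing mapping, so the driver allocates up to three VA wrappers up front (one new mapping plus the prev/next remnants of a split) and the split/merge callbacks only initialize them. A hedged sketch of that preallocation; the helper name and the bind-op layout shown here are assumptions, not something the hits confirm:

    #include <linux/slab.h>

    /* Partial stand-in for the driver's bind-op context; the real
     * struct has more members (e.g. the MMU op context seen below). */
    struct pvr_vm_bind_op {
            struct pvr_vm_gpuva *new_va;   /* init in map callback */
            struct pvr_vm_gpuva *prev_va;  /* init in remap callback */
            struct pvr_vm_gpuva *next_va;  /* init in remap callback */
    };

    /* Hypothetical helper: allocate the worst case before the operation
     * runs, so the callbacks themselves never allocate and never fail
     * on memory pressure. */
    static int pvr_vm_bind_op_prealloc(struct pvr_vm_bind_op *bind_op)
    {
            bind_op->new_va = kzalloc(sizeof(*bind_op->new_va), GFP_KERNEL);
            bind_op->prev_va = kzalloc(sizeof(*bind_op->prev_va), GFP_KERNEL);
            bind_op->next_va = kzalloc(sizeof(*bind_op->next_va), GFP_KERNEL);
            if (!bind_op->new_va || !bind_op->prev_va || !bind_op->next_va) {
                    kfree(bind_op->new_va);
                    kfree(bind_op->prev_va);
                    kfree(bind_op->next_va);
                    return -ENOMEM;
            }
            return 0;
    }
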
354 if ((op->map.gem.offset | op->map.va.range) & ~PVR_DEVICE_PAGE_MASK) in pvr_vm_gpuva_map()
357 err = pvr_mmu_map(ctx->mmu_op_ctx, op->map.va.range, pvr_gem->flags, in pvr_vm_gpuva_map()
358 op->map.va.addr); in pvr_vm_gpuva_map()
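
Lines 354-358 sit in the map step of the driver's &drm_gpuvm_ops: the GEM offset and the requested VA range must both be device-page aligned (ORing them tests both against one mask) before the MMU mapping is created. A sketch of the surrounding callback, assuming the -EINVAL error value and the context layout, with the tail that initializes the preallocated new_va reduced to a comment:

    static int pvr_vm_gpuva_map(struct drm_gpuva_op *op, void *op_ctx)
    {
            struct pvr_gem_object *pvr_gem = gem_to_pvr_gem(op->map.gem.obj);
            struct pvr_vm_bind_op *ctx = op_ctx;
            int err;

            /* Offset and range must both be device-page aligned. */
            if ((op->map.gem.offset | op->map.va.range) & ~PVR_DEVICE_PAGE_MASK)
                    return -EINVAL;

            err = pvr_mmu_map(ctx->mmu_op_ctx, op->map.va.range,
                              pvr_gem->flags, op->map.va.addr);
            if (err)
                    return err;

            /* The preallocated ctx->new_va would be initialized here
             * via drm_gpuva_map() and linked to the GEM object. */
            return 0;
    }
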
386 int err = pvr_mmu_unmap(ctx->mmu_op_ctx, op->unmap.va->va.addr, in pvr_vm_gpuva_unmap()
387 op->unmap.va->va.range); in pvr_vm_gpuva_unmap()
393 drm_gpuva_unlink(op->unmap.va); in pvr_vm_gpuva_unmap()
394 kfree(to_pvr_vm_gpuva(op->unmap.va)); in pvr_vm_gpuva_unmap()
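
Lines 386-394 are the unmap step: the MMU range is torn down first, and only on success is the VA detached with drm_gpuva_unlink() and its wrapper freed. The drm_gpuva_unmap() call below is the usual drm_gpuvm pairing rather than something these hits show:

    static int pvr_vm_gpuva_unmap(struct drm_gpuva_op *op, void *op_ctx)
    {
            struct pvr_vm_bind_op *ctx = op_ctx;
            int err = pvr_mmu_unmap(ctx->mmu_op_ctx, op->unmap.va->va.addr,
                                    op->unmap.va->va.range);

            if (err)
                    return err;

            /* Remove the core-side bookkeeping, detach from the GEM
             * object, then free the driver wrapper. */
            drm_gpuva_unmap(&op->unmap);
            drm_gpuva_unlink(op->unmap.va);
            kfree(to_pvr_vm_gpuva(op->unmap.va));

            return 0;
    }
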
441 drm_gpuva_unlink(op->remap.unmap->va); in pvr_vm_gpuva_remap()
442 kfree(to_pvr_vm_gpuva(op->remap.unmap->va)); in pvr_vm_gpuva_remap()
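
Lines 441-442 are the tail of the remap step, which runs when a new mapping punches a hole in an existing one: drm_gpuva_remap() moves the surviving head/tail pieces into the preallocated prev_va/next_va (it only touches whichever pieces op->remap actually keeps), after which the split mapping's VA is unlinked and freed. A condensed sketch; the MMU unmap of the removed middle section is omitted:

    static int pvr_vm_gpuva_remap(struct drm_gpuva_op *op, void *op_ctx)
    {
            struct pvr_vm_bind_op *ctx = op_ctx;

            /* Re-link the kept pieces using the preallocated VAs. */
            drm_gpuva_remap(&ctx->prev_va->base, &ctx->next_va->base,
                            &op->remap);

            /* Drop the mapping that was split. */
            drm_gpuva_unlink(op->remap.unmap->va);
            kfree(to_pvr_vm_gpuva(op->remap.unmap->va));

            return 0;
    }
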
853 struct drm_gpuva *va; in pvr_vm_unmap() local
858 va = drm_gpuva_find(&vm_ctx->gpuvm_mgr, device_addr, size); in pvr_vm_unmap()
859 if (va) { in pvr_vm_unmap()
860 pvr_obj = gem_to_pvr_gem(va->gem.obj); in pvr_vm_unmap()
862 va->va.addr, va->va.range); in pvr_vm_unmap()
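
Lines 853-862 show the lookup-before-unmap pattern: drm_gpuva_find() only returns a mapping whose start address and range match exactly, so a caller cannot unmap part of an existing mapping by passing a smaller range. A sketch of the surrounding function; locking is omitted, and the pvr_vm_unmap_obj() helper name is an assumption (the call whose arguments appear on line 862 starts on a non-matching line):

    int pvr_vm_unmap(struct pvr_vm_context *vm_ctx, u64 device_addr, u64 size)
    {
            struct pvr_gem_object *pvr_obj;
            struct drm_gpuva *va;
            int err = -ENOENT;

            /* Exact-match lookup: addr and range must both match. */
            va = drm_gpuva_find(&vm_ctx->gpuvm_mgr, device_addr, size);
            if (va) {
                    pvr_obj = gem_to_pvr_gem(va->gem.obj);
                    err = pvr_vm_unmap_obj(vm_ctx, pvr_obj,
                                           va->va.addr, va->va.range);
            }

            return err;
    }
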
886 struct drm_gpuva *va; in pvr_vm_unmap_all() local
888 va = drm_gpuva_find_first(&vm_ctx->gpuvm_mgr, in pvr_vm_unmap_all()
891 if (!va) in pvr_vm_unmap_all()
894 pvr_obj = gem_to_pvr_gem(va->gem.obj); in pvr_vm_unmap_all()
897 va->va.addr, va->va.range)); in pvr_vm_unmap_all()
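
Lines 886-897 drain the address space: drm_gpuva_find_first() over the manager's whole managed range returns any remaining mapping, and the double closing parenthesis on line 897 suggests each unmap is wrapped in WARN_ON(), i.e. expected to succeed. A sketch with the loop structure and range arguments inferred rather than shown:

    void pvr_vm_unmap_all(struct pvr_vm_context *vm_ctx)
    {
            for (;;) {
                    struct pvr_gem_object *pvr_obj;
                    struct drm_gpuva *va;

                    /* Any mapping left anywhere in the managed range. */
                    va = drm_gpuva_find_first(&vm_ctx->gpuvm_mgr,
                                              vm_ctx->gpuvm_mgr.mm_start,
                                              vm_ctx->gpuvm_mgr.mm_range);
                    if (!va)
                            break;

                    pvr_obj = gem_to_pvr_gem(va->gem.obj);
                    WARN_ON(pvr_vm_unmap_obj(vm_ctx, pvr_obj,
                                             va->va.addr, va->va.range));
            }
    }
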
1150 struct drm_gpuva *va; in pvr_vm_find_gem_object() local
1154 va = drm_gpuva_find_first(&vm_ctx->gpuvm_mgr, device_addr, 1); in pvr_vm_find_gem_object()
1155 if (!va) in pvr_vm_find_gem_object()
1158 pvr_obj = gem_to_pvr_gem(va->gem.obj); in pvr_vm_find_gem_object()
1162 *mapped_offset_out = va->gem.offset; in pvr_vm_find_gem_object()
1164 *mapped_size_out = va->va.range; in pvr_vm_find_gem_object()
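
Lines 1150-1164 resolve a device virtual address back to its backing GEM object: a range-of-1 drm_gpuva_find_first() lookup matches any mapping that contains the address, and the out-parameters report where within the object it is mapped and how large the mapping is. A sketch; the reference-count helper and the NULL checks on the out-parameters are assumptions:

    struct pvr_gem_object *
    pvr_vm_find_gem_object(struct pvr_vm_context *vm_ctx, u64 device_addr,
                           u64 *mapped_offset_out, u64 *mapped_size_out)
    {
            struct pvr_gem_object *pvr_obj;
            struct drm_gpuva *va;

            /* Range of 1: any mapping covering device_addr matches. */
            va = drm_gpuva_find_first(&vm_ctx->gpuvm_mgr, device_addr, 1);
            if (!va)
                    return NULL;

            pvr_obj = gem_to_pvr_gem(va->gem.obj);
            pvr_gem_object_get(pvr_obj); /* assumed refcount helper */

            if (mapped_offset_out)
                    *mapped_offset_out = va->gem.offset;
            if (mapped_size_out)
                    *mapped_size_out = va->va.range;

            return pvr_obj;
    }
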