Lines matching +full:reset +full:-bps (search results over drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c; each matched line keeps its source line number and, where applicable, its enclosing function)

43 		vf2pf_info->ucode_info[ucode].id = ucode; \
44 		vf2pf_info->ucode_info[ucode].version = ver; \
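
The two matched lines above are the body of the POPULATE_UCODE_INFO() helper used by amdgpu_virt_populate_vf2pf_ucode_info() further down. A plausible reconstruction of the full macro (the do/while wrapper is an assumption; only the two body lines matched):

	#define POPULATE_UCODE_INFO(vf2pf_info, ucode, ver) \
		do { \
			vf2pf_info->ucode_info[ucode].id = ucode; \
			vf2pf_info->ucode_info[ucode].version = ver; \
		} while (0)
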
60 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
61 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
62 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
63 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
64 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
65 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
67 ddev->driver_features &= ~DRIVER_ATOMIC; in amdgpu_virt_init_setting()
68 adev->cg_flags = 0; in amdgpu_virt_init_setting()
69 adev->pg_flags = 0; in amdgpu_virt_init_setting()
72 if (amdgpu_num_kcq == -1) in amdgpu_virt_init_setting()
77 * amdgpu_virt_request_full_gpu() - request full gpu access
85 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_full_gpu()
88 if (virt->ops && virt->ops->req_full_gpu) { in amdgpu_virt_request_full_gpu()
89 r = virt->ops->req_full_gpu(adev, init); in amdgpu_virt_request_full_gpu()
91 adev->no_hw_access = true; in amdgpu_virt_request_full_gpu()
95 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_request_full_gpu()
102 * amdgpu_virt_release_full_gpu() - release full gpu access
110 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_full_gpu()
113 if (virt->ops && virt->ops->rel_full_gpu) { in amdgpu_virt_release_full_gpu()
114 r = virt->ops->rel_full_gpu(adev, init); in amdgpu_virt_release_full_gpu()
118 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_release_full_gpu()
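
Request and release come in pairs: req_full_gpu clears AMDGPU_SRIOV_CAPS_RUNTIME so the VF may touch the hardware directly, and rel_full_gpu sets it again. A minimal sketch of a hypothetical caller (not code from this file) bracketing an init sequence:

	/* Hypothetical caller: hold exclusive GPU access across an init sequence. */
	static int example_init_under_full_access(struct amdgpu_device *adev)
	{
		int r;

		r = amdgpu_virt_request_full_gpu(adev, true);	/* init = true */
		if (r)
			return r;

		/* ... program the hardware while this VF owns the GPU ... */

		return amdgpu_virt_release_full_gpu(adev, true);
	}
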
124 * amdgpu_virt_reset_gpu() - reset gpu
126 * Send a reset command to the GPU hypervisor to reset the GPU that the VM is using
127 * Return: Zero on reset success, error code otherwise.
131 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_reset_gpu()
134 if (virt->ops && virt->ops->reset_gpu) { in amdgpu_virt_reset_gpu()
135 r = virt->ops->reset_gpu(adev); in amdgpu_virt_reset_gpu()
139 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_reset_gpu()
147 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_init_data()
149 if (virt->ops && virt->ops->req_init_data) in amdgpu_virt_request_init_data()
150 virt->ops->req_init_data(adev); in amdgpu_virt_request_init_data()
152 if (adev->virt.req_init_data_ver > 0) in amdgpu_virt_request_init_data()
159 * amdgpu_virt_ready_to_reset() - send ready to reset to host
161 * Send ready to reset message to GPU hypervisor to signal we have stopped GPU
166 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ready_to_reset()
168 	if (virt->ops && virt->ops->ready_to_reset) in amdgpu_virt_ready_to_reset()
169 virt->ops->ready_to_reset(adev); in amdgpu_virt_ready_to_reset()
173 * amdgpu_virt_wait_reset() - wait for GPU reset to complete
175 * Wait for the GPU reset to complete.
176 * Return: Zero on reset success, error code otherwise.
180 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_wait_reset()
182 if (!virt->ops || !virt->ops->wait_reset) in amdgpu_virt_wait_reset()
183 return -EINVAL; in amdgpu_virt_wait_reset()
185 return virt->ops->wait_reset(adev); in amdgpu_virt_wait_reset()
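
A typical VF-initiated recovery chains the two helpers above: ask the host to reset, then block until the host reports completion. A minimal sketch (hypothetical caller, error handling trimmed):

	static int example_vf_reset(struct amdgpu_device *adev)
	{
		int r;

		r = amdgpu_virt_reset_gpu(adev);	/* host performs the reset */
		if (r)
			return r;

		return amdgpu_virt_wait_reset(adev);	/* wait for completion */
	}
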
189 * amdgpu_virt_alloc_mm_table() - alloc memory for mm table
198 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
204 &adev->virt.mm_table.bo, in amdgpu_virt_alloc_mm_table()
205 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
206 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
212 memset((void *)adev->virt.mm_table.cpu_addr, 0, PAGE_SIZE); in amdgpu_virt_alloc_mm_table()
214 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
215 adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
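
The fragments above belong to a single amdgpu_bo_create_kernel() call; a plausible reconstruction (the one-page size/alignment follows from the memset at line 212, the VRAM domain is an assumption):

	r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_VRAM,
				    &adev->virt.mm_table.bo,
				    &adev->virt.mm_table.gpu_addr,
				    (void *)&adev->virt.mm_table.cpu_addr);
	if (r)
		return r;
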
220 * amdgpu_virt_free_mm_table() - free mm table memory
226 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
229 amdgpu_bo_free_kernel(&adev->virt.mm_table.bo, in amdgpu_virt_free_mm_table()
230 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
231 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_free_mm_table()
232 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
236 * amdgpu_virt_rcvd_ras_interrupt() - receive ras interrupt
243 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_rcvd_ras_interrupt()
245 if (!virt->ops || !virt->ops->rcvd_ras_intr) in amdgpu_virt_rcvd_ras_interrupt()
248 return virt->ops->rcvd_ras_intr(adev); in amdgpu_virt_rcvd_ras_interrupt()
268 ret -= *(pos + i); in amd_sriov_msg_checksum()
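
Only one body line of the checksum helper matched; a plausible reconstruction of amd_sriov_msg_checksum(), consistent with that line and with the call sites below (seeding the sum with key + checksum is an assumption):

	static unsigned int amd_sriov_msg_checksum(void *obj, unsigned long obj_size,
						   unsigned int key, unsigned int checksum)
	{
		unsigned char *pos = (unsigned char *)obj;
		unsigned int ret = key + checksum;
		unsigned long i;

		/* subtract every byte of the message from the seed */
		for (i = 0; i < obj_size; i++)
			ret -= *(pos + i);

		return ret;
	}
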
274 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_init_ras_err_handler_data()
275 struct amdgpu_virt_ras_err_handler_data **data = &virt->virt_eh_data; in amdgpu_virt_init_ras_err_handler_data()
280 void *bps = NULL; in amdgpu_virt_init_ras_err_handler_data() local
287 bps = kmalloc_array(align_space, sizeof(*(*data)->bps), GFP_KERNEL); in amdgpu_virt_init_ras_err_handler_data()
288 if (!bps) in amdgpu_virt_init_ras_err_handler_data()
291 bps_bo = kmalloc_array(align_space, sizeof(*(*data)->bps_bo), GFP_KERNEL); in amdgpu_virt_init_ras_err_handler_data()
295 (*data)->bps = bps; in amdgpu_virt_init_ras_err_handler_data()
296 (*data)->bps_bo = bps_bo; in amdgpu_virt_init_ras_err_handler_data()
297 (*data)->count = 0; in amdgpu_virt_init_ras_err_handler_data()
298 (*data)->last_reserved = 0; in amdgpu_virt_init_ras_err_handler_data()
300 virt->ras_init_done = true; in amdgpu_virt_init_ras_err_handler_data()
305 kfree(bps); in amdgpu_virt_init_ras_err_handler_data()
309 return -ENOMEM; in amdgpu_virt_init_ras_err_handler_data()
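
The kfree(bps)/-ENOMEM tail above implies the usual two-stage allocation unwind; sketched here with assumed label names:

	bps = kmalloc_array(align_space, sizeof(*(*data)->bps), GFP_KERNEL);
	if (!bps)
		goto data_failure;

	bps_bo = kmalloc_array(align_space, sizeof(*(*data)->bps_bo), GFP_KERNEL);
	if (!bps_bo)
		goto bps_failure;

	/* ... publish bps/bps_bo into *data, mark ras_init_done ... */
	return 0;

bps_failure:
	kfree(bps);
data_failure:
	return -ENOMEM;
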
314 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_release_bp()
315 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_release_bp()
322 for (i = data->last_reserved - 1; i >= 0; i--) { in amdgpu_virt_ras_release_bp()
323 bo = data->bps_bo[i]; in amdgpu_virt_ras_release_bp()
326 data->bps_bo[i] = bo; in amdgpu_virt_ras_release_bp()
328 data->last_reserved = i; in amdgpu_virt_ras_release_bp()
334 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_ras_err_handler_data()
335 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_release_ras_err_handler_data()
337 virt->ras_init_done = false; in amdgpu_virt_release_ras_err_handler_data()
344 kfree(data->bps); in amdgpu_virt_release_ras_err_handler_data()
345 kfree(data->bps_bo); in amdgpu_virt_release_ras_err_handler_data()
347 virt->virt_eh_data = NULL; in amdgpu_virt_release_ras_err_handler_data()
351 struct eeprom_table_record *bps, int pages) in amdgpu_virt_ras_add_bps() argument
353 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_add_bps()
354 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_add_bps()
359 memcpy(&data->bps[data->count], bps, pages * sizeof(*data->bps)); in amdgpu_virt_ras_add_bps()
360 data->count += pages; in amdgpu_virt_ras_add_bps()
365 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_reserve_bps()
366 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_reserve_bps()
367 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_virt_ras_reserve_bps()
368 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_virt_ras_reserve_bps()
376 for (i = data->last_reserved; i < data->count; i++) { in amdgpu_virt_ras_reserve_bps()
377 bp = data->bps[i].retired_page; in amdgpu_virt_ras_reserve_bps()
385 amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr, in amdgpu_virt_ras_reserve_bps()
388 data->bps_bo[i] = NULL; in amdgpu_virt_ras_reserve_bps()
394 data->bps_bo[i] = bo; in amdgpu_virt_ras_reserve_bps()
396 data->last_reserved = i + 1; in amdgpu_virt_ras_reserve_bps()
404 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_check_bad_page()
405 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_check_bad_page()
411 for (i = 0; i < data->count; i++) in amdgpu_virt_ras_check_bad_page()
412 if (retired_page == data->bps[i].retired_page) in amdgpu_virt_ras_check_bad_page()
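
Reconstructed shape of the lookup around the two matched lines: a linear scan of the recorded bad pages that reports whether retired_page is already known (the function name and return convention are taken from the "in ..." annotation; the body is a sketch):

	static bool example_check_bad_page(struct amdgpu_virt_ras_err_handler_data *data,
					   uint64_t retired_page)
	{
		int i;

		for (i = 0; i < data->count; i++)
			if (retired_page == data->bps[i].retired_page)
				return true;	/* page already recorded */

		return false;
	}
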
426 if (adev->mman.fw_vram_usage_va) in amdgpu_virt_add_bad_page()
427 vram_usage_va = adev->mman.fw_vram_usage_va; in amdgpu_virt_add_bad_page()
429 vram_usage_va = adev->mman.drv_vram_usage_va; in amdgpu_virt_add_bad_page()
452 struct amd_sriov_msg_pf2vf_info_header *pf2vf_info = adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_read_pf2vf_data()
459 if (adev->virt.fw_reserve.p_pf2vf == NULL) in amdgpu_virt_read_pf2vf_data()
460 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
462 if (pf2vf_info->size > 1024) { in amdgpu_virt_read_pf2vf_data()
463 dev_err(adev->dev, "invalid pf2vf message size: 0x%x\n", pf2vf_info->size); in amdgpu_virt_read_pf2vf_data()
464 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
467 switch (pf2vf_info->version) { in amdgpu_virt_read_pf2vf_data()
469 checksum = ((struct amdgim_pf2vf_info_v1 *)pf2vf_info)->checksum; in amdgpu_virt_read_pf2vf_data()
471 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
472 adev->virt.fw_reserve.checksum_key, checksum); in amdgpu_virt_read_pf2vf_data()
474 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
477 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
480 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
481 ((struct amdgim_pf2vf_info_v1 *)pf2vf_info)->feature_flags; in amdgpu_virt_read_pf2vf_data()
485 checksum = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->checksum; in amdgpu_virt_read_pf2vf_data()
487 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
490 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
493 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
496 adev->virt.vf2pf_update_interval_ms = in amdgpu_virt_read_pf2vf_data()
497 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->vf2pf_update_interval_ms; in amdgpu_virt_read_pf2vf_data()
498 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
499 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->feature_flags.all; in amdgpu_virt_read_pf2vf_data()
500 adev->virt.reg_access = in amdgpu_virt_read_pf2vf_data()
501 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->reg_access_flags.all; in amdgpu_virt_read_pf2vf_data()
503 adev->virt.decode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
504 adev->virt.decode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
505 adev->virt.encode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
506 adev->virt.encode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
507 adev->virt.is_mm_bw_enabled = false; in amdgpu_virt_read_pf2vf_data()
509 		tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].decode_max_dimension_pixels; in amdgpu_virt_read_pf2vf_data()
510 adev->virt.decode_max_dimension_pixels = max(tmp, adev->virt.decode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
512 		tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].decode_max_frame_pixels; in amdgpu_virt_read_pf2vf_data()
513 adev->virt.decode_max_frame_pixels = max(tmp, adev->virt.decode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
515 		tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].encode_max_dimension_pixels; in amdgpu_virt_read_pf2vf_data()
516 adev->virt.encode_max_dimension_pixels = max(tmp, adev->virt.encode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
518 		tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].encode_max_frame_pixels; in amdgpu_virt_read_pf2vf_data()
519 adev->virt.encode_max_frame_pixels = max(tmp, adev->virt.encode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
521 if ((adev->virt.decode_max_dimension_pixels > 0) || (adev->virt.encode_max_dimension_pixels > 0)) in amdgpu_virt_read_pf2vf_data()
522 adev->virt.is_mm_bw_enabled = true; in amdgpu_virt_read_pf2vf_data()
524 adev->unique_id = in amdgpu_virt_read_pf2vf_data()
525 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->uuid; in amdgpu_virt_read_pf2vf_data()
526 adev->virt.ras_en_caps.all = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->ras_en_caps.all; in amdgpu_virt_read_pf2vf_data()
527 adev->virt.ras_telemetry_en_caps.all = in amdgpu_virt_read_pf2vf_data()
528 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->ras_telemetry_en_caps.all; in amdgpu_virt_read_pf2vf_data()
531 dev_err(adev->dev, "invalid pf2vf version: 0x%x\n", pf2vf_info->version); in amdgpu_virt_read_pf2vf_data()
532 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
536 if (adev->virt.vf2pf_update_interval_ms < 200 || adev->virt.vf2pf_update_interval_ms > 10000) in amdgpu_virt_read_pf2vf_data()
537 adev->virt.vf2pf_update_interval_ms = 2000; in amdgpu_virt_read_pf2vf_data()
545 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_populate_vf2pf_ucode_info()
547 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_populate_vf2pf_ucode_info()
550 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCE, adev->vce.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
551 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_UVD, adev->uvd.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
552 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MC, adev->gmc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
553 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_ME, adev->gfx.me_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
554 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_PFP, adev->gfx.pfp_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
555 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_CE, adev->gfx.ce_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
556 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC, adev->gfx.rlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
557 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLC, adev->gfx.rlc_srlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
558 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLG, adev->gfx.rlc_srlg_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
559 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLS, adev->gfx.rlc_srls_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
560 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC, adev->gfx.mec_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
561 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC2, adev->gfx.mec2_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
562 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SOS, adev->psp.sos.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
564 adev->psp.asd_context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
566 adev->psp.ras_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
568 adev->psp.xgmi_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
569 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SMC, adev->pm.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
570 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA, adev->sdma.instance[0].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
571 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA2, adev->sdma.instance[1].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
572 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCN, adev->vcn.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
573 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_DMCU, adev->dm.dmcu_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
580 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_write_vf2pf_data()
582 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_write_vf2pf_data()
583 return -EINVAL; in amdgpu_virt_write_vf2pf_data()
587 vf2pf_info->header.size = sizeof(struct amd_sriov_msg_vf2pf_info); in amdgpu_virt_write_vf2pf_data()
588 vf2pf_info->header.version = AMD_SRIOV_MSG_FW_VRAM_VF2PF_VER; in amdgpu_virt_write_vf2pf_data()
591 if (THIS_MODULE->version != NULL) in amdgpu_virt_write_vf2pf_data()
592 strcpy(vf2pf_info->driver_version, THIS_MODULE->version); in amdgpu_virt_write_vf2pf_data()
595 strcpy(vf2pf_info->driver_version, "N/A"); in amdgpu_virt_write_vf2pf_data()
597 vf2pf_info->pf2vf_version_required = 0; // no requirement, guest understands all in amdgpu_virt_write_vf2pf_data()
598 vf2pf_info->driver_cert = 0; in amdgpu_virt_write_vf2pf_data()
599 vf2pf_info->os_info.all = 0; in amdgpu_virt_write_vf2pf_data()
601 vf2pf_info->fb_usage = in amdgpu_virt_write_vf2pf_data()
602 ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20; in amdgpu_virt_write_vf2pf_data()
603 vf2pf_info->fb_vis_usage = in amdgpu_virt_write_vf2pf_data()
604 amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20; in amdgpu_virt_write_vf2pf_data()
605 vf2pf_info->fb_size = adev->gmc.real_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
606 vf2pf_info->fb_vis_size = adev->gmc.visible_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
611 vf2pf_info->gfx_usage = 0; in amdgpu_virt_write_vf2pf_data()
612 vf2pf_info->compute_usage = 0; in amdgpu_virt_write_vf2pf_data()
613 vf2pf_info->encode_usage = 0; in amdgpu_virt_write_vf2pf_data()
614 vf2pf_info->decode_usage = 0; in amdgpu_virt_write_vf2pf_data()
616 vf2pf_info->dummy_page_addr = (uint64_t)adev->dummy_page_addr; in amdgpu_virt_write_vf2pf_data()
617 vf2pf_info->mes_info_addr = (uint64_t)adev->mes.resource_1_gpu_addr; in amdgpu_virt_write_vf2pf_data()
619 if (adev->mes.resource_1) { in amdgpu_virt_write_vf2pf_data()
620 vf2pf_info->mes_info_size = adev->mes.resource_1->tbo.base.size; in amdgpu_virt_write_vf2pf_data()
622 vf2pf_info->checksum = in amdgpu_virt_write_vf2pf_data()
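
The assignment at line 622 continues on an unmatched line; presumably it feeds the whole structure back through the checksum helper, along these lines (the seed values are assumptions):

	vf2pf_info->checksum =
		amd_sriov_msg_checksum(vf2pf_info, vf2pf_info->header.size, 0, 0);
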
636 adev->virt.vf2pf_update_retry_cnt++; in amdgpu_virt_update_vf2pf_work_item()
639 adev->virt.vf2pf_update_retry_cnt >= AMDGPU_VF2PF_UPDATE_MAX_RETRY_LIMIT) && in amdgpu_virt_update_vf2pf_work_item()
643 if (amdgpu_reset_domain_schedule(adev->reset_domain, in amdgpu_virt_update_vf2pf_work_item()
644 &adev->kfd.reset_work)) in amdgpu_virt_update_vf2pf_work_item()
647 			dev_err(adev->dev, "Failed to queue work at %s\n", __func__); in amdgpu_virt_update_vf2pf_work_item()
653 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_update_vf2pf_work_item()
657 	schedule_delayed_work(&(adev->virt.vf2pf_work), msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms)); in amdgpu_virt_update_vf2pf_work_item()
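
Pieced together, the work item implements a heartbeat with escalation; a simplified sketch (the exact trigger condition, including any RAS-interrupt check, is partly assumed):

	if (amdgpu_virt_read_pf2vf_data(adev)) {
		adev->virt.vf2pf_update_retry_cnt++;
		if (adev->virt.vf2pf_update_retry_cnt >= AMDGPU_VF2PF_UPDATE_MAX_RETRY_LIMIT &&
		    amdgpu_sriov_runtime(adev) &&
		    amdgpu_reset_domain_schedule(adev->reset_domain,
						 &adev->kfd.reset_work))
			return;		/* host looks unresponsive: hand off to reset */
	} else {
		adev->virt.vf2pf_update_retry_cnt = 0;
		amdgpu_virt_write_vf2pf_data(adev);
	}

	schedule_delayed_work(&adev->virt.vf2pf_work,
			      msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms));
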
662 if (adev->virt.vf2pf_update_interval_ms != 0) { in amdgpu_virt_fini_data_exchange()
664 cancel_delayed_work_sync(&adev->virt.vf2pf_work); in amdgpu_virt_fini_data_exchange()
665 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_fini_data_exchange()
671 adev->virt.fw_reserve.p_pf2vf = NULL; in amdgpu_virt_init_data_exchange()
672 adev->virt.fw_reserve.p_vf2pf = NULL; in amdgpu_virt_init_data_exchange()
673 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_init_data_exchange()
674 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_init_data_exchange()
676 if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
678 } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
679 		/* this path is taken during ip_init and reset to init the workqueue */ in amdgpu_virt_init_data_exchange()
682 INIT_DELAYED_WORK(&adev->virt.vf2pf_work, amdgpu_virt_update_vf2pf_work_item); in amdgpu_virt_init_data_exchange()
683 		schedule_delayed_work(&(adev->virt.vf2pf_work), msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms)); in amdgpu_virt_init_data_exchange()
684 } else if (adev->bios != NULL) { in amdgpu_virt_init_data_exchange()
686 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_init_data_exchange()
688 (adev->bios + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_init_data_exchange()
701 if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
702 if (adev->mman.fw_vram_usage_va) { in amdgpu_virt_exchange_data()
703 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
705 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
706 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
708 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
709 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
710 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
711 } else if (adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
712 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
714 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
715 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
717 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
718 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
719 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
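
Both branches map the same layout at different base addresses; summarized (offset macros as used above; their numeric values do not appear in the matches):

	/*
	 * base = fw_vram_usage_va if present, else drv_vram_usage_va
	 *
	 * base + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)          -> p_pf2vf
	 * base + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)          -> p_vf2pf
	 * base + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)  -> ras_telemetry
	 */
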
726 if (adev->virt.fw_reserve.p_pf2vf->version == 2) { in amdgpu_virt_exchange_data()
727 pf2vf_v2 = (struct amd_sriov_msg_pf2vf_info *)adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_exchange_data()
729 bp_block_offset = ((uint64_t)pf2vf_v2->bp_block_offset_low & 0xFFFFFFFF) | in amdgpu_virt_exchange_data()
730 ((((uint64_t)pf2vf_v2->bp_block_offset_high) << 32) & 0xFFFFFFFF00000000); in amdgpu_virt_exchange_data()
731 bp_block_size = pf2vf_v2->bp_block_size; in amdgpu_virt_exchange_data()
733 if (bp_block_size && !adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
736 if (adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
746 switch (adev->asic_type) { in amdgpu_detect_virtualization()
767 adev->virt.caps |= AMDGPU_SRIOV_CAPS_IS_VF; in amdgpu_detect_virtualization()
770 adev->virt.caps |= AMDGPU_SRIOV_CAPS_ENABLE_IOV; in amdgpu_detect_virtualization()
775 adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE; in amdgpu_detect_virtualization()
780 switch (adev->asic_type) { in amdgpu_detect_virtualization()
808 DRM_ERROR("Unknown asic type: %d!\n", adev->asic_type); in amdgpu_detect_virtualization()
831 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_enable_access_debugfs()
833 return -EPERM; in amdgpu_virt_enable_access_debugfs()
841 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_disable_access_debugfs()
870 /* force set to GFXOFF state after reset, in amdgpu_virt_post_reset()
873 adev->gfx.is_poweron = false; in amdgpu_virt_post_reset()
876 adev->mes.ring[0].sched.ready = false; in amdgpu_virt_post_reset()
950 if (!adev->virt.is_mm_bw_enabled) in amdgpu_virt_update_sriov_video_codec()
955 encode[i].max_width = adev->virt.encode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
956 encode[i].max_pixels_per_frame = adev->virt.encode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
966 decode[i].max_width = adev->virt.decode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
967 decode[i].max_pixels_per_frame = adev->virt.decode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
1022 if (!adev->gfx.rlc.rlcg_reg_access_supported) { in amdgpu_virt_rlcg_reg_rw()
1023 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1028 if (adev->gfx.xcc_mask && (((1 << xcc_id) & adev->gfx.xcc_mask) == 0)) { in amdgpu_virt_rlcg_reg_rw()
1029 dev_err(adev->dev, "invalid xcc\n"); in amdgpu_virt_rlcg_reg_rw()
1036 reg_access_ctrl = &adev->gfx.rlc.reg_access_ctrl[xcc_id]; in amdgpu_virt_rlcg_reg_rw()
1037 scratch_reg0 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg0; in amdgpu_virt_rlcg_reg_rw()
1038 scratch_reg1 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg1; in amdgpu_virt_rlcg_reg_rw()
1039 scratch_reg2 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg2; in amdgpu_virt_rlcg_reg_rw()
1040 scratch_reg3 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg3; in amdgpu_virt_rlcg_reg_rw()
1042 spin_lock_irqsave(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
1044 if (reg_access_ctrl->spare_int) in amdgpu_virt_rlcg_reg_rw()
1045 spare_int = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->spare_int; in amdgpu_virt_rlcg_reg_rw()
1047 if (offset == reg_access_ctrl->grbm_cntl) { in amdgpu_virt_rlcg_reg_rw()
1051 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1052 } else if (offset == reg_access_ctrl->grbm_idx) { in amdgpu_virt_rlcg_reg_rw()
1056 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1066 if (reg_access_ctrl->spare_int) in amdgpu_virt_rlcg_reg_rw()
1080 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1083 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1086 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1089 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1093 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1101 spin_unlock_irqrestore(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
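
The scratch-register setup above implements the RLCG indirect-access handshake; a simplified sketch of the non-GRBM path, using the locals from the function (the flag semantics and the timeout loop are partly assumed):

	writel(v, scratch_reg0);		/* payload for a write access */
	writel(offset | flag, scratch_reg1);	/* target register + request flag */
	if (reg_access_ctrl->spare_int)
		writel(1, spare_int);		/* kick the RLC firmware */

	for (i = 0; i < timeout; i++) {
		if (!(readl(scratch_reg1) & flag))
			break;			/* RLC cleared the request */
		udelay(10);
	}

	ret = readl(scratch_reg0);		/* result for a read access */
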
1163 if (adev->virt.ras_en_caps.bits.block_umc) in amdgpu_virt_get_ras_capability()
1164 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__UMC); in amdgpu_virt_get_ras_capability()
1165 if (adev->virt.ras_en_caps.bits.block_sdma) in amdgpu_virt_get_ras_capability()
1166 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SDMA); in amdgpu_virt_get_ras_capability()
1167 if (adev->virt.ras_en_caps.bits.block_gfx) in amdgpu_virt_get_ras_capability()
1168 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__GFX); in amdgpu_virt_get_ras_capability()
1169 if (adev->virt.ras_en_caps.bits.block_mmhub) in amdgpu_virt_get_ras_capability()
1170 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MMHUB); in amdgpu_virt_get_ras_capability()
1171 if (adev->virt.ras_en_caps.bits.block_athub) in amdgpu_virt_get_ras_capability()
1172 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__ATHUB); in amdgpu_virt_get_ras_capability()
1173 if (adev->virt.ras_en_caps.bits.block_pcie_bif) in amdgpu_virt_get_ras_capability()
1174 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__PCIE_BIF); in amdgpu_virt_get_ras_capability()
1175 if (adev->virt.ras_en_caps.bits.block_hdp) in amdgpu_virt_get_ras_capability()
1176 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__HDP); in amdgpu_virt_get_ras_capability()
1177 if (adev->virt.ras_en_caps.bits.block_xgmi_wafl) in amdgpu_virt_get_ras_capability()
1178 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__XGMI_WAFL); in amdgpu_virt_get_ras_capability()
1179 if (adev->virt.ras_en_caps.bits.block_df) in amdgpu_virt_get_ras_capability()
1180 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__DF); in amdgpu_virt_get_ras_capability()
1181 if (adev->virt.ras_en_caps.bits.block_smn) in amdgpu_virt_get_ras_capability()
1182 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SMN); in amdgpu_virt_get_ras_capability()
1183 if (adev->virt.ras_en_caps.bits.block_sem) in amdgpu_virt_get_ras_capability()
1184 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SEM); in amdgpu_virt_get_ras_capability()
1185 if (adev->virt.ras_en_caps.bits.block_mp0) in amdgpu_virt_get_ras_capability()
1186 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP0); in amdgpu_virt_get_ras_capability()
1187 if (adev->virt.ras_en_caps.bits.block_mp1) in amdgpu_virt_get_ras_capability()
1188 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP1); in amdgpu_virt_get_ras_capability()
1189 if (adev->virt.ras_en_caps.bits.block_fuse) in amdgpu_virt_get_ras_capability()
1190 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__FUSE); in amdgpu_virt_get_ras_capability()
1191 if (adev->virt.ras_en_caps.bits.block_mca) in amdgpu_virt_get_ras_capability()
1192 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MCA); in amdgpu_virt_get_ras_capability()
1193 if (adev->virt.ras_en_caps.bits.block_vcn) in amdgpu_virt_get_ras_capability()
1194 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__VCN); in amdgpu_virt_get_ras_capability()
1195 if (adev->virt.ras_en_caps.bits.block_jpeg) in amdgpu_virt_get_ras_capability()
1196 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__JPEG); in amdgpu_virt_get_ras_capability()
1197 if (adev->virt.ras_en_caps.bits.block_ih) in amdgpu_virt_get_ras_capability()
1198 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__IH); in amdgpu_virt_get_ras_capability()
1199 if (adev->virt.ras_en_caps.bits.block_mpio) in amdgpu_virt_get_ras_capability()
1200 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MPIO); in amdgpu_virt_get_ras_capability()
1202 if (adev->virt.ras_en_caps.bits.poison_propogation_mode) in amdgpu_virt_get_ras_capability()
1203 con->poison_supported = true; /* Poison is handled by host */ in amdgpu_virt_get_ras_capability()
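
The chain above is a mechanical bit-to-block translation; a hypothetical compaction (not the upstream code) with the same behavior, assuming a local adev as in the surrounding function:

	#define MAP_RAS_CAP(field, blk)						\
		do {								\
			if (adev->virt.ras_en_caps.bits.field)			\
				adev->ras_hw_enabled |= BIT(blk);		\
		} while (0)

	MAP_RAS_CAP(block_umc, AMDGPU_RAS_BLOCK__UMC);
	MAP_RAS_CAP(block_sdma, AMDGPU_RAS_BLOCK__SDMA);
	/* ... one line per remaining block ... */
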
1250 dev_err(adev->dev, "Unsupported SRIOV RAS telemetry block 0x%x\n", block); in amdgpu_ras_block_to_sriov()
1261 checksum = host_telemetry->header.checksum; in amdgpu_virt_cache_host_error_counts()
1262 used_size = host_telemetry->header.used_size; in amdgpu_virt_cache_host_error_counts()
1267 tmp = kmemdup(&host_telemetry->body.error_count, used_size, GFP_KERNEL); in amdgpu_virt_cache_host_error_counts()
1269 return -ENOMEM; in amdgpu_virt_cache_host_error_counts()
1274 memcpy(&adev->virt.count_cache, tmp, in amdgpu_virt_cache_host_error_counts()
1275 min(used_size, sizeof(adev->virt.count_cache))); in amdgpu_virt_cache_host_error_counts()
1284 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_err_count_internal()
1290 if (__ratelimit(&adev->virt.ras_telemetry_rs) || force_update) { in amdgpu_virt_req_ras_err_count_internal()
1291 if (!virt->ops->req_ras_err_count(adev)) in amdgpu_virt_req_ras_err_count_internal()
1293 adev->virt.fw_reserve.ras_telemetry); in amdgpu_virt_req_ras_err_count_internal()
1309 return -EOPNOTSUPP; in amdgpu_virt_req_ras_err_count()
1311 /* Host Access may be lost during reset, just return last cached data. */ in amdgpu_virt_req_ras_err_count()
1312 if (down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_virt_req_ras_err_count()
1314 up_read(&adev->reset_domain->sem); in amdgpu_virt_req_ras_err_count()
1317 err_data->ue_count = adev->virt.count_cache.block[sriov_block].ue_count; in amdgpu_virt_req_ras_err_count()
1318 err_data->ce_count = adev->virt.count_cache.block[sriov_block].ce_count; in amdgpu_virt_req_ras_err_count()
1319 err_data->de_count = adev->virt.count_cache.block[sriov_block].de_count; in amdgpu_virt_req_ras_err_count()