
2  * Copyright(c) 2011-2016 Intel Corporation. All rights reserved.
176 struct intel_gvt *gvt = engine->i915->gvt; in load_render_mocs()
177 struct intel_uncore *uncore = engine->uncore; in load_render_mocs()
178 u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt; in load_render_mocs()
179 u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list; in load_render_mocs()
188 if (!HAS_ENGINE(engine->gt, ring_id)) in load_render_mocs()
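
The lines above from load_render_mocs() pick up the per-engine MOCS offset list and skip engines the GT does not have. The elided body is a save loop that snapshots the host's MOCS state before any vGPU runs; a minimal sketch, assuming the gen9_render_mocs cache and GEN9_MOCS_SIZE referenced elsewhere in this file:

	/* Sketch only: cache the host's control-table values per engine. */
	for (ring_id = 0; ring_id < cnt; ring_id++) {
		if (!HAS_ENGINE(engine->gt, ring_id))
			continue;

		offset.reg = regs[ring_id];
		for (i = 0; i < GEN9_MOCS_SIZE; i++) {
			gen9_render_mocs.control_table[ring_id][i] =
				intel_uncore_read_fw(uncore, offset);
			offset.reg += 4;
		}
	}
	gen9_render_mocs.initialized = true;
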
215 struct intel_gvt *gvt = vgpu->gvt; in restore_context_mmio_for_inhibit()
216 int ring_id = req->engine->id; in restore_context_mmio_for_inhibit()
217 int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id]; in restore_context_mmio_for_inhibit()
222 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
231 for (mmio = gvt->engine_mmio_list.mmio; in restore_context_mmio_for_inhibit()
232 i915_mmio_reg_valid(mmio->reg); mmio++) { in restore_context_mmio_for_inhibit()
233 if (mmio->id != ring_id || !mmio->in_context) in restore_context_mmio_for_inhibit()
236 *cs++ = i915_mmio_reg_offset(mmio->reg); in restore_context_mmio_for_inhibit()
237 *cs++ = vgpu_vreg_t(vgpu, mmio->reg) | (mmio->mask << 16); in restore_context_mmio_for_inhibit()
239 *(cs-2), *(cs-1), vgpu->id, ring_id); in restore_context_mmio_for_inhibit()
245 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
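
The offset/value pairs written through cs in the loop above form the body of a single MI_LOAD_REGISTER_IMM command, bracketed by the two emit_flush() barriers. A hedged sketch of the surrounding ring emission, assuming count in-context registers for this ring:

	cs = intel_ring_begin(req, count * 2 + 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	/* One LRI header, then count offset/value pairs (the loop above). */
	*cs++ = MI_LOAD_REGISTER_IMM(count) | MI_LRI_FORCE_POSTED;
	/* ... offset/value pairs ... */
	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);
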
269 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_control_for_inhibit()
296 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_l3cc_for_inhibit()
330 if (req->engine->id != RCS0) in intel_vgpu_restore_inhibit_context()
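
intel_vgpu_restore_inhibit_context() chains the three restore helpers named above, and the RCS0 check shown here skips the MOCS ones on engines whose context carries no MOCS registers. A hedged sketch of the call order (out being the function's cleanup label):

	ret = restore_context_mmio_for_inhibit(vgpu, req);
	if (ret)
		goto out;

	/* Only the render engine carries MOCS registers in its context. */
	if (req->engine->id != RCS0)
		goto out;

	ret = restore_render_mocs_control_for_inhibit(vgpu, req);
	if (ret)
		goto out;

	ret = restore_render_mocs_l3cc_for_inhibit(vgpu, req);
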
364 struct intel_uncore *uncore = engine->uncore; in handle_tlb_pending_event()
365 struct intel_vgpu_submission *s = &vgpu->submission; in handle_tlb_pending_event()
366 u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list; in handle_tlb_pending_event()
367 u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt; in handle_tlb_pending_event()
374 if (drm_WARN_ON(&engine->i915->drm, engine->id >= cnt)) in handle_tlb_pending_event()
377 if (!test_and_clear_bit(engine->id, (void *)s->tlb_handle_pending)) in handle_tlb_pending_event()
380 reg = _MMIO(regs[engine->id]); in handle_tlb_pending_event()
389 if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) >= 9) in handle_tlb_pending_event()
398 engine->name); in handle_tlb_pending_event()
404 gvt_dbg_core("invalidate TLB for ring %s\n", engine->name); in handle_tlb_pending_event()
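
Between resolving reg and the timeout/debug messages above, the function triggers the invalidation and waits for the hardware to acknowledge it. One plausible shape of that elided step, assuming i915's wait_for_atomic() helper (the 50 ms timeout is illustrative):

	intel_uncore_write_fw(uncore, reg, 0x1);

	/* The engine clears the bit once the TLB invalidation completes. */
	if (wait_for_atomic(intel_uncore_read_fw(uncore, reg) == 0, 50))
		gvt_vgpu_err("timeout in invalidate ring %s tlb\n",
			     engine->name);
	else
		vgpu_vreg_t(vgpu, reg) = 0;
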
407 static void switch_mocs(struct intel_vgpu *pre, struct intel_vgpu *next, in switch_mocs() argument
417 struct intel_uncore *uncore = engine->uncore; in switch_mocs()
422 if (drm_WARN_ON(&engine->i915->drm, engine->id >= ARRAY_SIZE(regs))) in switch_mocs()
425 if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) == 9) in switch_mocs()
428 if (!pre && !gen9_render_mocs.initialized) in switch_mocs()
431 offset.reg = regs[engine->id]; in switch_mocs()
433 if (pre) in switch_mocs()
434 old_v = vgpu_vreg_t(pre, offset); in switch_mocs()
436 old_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
440 new_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
448 if (engine->id == RCS0) { in switch_mocs()
451 if (pre) in switch_mocs()
452 old_v = vgpu_vreg_t(pre, l3_offset); in switch_mocs()
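
In both the control-table walk and the l3cc block, the counterpart of old_v is picked the same way and a register write is issued only when the value actually changes. A hedged sketch of that compare-and-write step for the control table:

		if (next)
			new_v = vgpu_vreg_t(next, offset);
		else
			new_v = gen9_render_mocs.control_table[engine->id][i];

		/* Skip the MMIO write when the owner change leaves the value alone. */
		if (old_v != new_v)
			intel_uncore_write_fw(uncore, offset, new_v);

		offset.reg += 4;
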
472 const u32 *reg_state = ce->lrc_reg_state; in is_inhibit_context()
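
is_inhibit_context() reduces to testing the context-restore-inhibit bit in the saved CONTEXT_CONTROL dword of the logical ring context. A hedged sketch, assuming the CTX_CONTEXT_CONTROL index and CTX_CTRL_ENGINE_CTX_RESTORE_INHIBIT bit used elsewhere in i915:

	u32 inhibit_mask =
		_MASKED_BIT_ENABLE(CTX_CTRL_ENGINE_CTX_RESTORE_INHIBIT);

	/* True only if the masked-enable pair for the inhibit bit is set. */
	return inhibit_mask ==
		(reg_state[CTX_CONTEXT_CONTROL] & inhibit_mask);
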
481 static void switch_mmio(struct intel_vgpu *pre, in switch_mmio() argument
485 struct intel_uncore *uncore = engine->uncore; in switch_mmio()
490 if (GRAPHICS_VER(engine->i915) >= 9) in switch_mmio()
491 switch_mocs(pre, next, engine); in switch_mmio()
493 for (mmio = engine->i915->gvt->engine_mmio_list.mmio; in switch_mmio()
494 i915_mmio_reg_valid(mmio->reg); mmio++) { in switch_mmio()
495 if (mmio->id != engine->id) in switch_mmio()
502 if (GRAPHICS_VER(engine->i915) == 9 && mmio->in_context) in switch_mmio()
506 if (pre) { in switch_mmio()
507 vgpu_vreg_t(pre, mmio->reg) = in switch_mmio()
508 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
509 if (mmio->mask) in switch_mmio()
510 vgpu_vreg_t(pre, mmio->reg) &= in switch_mmio()
511 ~(mmio->mask << 16); in switch_mmio()
512 old_v = vgpu_vreg_t(pre, mmio->reg); in switch_mmio()
514 old_v = mmio->value = in switch_mmio()
515 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
520 s = &next->submission; in switch_mmio()
526 if (mmio->in_context && in switch_mmio()
527 !is_inhibit_context(s->shadow[engine->id])) in switch_mmio()
530 if (mmio->mask) in switch_mmio()
531 new_v = vgpu_vreg_t(next, mmio->reg) | in switch_mmio()
532 (mmio->mask << 16); in switch_mmio()
534 new_v = vgpu_vreg_t(next, mmio->reg); in switch_mmio()
536 if (mmio->in_context) in switch_mmio()
538 if (mmio->mask) in switch_mmio()
539 new_v = mmio->value | (mmio->mask << 16); in switch_mmio()
541 new_v = mmio->value; in switch_mmio()
544 intel_uncore_write_fw(uncore, mmio->reg, new_v); in switch_mmio()
546 trace_render_mmio(pre ? pre->id : 0, in switch_mmio()
547 next ? next->id : 0, in switch_mmio()
549 i915_mmio_reg_offset(mmio->reg), in switch_mmio()
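
After the register walk traced above, switch_mmio() presumably hands off any TLB invalidation the incoming vGPU requested while it did not own the engine; a hedged sketch of that tail:

	/* The incoming vGPU may have asked for a TLB flush while it was
	 * scheduled out; service it now that it owns the engine again. */
	if (next)
		handle_tlb_pending_event(next, engine);
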
558  * intel_gvt_switch_mmio - switch the mmio context of a specific engine
559  * @pre: the last vGPU that owned the engine
563  * If pre is null, it indicates that the host owns the engine. If next is null
566 void intel_gvt_switch_mmio(struct intel_vgpu *pre, in intel_gvt_switch_mmio() argument
570 if (WARN(!pre && !next, "switch ring %s from host to HOST\n", in intel_gvt_switch_mmio()
571 engine->name)) in intel_gvt_switch_mmio()
574 gvt_dbg_render("switch ring %s from %s to %s\n", engine->name, in intel_gvt_switch_mmio()
575 pre ? "vGPU" : "host", next ? "vGPU" : "HOST"); in intel_gvt_switch_mmio()
582 intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
583 switch_mmio(pre, next, engine); in intel_gvt_switch_mmio()
584 intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
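
As the comment above spells out, either side of the switch may be the host. A hypothetical caller (names invented for illustration) would pass NULL for the side that is not a vGPU:

	/* Host hands the engine to a vGPU. */
	intel_gvt_switch_mmio(NULL, vgpu, engine);

	/* A vGPU hands the engine back to the host. */
	intel_gvt_switch_mmio(vgpu, NULL, engine);
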
588  * intel_gvt_init_engine_mmio_context - Initialize the engine mmio list
596 if (GRAPHICS_VER(gvt->gt->i915) >= 9) { in intel_gvt_init_engine_mmio_context()
597 gvt->engine_mmio_list.mmio = gen9_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
598 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
599 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
600 gvt->engine_mmio_list.mocs_mmio_offset_list = gen9_mocs_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
601 gvt->engine_mmio_list.mocs_mmio_offset_list_cnt = ARRAY_SIZE(gen9_mocs_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
603 gvt->engine_mmio_list.mmio = gen8_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
604 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
605 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
608 for (mmio = gvt->engine_mmio_list.mmio; in intel_gvt_init_engine_mmio_context()
609 i915_mmio_reg_valid(mmio->reg); mmio++) { in intel_gvt_init_engine_mmio_context()
610 if (mmio->in_context) { in intel_gvt_init_engine_mmio_context()
611 gvt->engine_mmio_list.ctx_mmio_count[mmio->id]++; in intel_gvt_init_engine_mmio_context()
612 intel_gvt_mmio_set_sr_in_ctx(gvt, mmio->reg.reg); in intel_gvt_init_engine_mmio_context()
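
The list initialized here and walked in switch_mmio() is a flat array terminated by an invalid register. Inferred from the fields used throughout this file (the authoritative definition lives in mmio_context.h), each entry looks roughly like:

	struct engine_mmio {
		enum intel_engine_id id;   /* engine the register belongs to */
		i915_reg_t reg;            /* register to save/restore */
		u32 mask;                  /* upper-half mask for masked writes, 0 if none */
		bool in_context;           /* true if the HW context image carries it */
		u32 value;                 /* host value cached across vGPU ownership */
	};
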