Lines matching full:i915

78 * @ww: i915 gem ww acquire ctx, or NULL
79 * @i915: i915 device
104 struct drm_i915_private *i915, in i915_gem_shrink() argument
113 { &i915->mm.purge_list, ~0u }, in i915_gem_shrink()
115 &i915->mm.shrink_list, in i915_gem_shrink()
127 bool trylock_vm = !ww && intel_vm_no_concurrent_access_wa(i915); in i915_gem_shrink()
129 trace_i915_gem_shrink(i915, target, shrink); in i915_gem_shrink()
137 wakeref = intel_runtime_pm_get_if_in_use(&i915->runtime_pm); in i915_gem_shrink()
153 for_each_gt(gt, i915, i) in i915_gem_shrink()
194 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_shrink()
215 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_shrink()
239 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_shrink()
244 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_shrink()
250 intel_runtime_pm_put(&i915->runtime_pm, wakeref); in i915_gem_shrink()
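The wakeref handling above is the interesting part: intel_runtime_pm_get_if_in_use() only returns a wakeref if the device is already awake, so reclaim can opportunistically touch bound (GPU-visible) objects without ever powering the hardware up just to free memory. A minimal sketch of that pattern, assuming the usual i915 internal headers; shrink_unbound()/shrink_bound() are hypothetical placeholders for the phase walks over the purge and shrink lists seen above:

static unsigned long try_shrink(struct drm_i915_private *i915,
                                unsigned long target)
{
        intel_wakeref_t wakeref;
        unsigned long freed;

        /* Unbound objects never need hardware access to be reclaimed. */
        freed = shrink_unbound(i915, target);

        /* Bound objects do; reclaim them only if already powered up. */
        wakeref = intel_runtime_pm_get_if_in_use(&i915->runtime_pm);
        if (wakeref) {
                if (freed < target)
                        freed += shrink_bound(i915, target - freed);
                intel_runtime_pm_put(&i915->runtime_pm, wakeref);
        }

        return freed;
}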
262 * @i915: i915 device
274 unsigned long i915_gem_shrink_all(struct drm_i915_private *i915) in i915_gem_shrink_all() argument
279 with_intel_runtime_pm(&i915->runtime_pm, wakeref) { in i915_gem_shrink_all()
280 freed = i915_gem_shrink(NULL, i915, -1UL, NULL, in i915_gem_shrink_all()
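i915_gem_shrink_all() is the unconditional variant: with_intel_runtime_pm() is a scoped helper that takes a wakeref for the duration of the block and drops it on exit, and the -1UL target means "reclaim everything". A sketch of the shape; the trailing flags are truncated in the listing, so the two used here are an assumption:

static unsigned long demo_shrink_all(struct drm_i915_private *i915)
{
        intel_wakeref_t wakeref;
        unsigned long freed = 0;

        with_intel_runtime_pm(&i915->runtime_pm, wakeref) {
                /* Flag names assumed; the listing elides them. */
                freed = i915_gem_shrink(NULL, i915, -1UL, NULL,
                                        I915_SHRINK_BOUND |
                                        I915_SHRINK_UNBOUND);
        }

        return freed;
}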
291 struct drm_i915_private *i915 = shrinker->private_data; in i915_gem_shrinker_count() local
295 count = READ_ONCE(i915->mm.shrink_memory) >> PAGE_SHIFT; in i915_gem_shrinker_count()
296 num_objects = READ_ONCE(i915->mm.shrink_count); in i915_gem_shrinker_count()
308 i915->mm.shrinker->batch = in i915_gem_shrinker_count()
309 max((i915->mm.shrinker->batch + avg) >> 1, in i915_gem_shrinker_count()
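The count side is deliberately cheap: it reads two counters maintained elsewhere (see the make_shrinkable/make_unshrinkable helpers at the end of this listing) instead of walking any list, then retunes the shrinker batch toward the average object size so each scan call frees a useful amount. The lines between 296 and 308 are elided; a plausible sketch of how they fit together, with the 128ul floor as an assumption:

static unsigned long
example_count(struct shrinker *shrinker, struct shrink_control *sc)
{
        struct drm_i915_private *i915 = shrinker->private_data;
        unsigned long num_objects;
        unsigned long count;

        count = READ_ONCE(i915->mm.shrink_memory) >> PAGE_SHIFT;
        num_objects = READ_ONCE(i915->mm.shrink_count);

        if (num_objects) {
                /* Average object size in pages; nudge the batch toward
                 * it so one scan_objects call reclaims enough to matter. */
                unsigned long avg = 2 * count / num_objects;

                i915->mm.shrinker->batch =
                        max((i915->mm.shrinker->batch + avg) >> 1,
                            128ul /* assumed floor */);
        }

        return count;
}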
319 struct drm_i915_private *i915 = shrinker->private_data; in i915_gem_shrinker_scan() local
324 freed = i915_gem_shrink(NULL, i915, in i915_gem_shrinker_scan()
332 with_intel_runtime_pm(&i915->runtime_pm, wakeref) { in i915_gem_shrinker_scan()
333 freed += i915_gem_shrink(NULL, i915, in i915_gem_shrinker_scan()
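The scan callback makes two passes: first it reclaims whatever needs no hardware access, and only if that falls short does it take a wakeref and retry with bound objects included. A sketch of that shape; the flag names and the kswapd check are assumptions about the arguments elided between lines 324 and 333:

static unsigned long
example_scan(struct shrinker *shrinker, struct shrink_control *sc)
{
        struct drm_i915_private *i915 = shrinker->private_data;
        unsigned long freed;

        sc->nr_scanned = 0;

        /* First pass: unbound objects, no wakeref required. */
        freed = i915_gem_shrink(NULL, i915,
                                sc->nr_to_scan, &sc->nr_scanned,
                                I915_SHRINK_UNBOUND);

        /* Second pass: power up and go after bound objects as well. */
        if (sc->nr_scanned < sc->nr_to_scan && current_is_kswapd()) {
                intel_wakeref_t wakeref;

                with_intel_runtime_pm(&i915->runtime_pm, wakeref) {
                        freed += i915_gem_shrink(NULL, i915,
                                                 sc->nr_to_scan - sc->nr_scanned,
                                                 &sc->nr_scanned,
                                                 I915_SHRINK_ACTIVE |
                                                 I915_SHRINK_BOUND |
                                                 I915_SHRINK_WRITEBACK);
                }
        }

        return sc->nr_scanned ? freed : SHRINK_STOP;
}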
349 struct drm_i915_private *i915 = in i915_gem_shrinker_oom() local
357 with_intel_runtime_pm(&i915->runtime_pm, wakeref) in i915_gem_shrinker_oom()
358 freed_pages += i915_gem_shrink(NULL, i915, -1UL, NULL, in i915_gem_shrinker_oom()
368 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_shrinker_oom()
369 list_for_each_entry(obj, &i915->mm.shrink_list, mm.link) { in i915_gem_shrinker_oom()
375 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_shrinker_oom()
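The OOM path is a notifier callback, not a shrinker: the OOM killer passes a pointer to a running total of freed pages through the void * argument, and the shrink_list walk under obj_lock at lines 368-375 only gathers statistics about what is still pinned. A sketch of the callback shape, with container_of() matching the local declaration at line 349 and the shrink flags assumed:

static int
example_oom(struct notifier_block *nb, unsigned long event, void *ptr)
{
        struct drm_i915_private *i915 =
                container_of(nb, struct drm_i915_private, mm.oom_notifier);
        unsigned long *total = ptr;
        unsigned long freed_pages = 0;
        intel_wakeref_t wakeref;

        /* Free everything we possibly can before the OOM killer fires. */
        with_intel_runtime_pm(&i915->runtime_pm, wakeref)
                freed_pages += i915_gem_shrink(NULL, i915, -1UL, NULL,
                                               I915_SHRINK_BOUND |
                                               I915_SHRINK_UNBOUND |
                                               I915_SHRINK_WRITEBACK);

        *total += freed_pages;
        return NOTIFY_DONE;
}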
389 struct drm_i915_private *i915 = in i915_gem_shrinker_vmap() local
397 with_intel_runtime_pm(&i915->runtime_pm, wakeref) in i915_gem_shrinker_vmap()
398 freed_pages += i915_gem_shrink(NULL, i915, -1UL, NULL, in i915_gem_shrinker_vmap()
404 for_each_gt(gt, i915, i) { in i915_gem_shrinker_vmap()
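The vmap variant hangs off the kernel's vmap purge notifier chain, which fires when vmalloc address space runs short; after the shrink it also walks each GT (the for_each_gt() loop above) to drop kernel mappings of objects bound in the global GTT. For context, a minimal generic module (not i915 code) showing how that chain is hooked:

#include <linux/module.h>
#include <linux/notifier.h>
#include <linux/vmalloc.h>

static int demo_vmap_purge(struct notifier_block *nb,
                           unsigned long event, void *ptr)
{
        /* ptr points at the caller's running total of freed pages;
         * release driver-private vmaps here and account them to it. */
        return NOTIFY_DONE;
}

static struct notifier_block demo_vmap_nb = {
        .notifier_call = demo_vmap_purge,
};

static int __init demo_init(void)
{
        return register_vmap_purge_notifier(&demo_vmap_nb);
}

static void __exit demo_exit(void)
{
        unregister_vmap_purge_notifier(&demo_vmap_nb);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");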
429 void i915_gem_driver_register__shrinker(struct drm_i915_private *i915) in i915_gem_driver_register__shrinker() argument
431 i915->mm.shrinker = shrinker_alloc(0, "drm-i915_gem"); in i915_gem_driver_register__shrinker()
432 if (!i915->mm.shrinker) { in i915_gem_driver_register__shrinker()
433 drm_WARN_ON(&i915->drm, 1); in i915_gem_driver_register__shrinker()
435 i915->mm.shrinker->scan_objects = i915_gem_shrinker_scan; in i915_gem_driver_register__shrinker()
436 i915->mm.shrinker->count_objects = i915_gem_shrinker_count; in i915_gem_driver_register__shrinker()
437 i915->mm.shrinker->batch = 4096; in i915_gem_driver_register__shrinker()
438 i915->mm.shrinker->private_data = i915; in i915_gem_driver_register__shrinker()
440 shrinker_register(i915->mm.shrinker); in i915_gem_driver_register__shrinker()
443 i915->mm.oom_notifier.notifier_call = i915_gem_shrinker_oom; in i915_gem_driver_register__shrinker()
444 drm_WARN_ON(&i915->drm, register_oom_notifier(&i915->mm.oom_notifier)); in i915_gem_driver_register__shrinker()
446 i915->mm.vmap_notifier.notifier_call = i915_gem_shrinker_vmap; in i915_gem_driver_register__shrinker()
447 drm_WARN_ON(&i915->drm, in i915_gem_driver_register__shrinker()
448 register_vmap_purge_notifier(&i915->mm.vmap_notifier)); in i915_gem_driver_register__shrinker()
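Registration uses the heap-allocated shrinker API (shrinker_alloc()/shrinker_register(), available since kernel v6.7): allocate, fill in the callbacks, then register, because the shrinker is live and callable the moment shrinker_register() returns. A generic sketch of the same flow with demo_count()/demo_scan() as stand-ins; teardown, as in the unregister function below, is simply shrinker_free() once the notifiers are gone:

#include <linux/shrinker.h>

static unsigned long demo_count(struct shrinker *s, struct shrink_control *sc)
{
        return 0; /* how many objects could be freed right now */
}

static unsigned long demo_scan(struct shrinker *s, struct shrink_control *sc)
{
        return SHRINK_STOP; /* try to free up to sc->nr_to_scan objects */
}

static struct shrinker *demo_register(void *private)
{
        struct shrinker *s = shrinker_alloc(0, "drm-demo");

        if (!s)
                return NULL;

        /* Configure everything before registering: the shrinker may
         * be invoked as soon as shrinker_register() makes it visible. */
        s->count_objects = demo_count;
        s->scan_objects = demo_scan;
        s->batch = 4096;
        s->private_data = private;

        shrinker_register(s);
        return s;
}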
451 void i915_gem_driver_unregister__shrinker(struct drm_i915_private *i915) in i915_gem_driver_unregister__shrinker() argument
453 drm_WARN_ON(&i915->drm, in i915_gem_driver_unregister__shrinker()
454 unregister_vmap_purge_notifier(&i915->mm.vmap_notifier)); in i915_gem_driver_unregister__shrinker()
455 drm_WARN_ON(&i915->drm, in i915_gem_driver_unregister__shrinker()
456 unregister_oom_notifier(&i915->mm.oom_notifier)); in i915_gem_driver_unregister__shrinker()
457 shrinker_free(i915->mm.shrinker); in i915_gem_driver_unregister__shrinker()
460 void i915_gem_shrinker_taints_mutex(struct drm_i915_private *i915, in i915_gem_shrinker_taints_mutex() argument
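Only the signature of i915_gem_shrinker_taints_mutex() appears here, but its job is a lockdep trick: acquire the mutex once inside a simulated reclaim context so lockdep records the reclaim-then-mutex ordering and will later flag allocations made while that mutex is held. A sketch of the idea, assuming the elided body looks roughly like this; fs_reclaim_acquire()/fs_reclaim_release() are the kernel's reclaim-context lockdep hooks:

#include <linux/mutex.h>
#include <linux/sched/mm.h>

static void demo_taints_mutex(struct mutex *mutex)
{
        if (!IS_ENABLED(CONFIG_LOCKDEP))
                return;

        fs_reclaim_acquire(GFP_KERNEL);
        /* Tell lockdep the mutex nests inside reclaim... */
        mutex_acquire(&mutex->dep_map, 0, 0, _RET_IP_);
        /* ...and release it again without ever blocking. */
        mutex_release(&mutex->dep_map, _RET_IP_);
        fs_reclaim_release(GFP_KERNEL);
}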
486 struct drm_i915_private *i915 = obj_to_i915(obj); in i915_gem_object_make_unshrinkable() local
498 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_object_make_unshrinkable()
502 i915->mm.shrink_count--; in i915_gem_object_make_unshrinkable()
503 i915->mm.shrink_memory -= obj->base.size; in i915_gem_object_make_unshrinkable()
505 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_object_make_unshrinkable()
511 struct drm_i915_private *i915 = obj_to_i915(obj); in ___i915_gem_object_make_shrinkable() local
520 spin_lock_irqsave(&i915->mm.obj_lock, flags); in ___i915_gem_object_make_shrinkable()
526 i915->mm.shrink_count++; in ___i915_gem_object_make_shrinkable()
527 i915->mm.shrink_memory += obj->base.size; in ___i915_gem_object_make_shrinkable()
530 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in ___i915_gem_object_make_shrinkable()
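Both helpers follow the same accounting discipline: shrink_count and shrink_memory mirror the shrink/purge lists and are only ever updated under mm.obj_lock with interrupts saved, because the shrinker callbacks and the OOM notifier walk those lists under the same lock. A combined sketch; the list manipulation is an assumption about the elided lines, with obj->mm.link taken from line 369:

static void demo_track(struct drm_i915_private *i915,
                       struct drm_i915_gem_object *obj, bool shrinkable)
{
        unsigned long flags;

        spin_lock_irqsave(&i915->mm.obj_lock, flags);
        if (shrinkable) {
                list_add_tail(&obj->mm.link, &i915->mm.shrink_list);
                i915->mm.shrink_count++;
                i915->mm.shrink_memory += obj->base.size;
        } else {
                list_del_init(&obj->mm.link);
                i915->mm.shrink_count--;
                i915->mm.shrink_memory -= obj->base.size;
        }
        spin_unlock_irqrestore(&i915->mm.obj_lock, flags);
}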