@@ -135,7 +135,9 @@ struct drm_i915_gem_object {
unsigned long flags;
#define I915_BO_ALLOC_CONTIGUOUS (1<<0)
-#define I915_BO_ALLOC_FLAGS (I915_BO_ALLOC_CONTIGUOUS)
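+/* Backing pages may be discarded while the object is unpinned */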
+#define I915_BO_ALLOC_VOLATILE (1<<1)
+#define I915_BO_ALLOC_FLAGS (I915_BO_ALLOC_CONTIGUOUS | I915_BO_ALLOC_VOLATILE)
/*
* Is the object to be mapped as read-only to the GPU
@@ -82,6 +82,10 @@ i915_memory_region_put_pages_buddy(struct drm_i915_gem_object *obj,
memory_region_free_pages(obj, pages);
mutex_unlock(&obj->memory_region->mm_lock);
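+ /* Pages are released; reset the purge hint until the next get_pages */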
+ if (obj->flags & I915_BO_ALLOC_VOLATILE)
+ obj->mm.madv = I915_MADV_WILLNEED;
+
obj->mm.dirty = false;
}
@@ -182,6 +186,10 @@ i915_memory_region_get_pages_buddy(struct drm_i915_gem_object *obj)
i915_sg_trim(st);
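+ /* Mark volatile objects as purgeable while they hold backing pages */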
+ if (obj->flags & I915_BO_ALLOC_VOLATILE)
+ obj->mm.madv = I915_MADV_DONTNEED;
+
__i915_gem_object_set_pages(obj, st, sg_page_sizes);
return 0;
@@ -243,7 +251,13 @@ i915_gem_object_create_region(struct intel_memory_region *mem,
obj->flags = flags;
mutex_lock(&mem->obj_lock);
- list_add(&obj->region_link, &mem->objects);
+
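+ /* Volatile objects are tracked separately from ordinary region objects */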
+ if (flags & I915_BO_ALLOC_VOLATILE)
+ list_add(&obj->region_link, &mem->purgeable);
+ else
+ list_add(&obj->region_link, &mem->objects);
+
mutex_unlock(&mem->obj_lock);
return obj;
@@ -289,12 +289,75 @@ static int igt_mock_contiguous(void *arg)
return err;
}
+static int igt_mock_volatile(void *arg)
+{
+ struct intel_memory_region *mem = arg;
+ struct drm_i915_gem_object *obj;
+ int err;
+
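+ /* Non-volatile object: even unpinned, its pages must not be evicted */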
+ obj = i915_gem_object_create_region(mem, PAGE_SIZE, 0);
+ if (IS_ERR(obj))
+ return PTR_ERR(obj);
+
+ err = i915_gem_object_pin_pages(obj);
+ if (err)
+ goto err_put;
+
+ i915_gem_object_unpin_pages(obj);
+
+ err = i915_memory_region_evict(mem, PAGE_SIZE);
+ if (err != -ENOSPC) {
+ pr_err("expected eviction to fail with -ENOSPC, got %d\n", err);
+ if (!err)
+ err = -EINVAL;
+ goto err_put;
+ }
+
+ i915_gem_object_put(obj);
+
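+ /* Volatile object: once unpinned, its pages may be discarded */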
+ obj = i915_gem_object_create_region(mem, PAGE_SIZE, I915_BO_ALLOC_VOLATILE);
+ if (IS_ERR(obj))
+ return PTR_ERR(obj);
+
+ if (!(obj->flags & I915_BO_ALLOC_VOLATILE)) {
+ pr_err("object missing I915_BO_ALLOC_VOLATILE\n");
+ err = -EINVAL;
+ goto err_put;
+ }
+
+ err = i915_gem_object_pin_pages(obj);
+ if (err)
+ goto err_put;
+
+ i915_gem_object_unpin_pages(obj);
+
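+ /* With the pages unpinned, eviction should now succeed */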
+ err = i915_memory_region_evict(mem, PAGE_SIZE);
+ if (err) {
+ pr_err("failed to shrink memory region, err=%d\n", err);
+ goto err_put;
+ }
+
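+ /* The eviction above should have discarded the backing pages */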
+ if (i915_gem_object_has_pages(obj)) {
+ pr_err("object pages not discarded\n");
+ err = -EINVAL;
+ }
+
+err_put:
+ i915_gem_object_put(obj);
+ return err;
+}
+
int intel_memory_region_mock_selftests(void)
{
static const struct i915_subtest tests[] = {
SUBTEST(igt_mock_fill),
SUBTEST(igt_mock_evict),
SUBTEST(igt_mock_contiguous),
+ SUBTEST(igt_mock_volatile),
};
struct intel_memory_region *mem;
struct drm_i915_private *i915;