      1 /*	$NetBSD: i915_vma.c,v 1.2 2021/12/18 23:45:31 riastradh Exp $	*/
      2 
      3 /*
      4  * Copyright © 2016 Intel Corporation
      5  *
      6  * Permission is hereby granted, free of charge, to any person obtaining a
      7  * copy of this software and associated documentation files (the "Software"),
      8  * to deal in the Software without restriction, including without limitation
      9  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
     10  * and/or sell copies of the Software, and to permit persons to whom the
     11  * Software is furnished to do so, subject to the following conditions:
     12  *
     13  * The above copyright notice and this permission notice (including the next
     14  * paragraph) shall be included in all copies or substantial portions of the
     15  * Software.
     16  *
     17  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     18  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     19  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
     20  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     21  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
     22  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
     23  * IN THE SOFTWARE.
     24  *
     25  */
     26 
     27 #include <sys/cdefs.h>
     28 __KERNEL_RCSID(0, "$NetBSD: i915_vma.c,v 1.2 2021/12/18 23:45:31 riastradh Exp $");
     29 
     30 #include <linux/prime_numbers.h>
     31 
     32 #include "gem/i915_gem_context.h"
     33 #include "gem/selftests/mock_context.h"
     34 
     35 #include "i915_scatterlist.h"
     36 #include "i915_selftest.h"
     37 
     38 #include "mock_gem_device.h"
     39 #include "mock_gtt.h"
     40 
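         /*
          * Check that a freshly created vma lives in the context's address
          * space, has the size of its backing object and uses the normal view.
          */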
     41 static bool assert_vma(struct i915_vma *vma,
     42 		       struct drm_i915_gem_object *obj,
     43 		       struct i915_gem_context *ctx)
     44 {
     45 	bool ok = true;
     46 
     47 	if (vma->vm != rcu_access_pointer(ctx->vm)) {
     48 		pr_err("VMA created with wrong VM\n");
     49 		ok = false;
     50 	}
     51 
     52 	if (vma->size != obj->base.size) {
     53 		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
     54 		       vma->size, obj->base.size);
     55 		ok = false;
     56 	}
     57 
     58 	if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
     59 		pr_err("VMA created with wrong type [%d]\n",
     60 		       vma->ggtt_view.type);
     61 		ok = false;
     62 	}
     63 
     64 	return ok;
     65 }
     66 
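         /*
          * Look up (or create) the vma for (obj, vm, view) and cross-check the
          * result by hand against the creation parameters, mirroring what
          * i915_vma_compare() is expected to enforce.  Returns the vma on
          * success, or an ERR_PTR() if the lookup or any consistency check
          * fails.
          */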
     67 static struct i915_vma *
     68 checked_vma_instance(struct drm_i915_gem_object *obj,
     69 		     struct i915_address_space *vm,
     70 		     const struct i915_ggtt_view *view)
     71 {
     72 	struct i915_vma *vma;
     73 	bool ok = true;
     74 
     75 	vma = i915_vma_instance(obj, vm, view);
     76 	if (IS_ERR(vma))
     77 		return vma;
     78 
     79 	/* Manual checks, will be reinforced by i915_vma_compare! */
     80 	if (vma->vm != vm) {
     81 		pr_err("VMA's vm [%p] does not match request [%p]\n",
     82 		       vma->vm, vm);
     83 		ok = false;
     84 	}
     85 
     86 	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
     87 		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
     88 		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
     89 		ok = false;
     90 	}
     91 
     92 	if (i915_vma_compare(vma, vm, view)) {
     93 		pr_err("i915_vma_compare failed with create parameters!\n");
     94 		return ERR_PTR(-EINVAL);
     95 	}
     96 
     97 	if (i915_vma_compare(vma, vma->vm,
     98 			     i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
     99 		pr_err("i915_vma_compare failed with itself\n");
    100 		return ERR_PTR(-EINVAL);
    101 	}
    102 
    103 	if (!ok) {
    104 		pr_err("i915_vma_compare failed to detect the difference!\n");
    105 		return ERR_PTR(-EINVAL);
    106 	}
    107 
    108 	return vma;
    109 }
    110 
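         /*
          * For every (object, context) pair, look up the vma in the context's
          * address space and sanity-check it.  The walk is made twice: the
          * first pass (pinned == 0) pins each vma with PIN_USER, the second
          * pass looks the vma up again and unpins it, so both creation and
          * lookup of live vma get exercised.
          */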
    111 static int create_vmas(struct drm_i915_private *i915,
    112 		       struct list_head *objects,
    113 		       struct list_head *contexts)
    114 {
    115 	struct drm_i915_gem_object *obj;
    116 	struct i915_gem_context *ctx;
    117 	int pinned;
    118 
    119 	list_for_each_entry(obj, objects, st_link) {
    120 		for (pinned = 0; pinned <= 1; pinned++) {
    121 			list_for_each_entry(ctx, contexts, link) {
    122 				struct i915_address_space *vm;
    123 				struct i915_vma *vma;
    124 				int err;
    125 
    126 				vm = i915_gem_context_get_vm_rcu(ctx);
    127 				vma = checked_vma_instance(obj, vm, NULL);
    128 				i915_vm_put(vm);
    129 				if (IS_ERR(vma))
    130 					return PTR_ERR(vma);
    131 
    132 				if (!assert_vma(vma, obj, ctx)) {
    133 					pr_err("VMA lookup/create failed\n");
    134 					return -EINVAL;
    135 				}
    136 
    137 				if (!pinned) {
    138 					err = i915_vma_pin(vma, 0, 0, PIN_USER);
    139 					if (err) {
    140 						pr_err("Failed to pin VMA\n");
    141 						return err;
    142 					}
    143 				} else {
    144 					i915_vma_unpin(vma);
    145 				}
    146 			}
    147 		}
    148 	}
    149 
    150 	return 0;
    151 }
    152 
    153 static int igt_vma_create(void *arg)
    154 {
    155 	struct i915_ggtt *ggtt = arg;
    156 	struct drm_i915_private *i915 = ggtt->vm.i915;
    157 	struct drm_i915_gem_object *obj, *on;
    158 	struct i915_gem_context *ctx, *cn;
    159 	unsigned long num_obj, num_ctx;
    160 	unsigned long no, nc;
    161 	IGT_TIMEOUT(end_time);
    162 	LIST_HEAD(contexts);
    163 	LIST_HEAD(objects);
    164 	int err = -ENOMEM;
    165 
     166 	/* Exercise creating many vma amongst many objects, checking the
    167 	 * vma creation and lookup routines.
    168 	 */
    169 
    170 	no = 0;
    171 	for_each_prime_number(num_obj, ULONG_MAX - 1) {
    172 		for (; no < num_obj; no++) {
    173 			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
    174 			if (IS_ERR(obj))
    175 				goto out;
    176 
    177 			list_add(&obj->st_link, &objects);
    178 		}
    179 
    180 		nc = 0;
    181 		for_each_prime_number(num_ctx, 2 * NUM_CONTEXT_TAG) {
    182 			for (; nc < num_ctx; nc++) {
    183 				ctx = mock_context(i915, "mock");
    184 				if (!ctx)
    185 					goto out;
    186 
    187 				list_move(&ctx->link, &contexts);
    188 			}
    189 
    190 			err = create_vmas(i915, &objects, &contexts);
    191 			if (err)
    192 				goto out;
    193 
    194 			if (igt_timeout(end_time,
    195 					"%s timed out: after %lu objects in %lu contexts\n",
    196 					__func__, no, nc))
    197 				goto end;
    198 		}
    199 
    200 		list_for_each_entry_safe(ctx, cn, &contexts, link) {
    201 			list_del_init(&ctx->link);
    202 			mock_context_close(ctx);
    203 		}
    204 
    205 		cond_resched();
    206 	}
    207 
    208 end:
    209 	/* Final pass to lookup all created contexts */
    210 	err = create_vmas(i915, &objects, &contexts);
    211 out:
    212 	list_for_each_entry_safe(ctx, cn, &contexts, link) {
    213 		list_del_init(&ctx->link);
    214 		mock_context_close(ctx);
    215 	}
    216 
    217 	list_for_each_entry_safe(obj, on, &objects, st_link)
    218 		i915_gem_object_put(obj);
    219 	return err;
    220 }
    221 
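         /*
          * A single pin request to try against the mock GGTT: pin a vma of
          * .size bytes with .flags, then let .assert decide whether the result
          * (0, -EINVAL or -ENOSPC) and the final placement are as expected.
          */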
    222 struct pin_mode {
    223 	u64 size;
    224 	u64 flags;
    225 	bool (*assert)(const struct i915_vma *,
    226 		       const struct pin_mode *mode,
    227 		       int result);
    228 	const char *string;
    229 };
    230 
    231 static bool assert_pin_valid(const struct i915_vma *vma,
    232 			     const struct pin_mode *mode,
    233 			     int result)
    234 {
    235 	if (result)
    236 		return false;
    237 
    238 	if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
    239 		return false;
    240 
    241 	return true;
    242 }
    243 
    244 __maybe_unused
    245 static bool assert_pin_enospc(const struct i915_vma *vma,
    246 			      const struct pin_mode *mode,
    247 			      int result)
    248 {
    249 	return result == -ENOSPC;
    250 }
    251 
    252 __maybe_unused
    253 static bool assert_pin_einval(const struct i915_vma *vma,
    254 			      const struct pin_mode *mode,
    255 			      int result)
    256 {
    257 	return result == -EINVAL;
    258 }
    259 
    260 static int igt_vma_pin1(void *arg)
    261 {
    262 	struct i915_ggtt *ggtt = arg;
    263 	const struct pin_mode modes[] = {
    264 #define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
    265 #define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
    266 #define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
    267 #define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
    268 		VALID(0, PIN_GLOBAL),
    269 		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),
    270 
    271 		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
    272 		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
    273 		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    274 		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    275 		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
    276 
    277 		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
    278 		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
    279 		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
    280 		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
    281 		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),
    282 
    283 		VALID(4096, PIN_GLOBAL),
    284 		VALID(8192, PIN_GLOBAL),
    285 		VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
    286 		VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
    287 		NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
    288 		VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
    289 		VALID(ggtt->vm.total, PIN_GLOBAL),
    290 		NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
    291 		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
    292 		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
    293 		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
    294 		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),
    295 
    296 		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    297 
    298 #if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
    299 		/* Misusing BIAS is a programming error (it is not controllable
    300 		 * from userspace) so when debugging is enabled, it explodes.
    301 		 * However, the tests are still quite interesting for checking
    302 		 * variable start, end and size.
    303 		 */
    304 		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
    305 		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
    306 		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    307 		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
    308 #endif
    309 		{ },
    310 #undef NOSPACE
    311 #undef INVALID
    312 #undef __INVALID
    313 #undef VALID
    314 	}, *m;
    315 	struct drm_i915_gem_object *obj;
    316 	struct i915_vma *vma;
    317 	int err = -EINVAL;
    318 
    319 	/* Exercise all the weird and wonderful i915_vma_pin requests,
    320 	 * focusing on error handling of boundary conditions.
    321 	 */
    322 
    323 	GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));
    324 
    325 	obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
    326 	if (IS_ERR(obj))
    327 		return PTR_ERR(obj);
    328 
    329 	vma = checked_vma_instance(obj, &ggtt->vm, NULL);
    330 	if (IS_ERR(vma))
    331 		goto out;
    332 
    333 	for (m = modes; m->assert; m++) {
    334 		err = i915_vma_pin(vma, m->size, 0, m->flags);
    335 		if (!m->assert(vma, m, err)) {
    336 			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
    337 			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
    338 			       (int)(m - modes), m->string, m->size, m->flags,
    339 			       err);
    340 			if (!err)
    341 				i915_vma_unpin(vma);
    342 			err = -EINVAL;
    343 			goto out;
    344 		}
    345 
    346 		if (!err) {
    347 			i915_vma_unpin(vma);
    348 			err = i915_vma_unbind(vma);
    349 			if (err) {
    350 				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
    351 				goto out;
    352 			}
    353 		}
    354 
    355 		cond_resched();
    356 	}
    357 
    358 	err = 0;
    359 out:
    360 	i915_gem_object_put(obj);
    361 	return err;
    362 }
    363 
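         /*
          * Object page expected at position (x, y) of plane n in a rotated
          * view: offset + x + stride * (height - y - 1), i.e. the source rows
          * are read bottom-up.  For example, a 2x2 plane with stride 2 and
          * offset 0 maps (x = 0, y = 0) to source page 2.
          */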
    364 static unsigned long rotated_index(const struct intel_rotation_info *r,
    365 				   unsigned int n,
    366 				   unsigned int x,
    367 				   unsigned int y)
    368 {
    369 	return (r->plane[n].stride * (r->plane[n].height - y - 1) +
    370 		r->plane[n].offset + x);
    371 }
    372 
    373 static struct scatterlist *
    374 assert_rotated(struct drm_i915_gem_object *obj,
    375 	       const struct intel_rotation_info *r, unsigned int n,
    376 	       struct scatterlist *sg)
    377 {
    378 	unsigned int x, y;
    379 
    380 	for (x = 0; x < r->plane[n].width; x++) {
    381 		for (y = 0; y < r->plane[n].height; y++) {
    382 			unsigned long src_idx;
    383 			dma_addr_t src;
    384 
    385 			if (!sg) {
    386 				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
    387 				       n, x, y);
    388 				return ERR_PTR(-EINVAL);
    389 			}
    390 
    391 			src_idx = rotated_index(r, n, x, y);
    392 			src = i915_gem_object_get_dma_address(obj, src_idx);
    393 
    394 			if (sg_dma_len(sg) != PAGE_SIZE) {
    395 				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
    396 				       sg_dma_len(sg), PAGE_SIZE,
    397 				       x, y, src_idx);
    398 				return ERR_PTR(-EINVAL);
    399 			}
    400 
    401 			if (sg_dma_address(sg) != src) {
    402 				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
    403 				       x, y, src_idx);
    404 				return ERR_PTR(-EINVAL);
    405 			}
    406 
    407 			sg = sg_next(sg);
    408 		}
    409 	}
    410 
    411 	return sg;
    412 }
    413 
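         /*
          * Object page expected at position (x, y) of plane n in a remapped
          * (non-rotated, row-major) view: simply offset + x + stride * y.
          */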
    414 static unsigned long remapped_index(const struct intel_remapped_info *r,
    415 				    unsigned int n,
    416 				    unsigned int x,
    417 				    unsigned int y)
    418 {
    419 	return (r->plane[n].stride * y +
    420 		r->plane[n].offset + x);
    421 }
    422 
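         /*
          * Walk the remapped sg list row by row.  Unlike the rotated case, a
          * remapped view may be backed by sg entries spanning several
          * contiguous pages, so track how many bytes are left in the current
          * entry and only advance to the next scatterlist element once it has
          * been fully consumed.
          */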
    423 static struct scatterlist *
    424 assert_remapped(struct drm_i915_gem_object *obj,
    425 		const struct intel_remapped_info *r, unsigned int n,
    426 		struct scatterlist *sg)
    427 {
    428 	unsigned int x, y;
    429 	unsigned int left = 0;
    430 	unsigned int offset;
    431 
    432 	for (y = 0; y < r->plane[n].height; y++) {
    433 		for (x = 0; x < r->plane[n].width; x++) {
    434 			unsigned long src_idx;
    435 			dma_addr_t src;
    436 
    437 			if (!sg) {
    438 				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
    439 				       n, x, y);
    440 				return ERR_PTR(-EINVAL);
    441 			}
    442 			if (!left) {
    443 				offset = 0;
    444 				left = sg_dma_len(sg);
    445 			}
    446 
    447 			src_idx = remapped_index(r, n, x, y);
    448 			src = i915_gem_object_get_dma_address(obj, src_idx);
    449 
    450 			if (left < PAGE_SIZE || left & (PAGE_SIZE-1)) {
    451 				pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
    452 				       sg_dma_len(sg), PAGE_SIZE,
    453 				       x, y, src_idx);
    454 				return ERR_PTR(-EINVAL);
    455 			}
    456 
    457 			if (sg_dma_address(sg) + offset != src) {
    458 				pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
    459 				       x, y, src_idx);
    460 				return ERR_PTR(-EINVAL);
    461 			}
    462 
    463 			left -= PAGE_SIZE;
    464 			offset += PAGE_SIZE;
    465 
    466 
    467 			if (!left)
    468 				sg = sg_next(sg);
    469 		}
    470 	}
    471 
    472 	return sg;
    473 }
    474 
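         /* Number of pages needed to back both planes of a rotated/remapped view. */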
    475 static unsigned int rotated_size(const struct intel_remapped_plane_info *a,
    476 				 const struct intel_remapped_plane_info *b)
    477 {
    478 	return a->width * a->height + b->width * b->height;
    479 }
    480 
    481 static int igt_vma_rotate_remap(void *arg)
    482 {
    483 	struct i915_ggtt *ggtt = arg;
    484 	struct i915_address_space *vm = &ggtt->vm;
    485 	struct drm_i915_gem_object *obj;
    486 	const struct intel_remapped_plane_info planes[] = {
    487 		{ .width = 1, .height = 1, .stride = 1 },
    488 		{ .width = 2, .height = 2, .stride = 2 },
    489 		{ .width = 4, .height = 4, .stride = 4 },
    490 		{ .width = 8, .height = 8, .stride = 8 },
    491 
    492 		{ .width = 3, .height = 5, .stride = 3 },
    493 		{ .width = 3, .height = 5, .stride = 4 },
    494 		{ .width = 3, .height = 5, .stride = 5 },
    495 
    496 		{ .width = 5, .height = 3, .stride = 5 },
    497 		{ .width = 5, .height = 3, .stride = 7 },
    498 		{ .width = 5, .height = 3, .stride = 9 },
    499 
    500 		{ .width = 4, .height = 6, .stride = 6 },
    501 		{ .width = 6, .height = 4, .stride = 6 },
    502 		{ }
    503 	}, *a, *b;
    504 	enum i915_ggtt_view_type types[] = {
    505 		I915_GGTT_VIEW_ROTATED,
    506 		I915_GGTT_VIEW_REMAPPED,
    507 		0,
    508 	}, *t;
    509 	const unsigned int max_pages = 64;
    510 	int err = -ENOMEM;
    511 
     512 	/* Create VMAs for many different combinations of planes and check
     513 	 * that the page layout within the rotated VMA matches our expectations.
    514 	 */
    515 
    516 	obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
    517 	if (IS_ERR(obj))
    518 		goto out;
    519 
    520 	for (t = types; *t; t++) {
    521 	for (a = planes; a->width; a++) {
    522 		for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
    523 			struct i915_ggtt_view view;
    524 			unsigned int n, max_offset;
    525 
    526 			max_offset = max(a->stride * a->height,
    527 					 b->stride * b->height);
    528 			GEM_BUG_ON(max_offset > max_pages);
    529 			max_offset = max_pages - max_offset;
    530 
    531 			view.type = *t;
    532 			view.rotated.plane[0] = *a;
    533 			view.rotated.plane[1] = *b;
    534 
    535 			for_each_prime_number_from(view.rotated.plane[0].offset, 0, max_offset) {
    536 				for_each_prime_number_from(view.rotated.plane[1].offset, 0, max_offset) {
    537 					struct scatterlist *sg;
    538 					struct i915_vma *vma;
    539 
    540 					vma = checked_vma_instance(obj, vm, &view);
    541 					if (IS_ERR(vma)) {
    542 						err = PTR_ERR(vma);
    543 						goto out_object;
    544 					}
    545 
    546 					err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
    547 					if (err) {
    548 						pr_err("Failed to pin VMA, err=%d\n", err);
    549 						goto out_object;
    550 					}
    551 
    552 					if (view.type == I915_GGTT_VIEW_ROTATED &&
    553 					    vma->size != rotated_size(a, b) * PAGE_SIZE) {
    554 						pr_err("VMA is wrong size, expected %lu, found %llu\n",
    555 						       PAGE_SIZE * rotated_size(a, b), vma->size);
    556 						err = -EINVAL;
    557 						goto out_object;
    558 					}
    559 
    560 					if (view.type == I915_GGTT_VIEW_REMAPPED &&
    561 					    vma->size > rotated_size(a, b) * PAGE_SIZE) {
    562 						pr_err("VMA is wrong size, expected %lu, found %llu\n",
    563 						       PAGE_SIZE * rotated_size(a, b), vma->size);
    564 						err = -EINVAL;
    565 						goto out_object;
    566 					}
    567 
    568 					if (vma->pages->nents > rotated_size(a, b)) {
     569 						pr_err("sg table is wrong size, expected %u, found %u nents\n",
    570 						       rotated_size(a, b), vma->pages->nents);
    571 						err = -EINVAL;
    572 						goto out_object;
    573 					}
    574 
    575 					if (vma->node.size < vma->size) {
    576 						pr_err("VMA binding too small, expected %llu, found %llu\n",
    577 						       vma->size, vma->node.size);
    578 						err = -EINVAL;
    579 						goto out_object;
    580 					}
    581 
    582 					if (vma->pages == obj->mm.pages) {
    583 						pr_err("VMA using unrotated object pages!\n");
    584 						err = -EINVAL;
    585 						goto out_object;
    586 					}
    587 
    588 					sg = vma->pages->sgl;
    589 					for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
    590 						if (view.type == I915_GGTT_VIEW_ROTATED)
    591 							sg = assert_rotated(obj, &view.rotated, n, sg);
    592 						else
    593 							sg = assert_remapped(obj, &view.remapped, n, sg);
    594 						if (IS_ERR(sg)) {
    595 							pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n",
    596 							       view.type == I915_GGTT_VIEW_ROTATED ?
    597 							       "rotated" : "remapped", n,
    598 							       view.rotated.plane[0].width,
    599 							       view.rotated.plane[0].height,
    600 							       view.rotated.plane[0].stride,
    601 							       view.rotated.plane[0].offset,
    602 							       view.rotated.plane[1].width,
    603 							       view.rotated.plane[1].height,
    604 							       view.rotated.plane[1].stride,
    605 							       view.rotated.plane[1].offset);
    606 							err = -EINVAL;
    607 							goto out_object;
    608 						}
    609 					}
    610 
    611 					i915_vma_unpin(vma);
    612 
    613 					cond_resched();
    614 				}
    615 			}
    616 		}
    617 	}
    618 	}
    619 
    620 out_object:
    621 	i915_gem_object_put(obj);
    622 out:
    623 	return err;
    624 }
    625 
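         /*
          * Check that a partial vma is backed by exactly "size" pages of the
          * object starting at page "offset": every sg DMA address must match
          * the corresponding object page, and the list must not overrun the
          * requested size.
          */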
    626 static bool assert_partial(struct drm_i915_gem_object *obj,
    627 			   struct i915_vma *vma,
    628 			   unsigned long offset,
    629 			   unsigned long size)
    630 {
    631 	struct sgt_iter sgt;
    632 	dma_addr_t dma;
    633 
    634 	for_each_sgt_daddr(dma, sgt, vma->pages) {
    635 		dma_addr_t src;
    636 
    637 		if (!size) {
    638 			pr_err("Partial scattergather list too long\n");
    639 			return false;
    640 		}
    641 
    642 		src = i915_gem_object_get_dma_address(obj, offset);
    643 		if (src != dma) {
    644 			pr_err("DMA mismatch for partial page offset %lu\n",
    645 			       offset);
    646 			return false;
    647 		}
    648 
    649 		offset++;
    650 		size--;
    651 	}
    652 
    653 	return true;
    654 }
    655 
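         /*
          * Common post-pin checks: the vma must have the expected size, its
          * node must be large enough to hold it, and it must use a private
          * page list for special views (or the object's own page list for the
          * normal view).
          */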
    656 static bool assert_pin(struct i915_vma *vma,
    657 		       struct i915_ggtt_view *view,
    658 		       u64 size,
    659 		       const char *name)
    660 {
    661 	bool ok = true;
    662 
    663 	if (vma->size != size) {
    664 		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
    665 		       name, size, vma->size);
    666 		ok = false;
    667 	}
    668 
    669 	if (vma->node.size < vma->size) {
    670 		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
    671 		       name, vma->size, vma->node.size);
    672 		ok = false;
    673 	}
    674 
    675 	if (view && view->type != I915_GGTT_VIEW_NORMAL) {
    676 		if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
    677 			pr_err("(%s) VMA mismatch upon creation!\n",
    678 			       name);
    679 			ok = false;
    680 		}
    681 
    682 		if (vma->pages == vma->obj->mm.pages) {
    683 			pr_err("(%s) VMA using original object pages!\n",
    684 			       name);
    685 			ok = false;
    686 		}
    687 	} else {
    688 		if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
    689 			pr_err("Not the normal ggtt view! Found %d\n",
    690 			       vma->ggtt_view.type);
    691 			ok = false;
    692 		}
    693 
    694 		if (vma->pages != vma->obj->mm.pages) {
    695 			pr_err("VMA not using object pages!\n");
    696 			ok = false;
    697 		}
    698 	}
    699 
    700 	return ok;
    701 }
    702 
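         /*
          * Instantiate many partial views of the object (window sizes and
          * offsets stepped through the primes), first in a "create" phase and
          * then in a "lookup" phase that must return the very same vma without
          * adding new entries to obj->vma.list.
          */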
    703 static int igt_vma_partial(void *arg)
    704 {
    705 	struct i915_ggtt *ggtt = arg;
    706 	struct i915_address_space *vm = &ggtt->vm;
    707 	const unsigned int npages = 1021; /* prime! */
    708 	struct drm_i915_gem_object *obj;
    709 	const struct phase {
    710 		const char *name;
    711 	} phases[] = {
    712 		{ "create" },
    713 		{ "lookup" },
    714 		{ },
    715 	}, *p;
    716 	unsigned int sz, offset;
    717 	struct i915_vma *vma;
    718 	int err = -ENOMEM;
    719 
    720 	/* Create lots of different VMA for the object and check that
    721 	 * we are returned the same VMA when we later request the same range.
    722 	 */
    723 
    724 	obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
    725 	if (IS_ERR(obj))
    726 		goto out;
    727 
    728 	for (p = phases; p->name; p++) { /* exercise both create/lookup */
    729 		unsigned int count, nvma;
    730 
    731 		nvma = 0;
    732 		for_each_prime_number_from(sz, 1, npages) {
    733 			for_each_prime_number_from(offset, 0, npages - sz) {
    734 				struct i915_ggtt_view view;
    735 
    736 				view.type = I915_GGTT_VIEW_PARTIAL;
    737 				view.partial.offset = offset;
    738 				view.partial.size = sz;
    739 
    740 				if (sz == npages)
    741 					view.type = I915_GGTT_VIEW_NORMAL;
    742 
    743 				vma = checked_vma_instance(obj, vm, &view);
    744 				if (IS_ERR(vma)) {
    745 					err = PTR_ERR(vma);
    746 					goto out_object;
    747 				}
    748 
    749 				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
    750 				if (err)
    751 					goto out_object;
    752 
    753 				if (!assert_pin(vma, &view, sz*PAGE_SIZE, p->name)) {
    754 					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
    755 					       p->name, offset, sz);
    756 					err = -EINVAL;
    757 					goto out_object;
    758 				}
    759 
    760 				if (!assert_partial(obj, vma, offset, sz)) {
    761 					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
    762 					       p->name, offset, sz);
    763 					err = -EINVAL;
    764 					goto out_object;
    765 				}
    766 
    767 				i915_vma_unpin(vma);
    768 				nvma++;
    769 
    770 				cond_resched();
    771 			}
    772 		}
    773 
    774 		count = 0;
    775 		list_for_each_entry(vma, &obj->vma.list, obj_link)
    776 			count++;
    777 		if (count != nvma) {
     778 			pr_err("(%s) Not all partial vma were recorded on the obj->vma.list: found %u, expected %u\n",
    779 			       p->name, count, nvma);
    780 			err = -EINVAL;
    781 			goto out_object;
    782 		}
    783 
    784 		/* Check that we did create the whole object mapping */
    785 		vma = checked_vma_instance(obj, vm, NULL);
    786 		if (IS_ERR(vma)) {
    787 			err = PTR_ERR(vma);
    788 			goto out_object;
    789 		}
    790 
    791 		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
    792 		if (err)
    793 			goto out_object;
    794 
    795 		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
    796 			pr_err("(%s) inconsistent full pin\n", p->name);
    797 			err = -EINVAL;
    798 			goto out_object;
    799 		}
    800 
    801 		i915_vma_unpin(vma);
    802 
    803 		count = 0;
    804 		list_for_each_entry(vma, &obj->vma.list, obj_link)
    805 			count++;
    806 		if (count != nvma) {
    807 			pr_err("(%s) allocated an extra full vma!\n", p->name);
    808 			err = -EINVAL;
    809 			goto out_object;
    810 		}
    811 	}
    812 
    813 out_object:
    814 	i915_gem_object_put(obj);
    815 out:
    816 	return err;
    817 }
    818 
    819 int i915_vma_mock_selftests(void)
    820 {
    821 	static const struct i915_subtest tests[] = {
    822 		SUBTEST(igt_vma_create),
    823 		SUBTEST(igt_vma_pin1),
    824 		SUBTEST(igt_vma_rotate_remap),
    825 		SUBTEST(igt_vma_partial),
    826 	};
    827 	struct drm_i915_private *i915;
    828 	struct i915_ggtt *ggtt;
    829 	int err;
    830 
    831 	i915 = mock_gem_device();
    832 	if (!i915)
    833 		return -ENOMEM;
    834 
    835 	ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL);
    836 	if (!ggtt) {
    837 		err = -ENOMEM;
    838 		goto out_put;
    839 	}
    840 	mock_init_ggtt(i915, ggtt);
    841 
    842 	err = i915_subtests(tests, ggtt);
    843 
    844 	mock_device_flush(i915);
    845 	i915_gem_drain_freed_objects(i915);
    846 	mock_fini_ggtt(ggtt);
    847 	kfree(ggtt);
    848 out_put:
    849 	drm_dev_put(&i915->drm);
    850 	return err;
    851 }
    852 
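         /*
          * Live test: write a distinct marker to every page through a rotated
          * or remapped GGTT view of the object, then read the object back
          * through the normal GGTT view and check that each marker landed on
          * the page predicted by rotated_index()/remapped_index().
          */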
    853 static int igt_vma_remapped_gtt(void *arg)
    854 {
    855 	struct drm_i915_private *i915 = arg;
    856 	const struct intel_remapped_plane_info planes[] = {
    857 		{ .width = 1, .height = 1, .stride = 1 },
    858 		{ .width = 2, .height = 2, .stride = 2 },
    859 		{ .width = 4, .height = 4, .stride = 4 },
    860 		{ .width = 8, .height = 8, .stride = 8 },
    861 
    862 		{ .width = 3, .height = 5, .stride = 3 },
    863 		{ .width = 3, .height = 5, .stride = 4 },
    864 		{ .width = 3, .height = 5, .stride = 5 },
    865 
    866 		{ .width = 5, .height = 3, .stride = 5 },
    867 		{ .width = 5, .height = 3, .stride = 7 },
    868 		{ .width = 5, .height = 3, .stride = 9 },
    869 
    870 		{ .width = 4, .height = 6, .stride = 6 },
    871 		{ .width = 6, .height = 4, .stride = 6 },
    872 		{ }
    873 	}, *p;
    874 	enum i915_ggtt_view_type types[] = {
    875 		I915_GGTT_VIEW_ROTATED,
    876 		I915_GGTT_VIEW_REMAPPED,
    877 		0,
    878 	}, *t;
    879 	struct drm_i915_gem_object *obj;
    880 	intel_wakeref_t wakeref;
    881 	int err = 0;
    882 
    883 	obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
    884 	if (IS_ERR(obj))
    885 		return PTR_ERR(obj);
    886 
    887 	wakeref = intel_runtime_pm_get(&i915->runtime_pm);
    888 
    889 	for (t = types; *t; t++) {
    890 		for (p = planes; p->width; p++) {
    891 			struct i915_ggtt_view view = {
    892 				.type = *t,
    893 				.rotated.plane[0] = *p,
    894 			};
    895 			struct i915_vma *vma;
    896 			u32 __iomem *map;
    897 			unsigned int x, y;
    899 
    900 			i915_gem_object_lock(obj);
    901 			err = i915_gem_object_set_to_gtt_domain(obj, true);
    902 			i915_gem_object_unlock(obj);
    903 			if (err)
    904 				goto out;
    905 
    906 			vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
    907 			if (IS_ERR(vma)) {
    908 				err = PTR_ERR(vma);
    909 				goto out;
    910 			}
    911 
    912 			GEM_BUG_ON(vma->ggtt_view.type != *t);
    913 
    914 			map = i915_vma_pin_iomap(vma);
    915 			i915_vma_unpin(vma);
    916 			if (IS_ERR(map)) {
    917 				err = PTR_ERR(map);
    918 				goto out;
    919 			}
    920 
    921 			for (y = 0 ; y < p->height; y++) {
    922 				for (x = 0 ; x < p->width; x++) {
    923 					unsigned int offset;
    924 					u32 val = y << 16 | x;
    925 
    926 					if (*t == I915_GGTT_VIEW_ROTATED)
    927 						offset = (x * p->height + y) * PAGE_SIZE;
    928 					else
    929 						offset = (y * p->width + x) * PAGE_SIZE;
    930 
    931 					iowrite32(val, &map[offset / sizeof(*map)]);
    932 				}
    933 			}
    934 
    935 			i915_vma_unpin_iomap(vma);
    936 
    937 			vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
    938 			if (IS_ERR(vma)) {
    939 				err = PTR_ERR(vma);
    940 				goto out;
    941 			}
    942 
    943 			GEM_BUG_ON(vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL);
    944 
    945 			map = i915_vma_pin_iomap(vma);
    946 			i915_vma_unpin(vma);
    947 			if (IS_ERR(map)) {
    948 				err = PTR_ERR(map);
    949 				goto out;
    950 			}
    951 
    952 			for (y = 0 ; y < p->height; y++) {
    953 				for (x = 0 ; x < p->width; x++) {
    954 					unsigned int offset, src_idx;
    955 					u32 exp = y << 16 | x;
    956 					u32 val;
    957 
    958 					if (*t == I915_GGTT_VIEW_ROTATED)
    959 						src_idx = rotated_index(&view.rotated, 0, x, y);
    960 					else
    961 						src_idx = remapped_index(&view.remapped, 0, x, y);
    962 					offset = src_idx * PAGE_SIZE;
    963 
    964 					val = ioread32(&map[offset / sizeof(*map)]);
    965 					if (val != exp) {
    966 						pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
    967 						       *t == I915_GGTT_VIEW_ROTATED ? "Rotated" : "Remapped",
     968 						       exp, val);
     969 						i915_vma_unpin_iomap(vma);
         						err = -EINVAL;
     970 						goto out;
    971 					}
    972 				}
    973 			}
    974 			i915_vma_unpin_iomap(vma);
    975 
    976 			cond_resched();
    977 		}
    978 	}
    979 
    980 out:
    981 	intel_runtime_pm_put(&i915->runtime_pm, wakeref);
    982 	i915_gem_object_put(obj);
    983 
    984 	return err;
    985 }
    986 
    987 int i915_vma_live_selftests(struct drm_i915_private *i915)
    988 {
    989 	static const struct i915_subtest tests[] = {
    990 		SUBTEST(igt_vma_remapped_gtt),
    991 	};
    992 
    993 	return i915_subtests(tests, i915);
    994 }
    995