/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#include <linux/prime_numbers.h>

#include "../i915_selftest.h"

#include "mock_gem_device.h"
#include "mock_context.h"
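
/*
 * These selftests run entirely against the mock GEM device created by
 * mock_gem_device(), so no real hardware is required: mock_context()
 * supplies contexts with their own ppGTT, which is what assert_vma()
 * below checks the created VMA against.
 */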
static bool assert_vma(struct i915_vma *vma,
		       struct drm_i915_gem_object *obj,
		       struct i915_gem_context *ctx)
{
	bool ok = true;

	if (vma->vm != &ctx->ppgtt->base) {
		pr_err("VMA created with wrong VM\n");
		ok = false;
	}

	if (vma->size != obj->base.size) {
		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
		       vma->size, obj->base.size);
		ok = false;
	}

	if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
		pr_err("VMA created with wrong type [%d]\n",
		       vma->ggtt_view.type);
		ok = false;
	}

	return ok;
}
static struct i915_vma *
checked_vma_instance(struct drm_i915_gem_object *obj,
		     struct i915_address_space *vm,
		     struct i915_ggtt_view *view)
{
	struct i915_vma *vma;
	bool ok = true;

	vma = i915_vma_instance(obj, vm, view);
	if (IS_ERR(vma))
		return vma;

	/* Manual checks, will be reinforced by i915_vma_compare! */
	if (vma->vm != vm) {
		pr_err("VMA's vm [%p] does not match request [%p]\n",
		       vma->vm, vm);
		ok = false;
	}

	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
		ok = false;
	}

	if (i915_vma_compare(vma, vm, view)) {
		pr_err("i915_vma_compare failed with create parameters!\n");
		return ERR_PTR(-EINVAL);
	}

	if (i915_vma_compare(vma, vma->vm,
			     i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
		pr_err("i915_vma_compare failed with itself\n");
		return ERR_PTR(-EINVAL);
	}

	if (!ok) {
		pr_err("i915_vma_compare failed to detect the difference!\n");
		return ERR_PTR(-EINVAL);
	}

	return vma;
}
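
/*
 * create_vmas() walks every (object, context) pair on the supplied lists and
 * requests a VMA in that context's ppGTT. The pinned=0/1 passes mean each
 * VMA is visited twice, so the second visit exercises the lookup of an
 * already instantiated (and pinned) VMA rather than a fresh creation.
 */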
static int create_vmas(struct drm_i915_private *i915,
		       struct list_head *objects,
		       struct list_head *contexts)
{
	struct drm_i915_gem_object *obj;
	struct i915_gem_context *ctx;
	int pinned;

	list_for_each_entry(obj, objects, st_link) {
		for (pinned = 0; pinned <= 1; pinned++) {
			list_for_each_entry(ctx, contexts, link) {
				struct i915_address_space *vm =
					&ctx->ppgtt->base;
				struct i915_vma *vma;
				int err;

				vma = checked_vma_instance(obj, vm, NULL);
				if (IS_ERR(vma))
					return PTR_ERR(vma);

				if (!assert_vma(vma, obj, ctx)) {
					pr_err("VMA lookup/create failed\n");
					return -EINVAL;
				}

				if (!pinned) {
					err = i915_vma_pin(vma, 0, 0, PIN_USER);
					if (err) {
						pr_err("Failed to pin VMA\n");
						return err;
					}
				} else {
					i915_vma_unpin(vma);
				}
			}
		}
	}

	return 0;
}
static int igt_vma_create(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj, *on;
	struct i915_gem_context *ctx, *cn;
	unsigned long num_obj, num_ctx;
	unsigned long no, nc;
	IGT_TIMEOUT(end_time);
	LIST_HEAD(contexts);
	LIST_HEAD(objects);
	int err = -ENOMEM;

	/* Exercise creating many vma amongst many objects, checking the
	 * vma creation and lookup routines.
	 */

	no = 0;
	for_each_prime_number(num_obj, ULONG_MAX - 1) {
		for (; no < num_obj; no++) {
			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
			if (IS_ERR(obj))
				goto out;

			list_add(&obj->st_link, &objects);
		}

		nc = 0;
		for_each_prime_number(num_ctx, MAX_CONTEXT_HW_ID) {
			for (; nc < num_ctx; nc++) {
				ctx = mock_context(i915, "mock");
				if (!ctx)
					goto out;

				list_move(&ctx->link, &contexts);
			}

			err = create_vmas(i915, &objects, &contexts);
			if (err)
				goto out;

			if (igt_timeout(end_time,
					"%s timed out: after %lu objects in %lu contexts\n",
					__func__, no, nc))
				goto end;
		}

		list_for_each_entry_safe(ctx, cn, &contexts, link) {
			list_del_init(&ctx->link);
			mock_context_close(ctx);
		}
	}

end:
	/* Final pass to lookup all created contexts */
	err = create_vmas(i915, &objects, &contexts);
out:
	list_for_each_entry_safe(ctx, cn, &contexts, link) {
		list_del_init(&ctx->link);
		mock_context_close(ctx);
	}

	list_for_each_entry_safe(obj, on, &objects, st_link)
		i915_gem_object_put(obj);
	return err;
}
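
/*
 * A pin_mode describes one i915_vma_pin() attempt for igt_vma_pin1(): the
 * size and flags are handed straight to the pin call, and the assert()
 * callback decides whether the returned error (and, for successful pins,
 * the resulting placement) is the expected outcome.
 */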
struct pin_mode {
	u64 size;
	u64 flags;
	bool (*assert)(const struct i915_vma *,
		       const struct pin_mode *mode,
		       int result);
	const char *string;
};

static bool assert_pin_valid(const struct i915_vma *vma,
			     const struct pin_mode *mode,
			     int result)
{
	if (result)
		return false;

	if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
		return false;

	return true;
}

static bool assert_pin_enospc(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -ENOSPC;
}

static bool assert_pin_einval(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -EINVAL;
}
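
/*
 * Note on the table below: the PIN_OFFSET_BIAS / PIN_OFFSET_FIXED entries OR
 * a byte offset directly into the flags argument. This relies on pin offsets
 * being page aligned, so (as assumed here) the low bits of the u64 remain
 * free for the flag bits themselves.
 */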
static int igt_vma_pin1(void *arg)
{
	struct drm_i915_private *i915 = arg;
	const struct pin_mode modes[] = {
#define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
#define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
#define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
#define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
		VALID(0, PIN_GLOBAL),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),

		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.base.total - 4096)),

		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (i915->ggtt.mappable_end - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | i915->ggtt.mappable_end),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (i915->ggtt.base.total - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | i915->ggtt.base.total),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),

		VALID(4096, PIN_GLOBAL),
		VALID(8192, PIN_GLOBAL),
		VALID(i915->ggtt.mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(i915->ggtt.mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
		NOSPACE(i915->ggtt.mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(i915->ggtt.base.total - 4096, PIN_GLOBAL),
		VALID(i915->ggtt.base.total, PIN_GLOBAL),
		NOSPACE(i915->ggtt.base.total + 4096, PIN_GLOBAL),
		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (i915->ggtt.mappable_end - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (i915->ggtt.base.total - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),

		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),

#if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
		/* Misusing BIAS is a programming error (it is not controllable
		 * from userspace) so when debugging is enabled, it explodes.
		 * However, the tests are still quite interesting for checking
		 * variable start, end and size.
		 */
		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | i915->ggtt.mappable_end),
		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | i915->ggtt.base.total),
		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),
		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.base.total - 4096)),
#endif
		{ },
#undef NOSPACE
#undef INVALID
#undef __INVALID
#undef VALID
	}, *m;
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	int err = -EINVAL;

	/* Exercise all the weird and wonderful i915_vma_pin requests,
	 * focusing on error handling of boundary conditions.
	 */

	GEM_BUG_ON(!drm_mm_clean(&i915->ggtt.base.mm));

	obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	vma = checked_vma_instance(obj, &i915->ggtt.base, NULL);
	if (IS_ERR(vma)) {
		err = PTR_ERR(vma);
		goto out;
	}

	for (m = modes; m->assert; m++) {
		err = i915_vma_pin(vma, m->size, 0, m->flags);
		if (!m->assert(vma, m, err)) {
			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
			       (int)(m - modes), m->string, m->size, m->flags,
			       err);
			err = -EINVAL;
			goto out;
		}

		if (!err) {
			i915_vma_unpin(vma);
			err = i915_vma_unbind(vma);
			if (err) {
				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
				goto out;
			}
		}
	}

	err = 0;
out:
	i915_gem_object_put(obj);
	return err;
}
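
/*
 * rotated_index() names the page of the unrotated (linear) object that is
 * expected at position (x, y) of plane n in the rotated view: row
 * (height - y - 1) of the plane, column x, offset by the plane's start.
 * For example, a 2x2 plane with stride 2 and offset 0 maps (x, y) = (0, 0),
 * (0, 1), (1, 0), (1, 1) to pages 2, 0, 3, 1 respectively.
 */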
static unsigned long rotated_index(const struct intel_rotation_info *r,
				   unsigned int n,
				   unsigned int x,
				   unsigned int y)
{
	return (r->plane[n].stride * (r->plane[n].height - y - 1) +
		r->plane[n].offset + x);
}
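
/*
 * assert_rotated() walks the VMA's scatterlist in the same (x, y) order that
 * rotated_index() describes for plane n and checks that every entry is a
 * single page whose DMA address matches the corresponding page of the
 * original object, returning the scatterlist position for the next plane.
 */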
static struct scatterlist *
assert_rotated(struct drm_i915_gem_object *obj,
	       const struct intel_rotation_info *r, unsigned int n,
	       struct scatterlist *sg)
{
	unsigned int x, y;

	for (x = 0; x < r->plane[n].width; x++) {
		for (y = 0; y < r->plane[n].height; y++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}

			src_idx = rotated_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (sg_dma_len(sg) != PAGE_SIZE) {
				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) != src) {
				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			sg = sg_next(sg);
		}
	}

	return sg;
}
static unsigned int rotated_size(const struct intel_rotation_plane_info *a,
				 const struct intel_rotation_plane_info *b)
{
	return a->width * a->height + b->width * b->height;
}
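
/*
 * The plane table below mixes square, non-square and padded-stride layouts,
 * and every (plane[0], plane[1]) pairing is tried with plane offsets stepped
 * through the primes, so the rotated scatterlists cover a wide range of
 * shapes within the 64-page test object.
 */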
static int igt_vma_rotate(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct i915_address_space *vm = &i915->ggtt.base;
	struct drm_i915_gem_object *obj;
	const struct intel_rotation_plane_info planes[] = {
		{ .width = 1, .height = 1, .stride = 1 },
		{ .width = 2, .height = 2, .stride = 2 },
		{ .width = 4, .height = 4, .stride = 4 },
		{ .width = 8, .height = 8, .stride = 8 },

		{ .width = 3, .height = 5, .stride = 3 },
		{ .width = 3, .height = 5, .stride = 4 },
		{ .width = 3, .height = 5, .stride = 5 },

		{ .width = 5, .height = 3, .stride = 5 },
		{ .width = 5, .height = 3, .stride = 7 },
		{ .width = 5, .height = 3, .stride = 9 },

		{ .width = 4, .height = 6, .stride = 6 },
		{ .width = 6, .height = 4, .stride = 6 },

		{ }
	}, *a, *b;
	const unsigned int max_pages = 64;
	int err = -ENOMEM;

	/* Create VMA for many different combinations of planes and check
	 * that the page layout within the rotated VMA matches our expectations.
	 */

	obj = i915_gem_object_create_internal(i915, max_pages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (a = planes; a->width; a++) {
		for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
			struct i915_ggtt_view view;
			unsigned int n, max_offset;

			max_offset = max(a->stride * a->height,
					 b->stride * b->height);
			GEM_BUG_ON(max_offset > max_pages);
			max_offset = max_pages - max_offset;

			view.type = I915_GGTT_VIEW_ROTATED;
			view.rotated.plane[0] = *a;
			view.rotated.plane[1] = *b;

			for_each_prime_number_from(view.rotated.plane[0].offset, 0, max_offset) {
				for_each_prime_number_from(view.rotated.plane[1].offset, 0, max_offset) {
					struct scatterlist *sg;
					struct i915_vma *vma;

					vma = checked_vma_instance(obj, vm, &view);
					if (IS_ERR(vma)) {
						err = PTR_ERR(vma);
						goto out_object;
					}

					err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
					if (err) {
						pr_err("Failed to pin VMA, err=%d\n", err);
						goto out_object;
					}

					if (vma->size != rotated_size(a, b) * PAGE_SIZE) {
						pr_err("VMA is wrong size, expected %lu, found %llu\n",
						       PAGE_SIZE * rotated_size(a, b), vma->size);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->pages->nents != rotated_size(a, b)) {
						pr_err("sg table is wrong size, expected %u, found %u nents\n",
						       rotated_size(a, b), vma->pages->nents);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->node.size < vma->size) {
						pr_err("VMA binding too small, expected %llu, found %llu\n",
						       vma->size, vma->node.size);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->pages == obj->mm.pages) {
						pr_err("VMA using unrotated object pages!\n");
						err = -EINVAL;
						goto out_object;
					}

					sg = vma->pages->sgl;
					for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
						sg = assert_rotated(obj, &view.rotated, n, sg);
						if (IS_ERR(sg)) {
							pr_err("Inconsistent VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n", n,
							       view.rotated.plane[0].width,
							       view.rotated.plane[0].height,
							       view.rotated.plane[0].stride,
							       view.rotated.plane[0].offset,
							       view.rotated.plane[1].width,
							       view.rotated.plane[1].height,
							       view.rotated.plane[1].stride,
							       view.rotated.plane[1].offset);
							err = -EINVAL;
							goto out_object;
						}
					}

					i915_vma_unpin(vma);
				}
			}
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}
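
/*
 * For a partial view of 'size' pages starting at page 'offset' of the
 * object, assert_partial() expects the VMA's scatterlist to contain exactly
 * that many entries, each matching the DMA address of the corresponding
 * object page in order.
 */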
static bool assert_partial(struct drm_i915_gem_object *obj,
			   struct i915_vma *vma,
			   unsigned long offset,
			   unsigned long size)
{
	struct sgt_iter sgt;
	dma_addr_t dma;

	for_each_sgt_dma(dma, sgt, vma->pages) {
		dma_addr_t src;

		if (!size) {
			pr_err("Partial scattergather list too long\n");
			return false;
		}

		src = i915_gem_object_get_dma_address(obj, offset);
		if (src != dma) {
			pr_err("DMA mismatch for partial page offset %lu\n",
			       offset);
			return false;
		}

		offset++;
		size--;
	}

	return true;
}
static bool assert_pin(struct i915_vma *vma,
		       struct i915_ggtt_view *view,
		       u64 size,
		       const char *name)
{
	bool ok = true;

	if (vma->size != size) {
		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
		       name, size, vma->size);
		ok = false;
	}

	if (vma->node.size < vma->size) {
		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
		       name, vma->size, vma->node.size);
		ok = false;
	}

	if (view && view->type != I915_GGTT_VIEW_NORMAL) {
		if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
			pr_err("(%s) VMA mismatch upon creation!\n",
			       name);
			ok = false;
		}

		if (vma->pages == vma->obj->mm.pages) {
			pr_err("(%s) VMA using original object pages!\n",
			       name);
			ok = false;
		}
	} else {
		if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
			pr_err("Not the normal ggtt view! Found %d\n",
			       vma->ggtt_view.type);
			ok = false;
		}

		if (vma->pages != vma->obj->mm.pages) {
			pr_err("VMA not using object pages!\n");
			ok = false;
		}
	}

	return ok;
}
static int igt_vma_partial(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct i915_address_space *vm = &i915->ggtt.base;
	const unsigned int npages = 1021; /* prime! */
	struct drm_i915_gem_object *obj;
	const struct phase {
		const char *name;
	} phases[] = {
		{ "create" },
		{ "lookup" },
		{ },
	}, *p;
	unsigned int sz, offset;
	struct i915_vma *vma;
	int err = -ENOMEM;

	/* Create lots of different VMA for the object and check that
	 * we are returned the same VMA when we later request the same range.
	 */

	obj = i915_gem_object_create_internal(i915, npages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (p = phases; p->name; p++) { /* exercise both create/lookup */
		unsigned int count, nvma;

		nvma = 0;
		for_each_prime_number_from(sz, 1, npages) {
			for_each_prime_number_from(offset, 0, npages - sz) {
				struct i915_ggtt_view view;

				view.type = I915_GGTT_VIEW_PARTIAL;
				view.partial.offset = offset;
				view.partial.size = sz;

				if (sz == npages)
					view.type = I915_GGTT_VIEW_NORMAL;

				vma = checked_vma_instance(obj, vm, &view);
				if (IS_ERR(vma)) {
					err = PTR_ERR(vma);
					goto out_object;
				}

				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
				if (err)
					goto out_object;

				if (!assert_pin(vma, &view, sz * PAGE_SIZE, p->name)) {
					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				if (!assert_partial(obj, vma, offset, sz)) {
					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				i915_vma_unpin(vma);
				nvma++;
			}
		}

		count = 0;
		list_for_each_entry(vma, &obj->vma_list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) All partial vma were not recorded on the obj->vma_list: found %u, expected %u\n",
			       p->name, count, nvma);
			err = -EINVAL;
			goto out_object;
		}

		/* Check that we did create the whole object mapping */
		vma = checked_vma_instance(obj, vm, NULL);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			goto out_object;
		}

		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
		if (err)
			goto out_object;

		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
			pr_err("(%s) inconsistent full pin\n", p->name);
			err = -EINVAL;
			goto out_object;
		}

		i915_vma_unpin(vma);

		count = 0;
		list_for_each_entry(vma, &obj->vma_list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) allocated an extra full vma!\n", p->name);
			err = -EINVAL;
			goto out_object;
		}
	}

	err = 0;
out_object:
	i915_gem_object_put(obj);
out:
	return err;
}
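
/*
 * Entry point for the mock selftests: the subtests above are run against a
 * mock GEM device under struct_mutex, and the device is released again once
 * they complete.
 */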
int i915_vma_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_create),
		SUBTEST(igt_vma_pin1),
		SUBTEST(igt_vma_rotate),
		SUBTEST(igt_vma_partial),
	};
	struct drm_i915_private *i915;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	mutex_lock(&i915->drm.struct_mutex);
	err = i915_subtests(tests, i915);
	mutex_unlock(&i915->drm.struct_mutex);

	drm_dev_unref(&i915->drm);

	return err;
}