/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
25 #include <linux/prime_numbers.h>
27 #include "gem/i915_gem_context.h"
28 #include "gem/selftests/mock_context.h"
30 #include "i915_scatterlist.h"
31 #include "i915_selftest.h"
33 #include "mock_gem_device.h"
/*
 * assert_vma - sanity-check a freshly created vma against the object and
 * context it was created for: the vma must point at the context's VM, have
 * the same size as the backing object, and use the normal GGTT view.
 * NOTE(review): this extract is fragmentary (interior lines are missing),
 * so the result accumulation and return statement are not visible here.
 */
36 static bool assert_vma(struct i915_vma
*vma
,
37 struct drm_i915_gem_object
*obj
,
38 struct i915_gem_context
*ctx
)
/* The vma must live in the address space owned by the context. */
42 if (vma
->vm
!= rcu_access_pointer(ctx
->vm
)) {
43 pr_err("VMA created with wrong VM\n");
/* The vma must span exactly the backing object. */
47 if (vma
->size
!= obj
->base
.size
) {
48 pr_err("VMA created with wrong size, found %llu, expected %zu\n",
49 vma
->size
, obj
->base
.size
);
/* A vma created without an explicit view must be the NORMAL view. */
53 if (vma
->ggtt_view
.type
!= I915_GGTT_VIEW_NORMAL
) {
54 pr_err("VMA created with wrong type [%d]\n",
54 pr_err("VMA created with wrong type [%d]\n",
/*
 * checked_vma_instance - wrapper around i915_vma_instance() that cross
 * checks the returned vma with i915_vma_compare(): the vma must match the
 * (obj, vm, view) it was requested with, must match itself, and the ggtt
 * status must agree with the parent vm.  Returns the vma on success or an
 * ERR_PTR(-EINVAL) when any consistency check fails.
 * NOTE(review): fragmentary extract — some error-path lines are missing.
 */
62 static struct i915_vma
*
63 checked_vma_instance(struct drm_i915_gem_object
*obj
,
64 struct i915_address_space
*vm
,
65 const struct i915_ggtt_view
*view
)
70 vma
= i915_vma_instance(obj
, vm
, view
);
74 /* Manual checks, will be reinforced by i915_vma_compare! */
76 pr_err("VMA's vm [%p] does not match request [%p]\n",
81 if (i915_is_ggtt(vm
) != i915_vma_is_ggtt(vma
)) {
82 pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
83 i915_vma_is_ggtt(vma
), i915_is_ggtt(vm
));
/* The comparator must agree the vma matches its creation parameters... */
87 if (i915_vma_compare(vma
, vm
, view
)) {
88 pr_err("i915_vma_compare failed with create parameters!\n");
89 return ERR_PTR(-EINVAL
);
/* ...and that the vma compares equal with itself. */
92 if (i915_vma_compare(vma
, vma
->vm
,
93 i915_vma_is_ggtt(vma
) ? &vma
->ggtt_view
: NULL
)) {
94 pr_err("i915_vma_compare failed with itself\n");
95 return ERR_PTR(-EINVAL
);
99 pr_err("i915_vma_compare failed to detect the difference!\n");
100 return ERR_PTR(-EINVAL
);
/*
 * create_vmas - for every (object, context) pair on the given lists, look
 * up/create a vma via checked_vma_instance(), validate it with
 * assert_vma(), and exercise pinning (the loop runs once unpinned and once
 * pinned, per the "pinned" 0..1 iteration).
 * NOTE(review): fragmentary extract — declarations of "pinned"/"err", the
 * unpin/cleanup path and the return statement are not visible here.
 */
106 static int create_vmas(struct drm_i915_private
*i915
,
107 struct list_head
*objects
,
108 struct list_head
*contexts
)
110 struct drm_i915_gem_object
*obj
;
111 struct i915_gem_context
*ctx
;
114 list_for_each_entry(obj
, objects
, st_link
) {
115 for (pinned
= 0; pinned
<= 1; pinned
++) {
116 list_for_each_entry(ctx
, contexts
, link
) {
117 struct i915_address_space
*vm
;
118 struct i915_vma
*vma
;
121 vm
= i915_gem_context_get_vm_rcu(ctx
);
122 vma
= checked_vma_instance(obj
, vm
, NULL
);
127 if (!assert_vma(vma
, obj
, ctx
)) {
128 pr_err("VMA lookup/create failed\n");
133 err
= i915_vma_pin(vma
, 0, 0, PIN_USER
);
135 pr_err("Failed to pin VMA\n");
/*
 * igt_vma_create - stress vma creation/lookup by growing a population of
 * objects and mock contexts in prime-number steps and calling create_vmas()
 * over the cross product, until the IGT timeout fires.  Contexts are
 * recycled between rounds; a final pass re-looks-up all created vmas.
 * NOTE(review): fragmentary extract — list-head declarations, error
 * handling and the function's return path are not visible here.
 */
148 static int igt_vma_create(void *arg
)
150 struct i915_ggtt
*ggtt
= arg
;
151 struct drm_i915_private
*i915
= ggtt
->vm
.i915
;
152 struct drm_i915_gem_object
*obj
, *on
;
153 struct i915_gem_context
*ctx
, *cn
;
154 unsigned long num_obj
, num_ctx
;
155 unsigned long no
, nc
;
156 IGT_TIMEOUT(end_time
);
161 /* Exercise creating many vma among many objects, checking the
162 * vma creation and lookup routines.
166 for_each_prime_number(num_obj
, ULONG_MAX
- 1) {
167 for (; no
< num_obj
; no
++) {
168 obj
= i915_gem_object_create_internal(i915
, PAGE_SIZE
);
172 list_add(&obj
->st_link
, &objects
);
176 for_each_prime_number(num_ctx
, 2 * BITS_PER_LONG
) {
177 for (; nc
< num_ctx
; nc
++) {
178 ctx
= mock_context(i915
, "mock");
182 list_move(&ctx
->link
, &contexts
);
185 err
= create_vmas(i915
, &objects
, &contexts
);
189 if (igt_timeout(end_time
,
190 "%s timed out: after %lu objects in %lu contexts\n",
/* Recycle this round's contexts before growing the object count. */
195 list_for_each_entry_safe(ctx
, cn
, &contexts
, link
) {
196 list_del_init(&ctx
->link
);
197 mock_context_close(ctx
);
204 /* Final pass to lookup all created contexts */
205 err
= create_vmas(i915
, &objects
, &contexts
);
207 list_for_each_entry_safe(ctx
, cn
, &contexts
, link
) {
208 list_del_init(&ctx
->link
);
209 mock_context_close(ctx
);
212 list_for_each_entry_safe(obj
, on
, &objects
, st_link
)
213 i915_gem_object_put(obj
);
/*
 * Table entry describing one i915_vma_pin() request to exercise: the
 * expected-result predicate plus (per usage in igt_vma_pin1() below) a
 * size, pin flags and a printable description string.
 * NOTE(review): only the assert-callback member survives in this extract.
 */
220 bool (*assert)(const struct i915_vma
*,
221 const struct pin_mode
*mode
,
/*
 * assert_pin_valid - pin_mode predicate: the pin must have succeeded and
 * the vma must not be misplaced w.r.t. the requested size/flags.
 * NOTE(review): fragmentary extract — the result check and return
 * statements are not visible here.
 */
226 static bool assert_pin_valid(const struct i915_vma
*vma
,
227 const struct pin_mode
*mode
,
233 if (i915_vma_misplaced(vma
, mode
->size
, 0, mode
->flags
))
/* pin_mode predicate: the pin attempt must have failed with -ENOSPC. */
240 static bool assert_pin_enospc(const struct i915_vma
*vma
,
241 const struct pin_mode
*mode
,
244 return result
== -ENOSPC
;
/* pin_mode predicate: the pin attempt must have failed with -EINVAL. */
248 static bool assert_pin_einval(const struct i915_vma
*vma
,
249 const struct pin_mode
*mode
,
252 return result
== -EINVAL
;
/*
 * igt_vma_pin1 - drive i915_vma_pin() through a table of boundary-condition
 * requests (zero/odd sizes, OFFSET_BIAS/OFFSET_FIXED at and beyond the
 * mappable and total GGTT limits) and check each result against the
 * per-entry predicate (valid / -EINVAL / -ENOSPC).
 * NOTE(review): fragmentary extract — the table terminator, loop-local
 * declarations, unpin logic and the return path are not visible here.
 */
255 static int igt_vma_pin1(void *arg
)
257 struct i915_ggtt
*ggtt
= arg
;
258 const struct pin_mode modes
[] = {
259 #define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
260 #define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
261 #define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
262 #define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
263 VALID(0, PIN_GLOBAL
),
264 VALID(0, PIN_GLOBAL
| PIN_MAPPABLE
),
266 VALID(0, PIN_GLOBAL
| PIN_OFFSET_BIAS
| 4096),
267 VALID(0, PIN_GLOBAL
| PIN_OFFSET_BIAS
| 8192),
268 VALID(0, PIN_GLOBAL
| PIN_OFFSET_BIAS
| (ggtt
->mappable_end
- 4096)),
269 VALID(0, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_BIAS
| (ggtt
->mappable_end
- 4096)),
270 VALID(0, PIN_GLOBAL
| PIN_OFFSET_BIAS
| (ggtt
->vm
.total
- 4096)),
272 VALID(0, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_FIXED
| (ggtt
->mappable_end
- 4096)),
273 INVALID(0, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_FIXED
| ggtt
->mappable_end
),
274 VALID(0, PIN_GLOBAL
| PIN_OFFSET_FIXED
| (ggtt
->vm
.total
- 4096)),
275 INVALID(0, PIN_GLOBAL
| PIN_OFFSET_FIXED
| ggtt
->vm
.total
),
276 INVALID(0, PIN_GLOBAL
| PIN_OFFSET_FIXED
| round_down(U64_MAX
, PAGE_SIZE
)),
278 VALID(4096, PIN_GLOBAL
),
279 VALID(8192, PIN_GLOBAL
),
280 VALID(ggtt
->mappable_end
- 4096, PIN_GLOBAL
| PIN_MAPPABLE
),
281 VALID(ggtt
->mappable_end
, PIN_GLOBAL
| PIN_MAPPABLE
),
282 NOSPACE(ggtt
->mappable_end
+ 4096, PIN_GLOBAL
| PIN_MAPPABLE
),
283 VALID(ggtt
->vm
.total
- 4096, PIN_GLOBAL
),
284 VALID(ggtt
->vm
.total
, PIN_GLOBAL
),
285 NOSPACE(ggtt
->vm
.total
+ 4096, PIN_GLOBAL
),
286 NOSPACE(round_down(U64_MAX
, PAGE_SIZE
), PIN_GLOBAL
),
287 INVALID(8192, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_FIXED
| (ggtt
->mappable_end
- 4096)),
288 INVALID(8192, PIN_GLOBAL
| PIN_OFFSET_FIXED
| (ggtt
->vm
.total
- 4096)),
289 INVALID(8192, PIN_GLOBAL
| PIN_OFFSET_FIXED
| (round_down(U64_MAX
, PAGE_SIZE
) - 4096)),
291 VALID(8192, PIN_GLOBAL
| PIN_OFFSET_BIAS
| (ggtt
->mappable_end
- 4096)),
293 #if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
294 /* Misusing BIAS is a programming error (it is not controllable
295 * from userspace) so when debugging is enabled, it explodes.
296 * However, the tests are still quite interesting for checking
297 * variable start, end and size.
299 NOSPACE(0, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_BIAS
| ggtt
->mappable_end
),
300 NOSPACE(0, PIN_GLOBAL
| PIN_OFFSET_BIAS
| ggtt
->vm
.total
),
301 NOSPACE(8192, PIN_GLOBAL
| PIN_MAPPABLE
| PIN_OFFSET_BIAS
| (ggtt
->mappable_end
- 4096)),
302 NOSPACE(8192, PIN_GLOBAL
| PIN_OFFSET_BIAS
| (ggtt
->vm
.total
- 4096)),
310 struct drm_i915_gem_object
*obj
;
311 struct i915_vma
*vma
;
314 /* Exercise all the weird and wonderful i915_vma_pin requests,
315 * focusing on error handling of boundary conditions.
/* The mock GGTT must start empty or placement results are meaningless. */
318 GEM_BUG_ON(!drm_mm_clean(&ggtt
->vm
.mm
));
320 obj
= i915_gem_object_create_internal(ggtt
->vm
.i915
, PAGE_SIZE
);
324 vma
= checked_vma_instance(obj
, &ggtt
->vm
, NULL
);
/* Run every table entry and check the outcome via its predicate. */
328 for (m
= modes
; m
->assert; m
++) {
329 err
= i915_vma_pin(vma
, m
->size
, 0, m
->flags
);
330 if (!m
->assert(vma
, m
, err
)) {
331 pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
332 m
->assert == assert_pin_valid
? "Failed" : "Unexpectedly succeeded",
333 (int)(m
- modes
), m
->string
, m
->size
, m
->flags
,
343 err
= i915_vma_unbind(vma
);
345 pr_err("Failed to unbind single page from GGTT, err=%d\n", err
);
355 i915_gem_object_put(obj
);
359 static unsigned long rotated_index(const struct intel_rotation_info
*r
,
364 return (r
->plane
[n
].stride
* (r
->plane
[n
].height
- y
- 1) +
365 r
->plane
[n
].offset
+ x
);
/*
 * assert_rotated - walk plane n of a rotated view column-by-column and
 * verify each scatterlist entry is exactly one page long and points at the
 * dma address of the expected source page (per rotated_index()).  Returns
 * the next unconsumed scatterlist entry, or ERR_PTR(-EINVAL) on mismatch.
 * NOTE(review): fragmentary extract — loop variable declarations, the
 * sg-advance step and the final return are not visible here.
 */
368 static struct scatterlist
*
369 assert_rotated(struct drm_i915_gem_object
*obj
,
370 const struct intel_rotation_info
*r
, unsigned int n
,
371 struct scatterlist
*sg
)
/* Rotated layout is column-major: outer loop over x, inner over y. */
375 for (x
= 0; x
< r
->plane
[n
].width
; x
++) {
376 for (y
= 0; y
< r
->plane
[n
].height
; y
++) {
377 unsigned long src_idx
;
381 pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
383 return ERR_PTR(-EINVAL
);
386 src_idx
= rotated_index(r
, n
, x
, y
);
387 src
= i915_gem_object_get_dma_address(obj
, src_idx
);
389 if (sg_dma_len(sg
) != PAGE_SIZE
) {
390 pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
391 sg_dma_len(sg
), PAGE_SIZE
,
393 return ERR_PTR(-EINVAL
);
396 if (sg_dma_address(sg
) != src
) {
397 pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
399 return ERR_PTR(-EINVAL
);
409 static unsigned long remapped_index(const struct intel_remapped_info
*r
,
414 return (r
->plane
[n
].stride
* y
+
415 r
->plane
[n
].offset
+ x
);
/*
 * assert_remapped - walk plane n of a remapped view row-by-row and verify
 * the scatterlist covers each expected source page.  Unlike the rotated
 * view, remapped sg entries may be multi-page, so a running "left"/"offset"
 * pair tracks the position within the current entry.  Returns the next
 * unconsumed scatterlist entry, or ERR_PTR(-EINVAL) on mismatch.
 * NOTE(review): fragmentary extract — the offset bookkeeping, sg-advance
 * step and final return are not visible here.
 */
418 static struct scatterlist
*
419 assert_remapped(struct drm_i915_gem_object
*obj
,
420 const struct intel_remapped_info
*r
, unsigned int n
,
421 struct scatterlist
*sg
)
424 unsigned int left
= 0;
/* Remapped layout is row-major: outer loop over y, inner over x. */
427 for (y
= 0; y
< r
->plane
[n
].height
; y
++) {
428 for (x
= 0; x
< r
->plane
[n
].width
; x
++) {
429 unsigned long src_idx
;
433 pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
435 return ERR_PTR(-EINVAL
);
439 left
= sg_dma_len(sg
);
442 src_idx
= remapped_index(r
, n
, x
, y
);
443 src
= i915_gem_object_get_dma_address(obj
, src_idx
);
/* Remaining bytes in this entry must be a positive page multiple. */
445 if (left
< PAGE_SIZE
|| left
& (PAGE_SIZE
-1)) {
446 pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
447 sg_dma_len(sg
), PAGE_SIZE
,
449 return ERR_PTR(-EINVAL
);
452 if (sg_dma_address(sg
) + offset
!= src
) {
453 pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
455 return ERR_PTR(-EINVAL
);
470 static unsigned int rotated_size(const struct intel_remapped_plane_info
*a
,
471 const struct intel_remapped_plane_info
*b
)
473 return a
->width
* a
->height
+ b
->width
* b
->height
;
/*
 * igt_vma_rotate_remap - create rotated and remapped vma views for many
 * combinations of two plane descriptors (including prime-number plane
 * offsets), then verify the vma size, node size, sg-table length and the
 * exact page layout via assert_rotated()/assert_remapped().
 * NOTE(review): fragmentary extract — loop-variable declarations, unpin
 * and error/cleanup paths are not visible here.
 */
476 static int igt_vma_rotate_remap(void *arg
)
478 struct i915_ggtt
*ggtt
= arg
;
479 struct i915_address_space
*vm
= &ggtt
->vm
;
480 struct drm_i915_gem_object
*obj
;
481 const struct intel_remapped_plane_info planes
[] = {
482 { .width
= 1, .height
= 1, .stride
= 1 },
483 { .width
= 2, .height
= 2, .stride
= 2 },
484 { .width
= 4, .height
= 4, .stride
= 4 },
485 { .width
= 8, .height
= 8, .stride
= 8 },
487 { .width
= 3, .height
= 5, .stride
= 3 },
488 { .width
= 3, .height
= 5, .stride
= 4 },
489 { .width
= 3, .height
= 5, .stride
= 5 },
491 { .width
= 5, .height
= 3, .stride
= 5 },
492 { .width
= 5, .height
= 3, .stride
= 7 },
493 { .width
= 5, .height
= 3, .stride
= 9 },
495 { .width
= 4, .height
= 6, .stride
= 6 },
496 { .width
= 6, .height
= 4, .stride
= 6 },
499 enum i915_ggtt_view_type types
[] = {
500 I915_GGTT_VIEW_ROTATED
,
501 I915_GGTT_VIEW_REMAPPED
,
504 const unsigned int max_pages
= 64;
507 /* Create VMA for many different combinations of planes and check
508 * that the page layout within the rotated VMA match our expectations.
511 obj
= i915_gem_object_create_internal(vm
->i915
, max_pages
* PAGE_SIZE
);
515 for (t
= types
; *t
; t
++) {
516 for (a
= planes
; a
->width
; a
++) {
517 for (b
= planes
+ ARRAY_SIZE(planes
); b
-- != planes
; ) {
518 struct i915_ggtt_view view
;
519 unsigned int n
, max_offset
;
/* Largest plane footprint bounds the legal offset range. */
521 max_offset
= max(a
->stride
* a
->height
,
522 b
->stride
* b
->height
);
523 GEM_BUG_ON(max_offset
> max_pages
);
524 max_offset
= max_pages
- max_offset
;
527 view
.rotated
.plane
[0] = *a
;
528 view
.rotated
.plane
[1] = *b
;
530 for_each_prime_number_from(view
.rotated
.plane
[0].offset
, 0, max_offset
) {
531 for_each_prime_number_from(view
.rotated
.plane
[1].offset
, 0, max_offset
) {
532 struct scatterlist
*sg
;
533 struct i915_vma
*vma
;
535 vma
= checked_vma_instance(obj
, vm
, &view
);
541 err
= i915_vma_pin(vma
, 0, 0, PIN_GLOBAL
);
543 pr_err("Failed to pin VMA, err=%d\n", err
);
/* Rotated views must be exactly the summed plane size... */
547 if (view
.type
== I915_GGTT_VIEW_ROTATED
&&
548 vma
->size
!= rotated_size(a
, b
) * PAGE_SIZE
) {
549 pr_err("VMA is wrong size, expected %lu, found %llu\n",
550 PAGE_SIZE
* rotated_size(a
, b
), vma
->size
);
/* ...while remapped views may be smaller but never larger. */
555 if (view
.type
== I915_GGTT_VIEW_REMAPPED
&&
556 vma
->size
> rotated_size(a
, b
) * PAGE_SIZE
) {
557 pr_err("VMA is wrong size, expected %lu, found %llu\n",
558 PAGE_SIZE
* rotated_size(a
, b
), vma
->size
);
563 if (vma
->pages
->nents
> rotated_size(a
, b
)) {
/* NOTE(review): "sizeo" typo below is in a runtime string; left as-is. */
564 pr_err("sg table is wrong sizeo, expected %u, found %u nents\n",
565 rotated_size(a
, b
), vma
->pages
->nents
);
570 if (vma
->node
.size
< vma
->size
) {
571 pr_err("VMA binding too small, expected %llu, found %llu\n",
572 vma
->size
, vma
->node
.size
);
/* A rotated/remapped view must build its own page arrangement. */
577 if (vma
->pages
== obj
->mm
.pages
) {
578 pr_err("VMA using unrotated object pages!\n");
583 sg
= vma
->pages
->sgl
;
584 for (n
= 0; n
< ARRAY_SIZE(view
.rotated
.plane
); n
++) {
585 if (view
.type
== I915_GGTT_VIEW_ROTATED
)
586 sg
= assert_rotated(obj
, &view
.rotated
, n
, sg
);
588 sg
= assert_remapped(obj
, &view
.remapped
, n
, sg
);
590 pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n",
591 view
.type
== I915_GGTT_VIEW_ROTATED
?
592 "rotated" : "remapped", n
,
593 view
.rotated
.plane
[0].width
,
594 view
.rotated
.plane
[0].height
,
595 view
.rotated
.plane
[0].stride
,
596 view
.rotated
.plane
[0].offset
,
597 view
.rotated
.plane
[1].width
,
598 view
.rotated
.plane
[1].height
,
599 view
.rotated
.plane
[1].stride
,
600 view
.rotated
.plane
[1].offset
);
616 i915_gem_object_put(obj
);
/*
 * assert_partial - verify that a partial vma's scatterlist covers exactly
 * the requested page range of the object: iterate the vma's dma addresses
 * and compare each against the object's dma address at the running offset.
 * NOTE(review): fragmentary extract — the size parameter, the length
 * bookkeeping and the boolean returns are not visible here.
 */
621 static bool assert_partial(struct drm_i915_gem_object
*obj
,
622 struct i915_vma
*vma
,
623 unsigned long offset
,
629 for_each_sgt_daddr(dma
, sgt
, vma
->pages
) {
633 pr_err("Partial scattergather list too long\n");
637 src
= i915_gem_object_get_dma_address(obj
, offset
);
639 pr_err("DMA mismatch for partial page offset %lu\n",
/*
 * assert_pin - after pinning, verify vma geometry and view bookkeeping:
 * the vma size matches the expectation, the drm_mm node is large enough,
 * and the vma either carries the requested non-normal view (with its own
 * page arrangement) or, for the normal view, reuses the object's pages.
 * NOTE(review): fragmentary extract — the size/name parameters, the "ok"
 * accumulation and the return statement are not visible here.
 */
651 static bool assert_pin(struct i915_vma
*vma
,
652 struct i915_ggtt_view
*view
,
658 if (vma
->size
!= size
) {
659 pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
660 name
, size
, vma
->size
);
664 if (vma
->node
.size
< vma
->size
) {
665 pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
666 name
, vma
->size
, vma
->node
.size
);
/* Non-normal views: the stored view must match and pages must differ. */
670 if (view
&& view
->type
!= I915_GGTT_VIEW_NORMAL
) {
671 if (memcmp(&vma
->ggtt_view
, view
, sizeof(*view
))) {
672 pr_err("(%s) VMA mismatch upon creation!\n",
677 if (vma
->pages
== vma
->obj
->mm
.pages
) {
678 pr_err("(%s) VMA using original object pages!\n",
/* Normal view: the vma must reuse the object's own page list. */
683 if (vma
->ggtt_view
.type
!= I915_GGTT_VIEW_NORMAL
) {
684 pr_err("Not the normal ggtt view! Found %d\n",
685 vma
->ggtt_view
.type
);
689 if (vma
->pages
!= vma
->obj
->mm
.pages
) {
690 pr_err("VMA not using object pages!\n");
/*
 * igt_vma_partial - create partial vmas for every prime (offset, size)
 * sub-range of a 1021-page object, in two phases (create, then lookup),
 * checking each with assert_pin()/assert_partial(), counting the vmas
 * recorded on the object, and finally pinning the whole-object mapping.
 * NOTE(review): fragmentary extract — the phases[] table, counters and
 * error/cleanup paths are not visible here.
 */
698 static int igt_vma_partial(void *arg
)
700 struct i915_ggtt
*ggtt
= arg
;
701 struct i915_address_space
*vm
= &ggtt
->vm
;
702 const unsigned int npages
= 1021; /* prime! */
703 struct drm_i915_gem_object
*obj
;
711 unsigned int sz
, offset
;
712 struct i915_vma
*vma
;
715 /* Create lots of different VMA for the object and check that
716 * we are returned the same VMA when we later request the same range.
719 obj
= i915_gem_object_create_internal(vm
->i915
, npages
* PAGE_SIZE
);
723 for (p
= phases
; p
->name
; p
++) { /* exercise both create/lookup */
724 unsigned int count
, nvma
;
727 for_each_prime_number_from(sz
, 1, npages
) {
728 for_each_prime_number_from(offset
, 0, npages
- sz
) {
729 struct i915_ggtt_view view
;
731 view
.type
= I915_GGTT_VIEW_PARTIAL
;
732 view
.partial
.offset
= offset
;
733 view
.partial
.size
= sz
;
/* A full-object partial range degenerates to the normal view. */
736 view
.type
= I915_GGTT_VIEW_NORMAL
;
738 vma
= checked_vma_instance(obj
, vm
, &view
);
744 err
= i915_vma_pin(vma
, 0, 0, PIN_GLOBAL
);
748 if (!assert_pin(vma
, &view
, sz
*PAGE_SIZE
, p
->name
)) {
749 pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
750 p
->name
, offset
, sz
);
755 if (!assert_partial(obj
, vma
, offset
, sz
)) {
756 pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
757 p
->name
, offset
, sz
);
/* Every created vma must appear on the object's vma list. */
770 list_for_each_entry(vma
, &obj
->vma
.list
, obj_link
)
773 pr_err("(%s) All partial vma were not recorded on the obj->vma_list: found %u, expected %u\n",
774 p
->name
, count
, nvma
);
779 /* Check that we did create the whole object mapping */
780 vma
= checked_vma_instance(obj
, vm
, NULL
);
786 err
= i915_vma_pin(vma
, 0, 0, PIN_GLOBAL
);
790 if (!assert_pin(vma
, NULL
, obj
->base
.size
, p
->name
)) {
791 pr_err("(%s) inconsistent full pin\n", p
->name
);
799 list_for_each_entry(vma
, &obj
->vma
.list
, obj_link
)
802 pr_err("(%s) allocated an extra full vma!\n", p
->name
);
809 i915_gem_object_put(obj
);
/*
 * i915_vma_mock_selftests - entry point for the vma selftests that run
 * against a mock device and mock GGTT (no hardware required): sets up the
 * mock device and ggtt, runs the subtest table with the ggtt as argument,
 * then flushes, drains and tears everything down.
 * NOTE(review): fragmentary extract — error handling for the allocations
 * and the function's return path are not visible here.
 */
814 int i915_vma_mock_selftests(void)
816 static const struct i915_subtest tests
[] = {
817 SUBTEST(igt_vma_create
),
818 SUBTEST(igt_vma_pin1
),
819 SUBTEST(igt_vma_rotate_remap
),
820 SUBTEST(igt_vma_partial
),
822 struct drm_i915_private
*i915
;
823 struct i915_ggtt
*ggtt
;
826 i915
= mock_gem_device();
830 ggtt
= kmalloc(sizeof(*ggtt
), GFP_KERNEL
);
835 mock_init_ggtt(i915
, ggtt
);
837 err
= i915_subtests(tests
, ggtt
);
/* Teardown mirrors setup: flush work, free objects, fini ggtt, device. */
839 mock_device_flush(i915
);
840 i915_gem_drain_freed_objects(i915
);
841 mock_fini_ggtt(ggtt
);
844 mock_destroy_device(i915
);
/*
 * igt_vma_remapped_gtt - live (real hardware) test: for each plane layout
 * and view type, write a (y << 16 | x) tag to every page through the
 * rotated/remapped GTT mapping, then read the object back through the
 * normal GTT mapping and check each tag landed on the page predicted by
 * rotated_index()/remapped_index().  Holds a runtime-pm wakeref for the
 * duration since it touches the mappable aperture.
 * NOTE(review): fragmentary extract — iterator declarations, "map"
 * declaration, error paths and the return are not visible here.
 */
848 static int igt_vma_remapped_gtt(void *arg
)
850 struct drm_i915_private
*i915
= arg
;
851 const struct intel_remapped_plane_info planes
[] = {
852 { .width
= 1, .height
= 1, .stride
= 1 },
853 { .width
= 2, .height
= 2, .stride
= 2 },
854 { .width
= 4, .height
= 4, .stride
= 4 },
855 { .width
= 8, .height
= 8, .stride
= 8 },
857 { .width
= 3, .height
= 5, .stride
= 3 },
858 { .width
= 3, .height
= 5, .stride
= 4 },
859 { .width
= 3, .height
= 5, .stride
= 5 },
861 { .width
= 5, .height
= 3, .stride
= 5 },
862 { .width
= 5, .height
= 3, .stride
= 7 },
863 { .width
= 5, .height
= 3, .stride
= 9 },
865 { .width
= 4, .height
= 6, .stride
= 6 },
866 { .width
= 6, .height
= 4, .stride
= 6 },
869 enum i915_ggtt_view_type types
[] = {
870 I915_GGTT_VIEW_ROTATED
,
871 I915_GGTT_VIEW_REMAPPED
,
874 struct drm_i915_gem_object
*obj
;
875 intel_wakeref_t wakeref
;
878 obj
= i915_gem_object_create_internal(i915
, 10 * 10 * PAGE_SIZE
);
882 wakeref
= intel_runtime_pm_get(&i915
->runtime_pm
);
884 for (t
= types
; *t
; t
++) {
885 for (p
= planes
; p
->width
; p
++) {
886 struct i915_ggtt_view view
= {
888 .rotated
.plane
[0] = *p
,
890 struct i915_vma
*vma
;
895 i915_gem_object_lock(obj
, NULL
);
896 err
= i915_gem_object_set_to_gtt_domain(obj
, true);
897 i915_gem_object_unlock(obj
);
/* Pin with the rotated/remapped view and write tags through it. */
901 vma
= i915_gem_object_ggtt_pin(obj
, &view
, 0, 0, PIN_MAPPABLE
);
907 GEM_BUG_ON(vma
->ggtt_view
.type
!= *t
);
909 map
= i915_vma_pin_iomap(vma
);
916 for (y
= 0 ; y
< p
->height
; y
++) {
917 for (x
= 0 ; x
< p
->width
; x
++) {
919 u32 val
= y
<< 16 | x
;
921 if (*t
== I915_GGTT_VIEW_ROTATED
)
922 offset
= (x
* p
->height
+ y
) * PAGE_SIZE
;
924 offset
= (y
* p
->width
+ x
) * PAGE_SIZE
;
926 iowrite32(val
, &map
[offset
/ sizeof(*map
)]);
930 i915_vma_unpin_iomap(vma
);
/* Re-pin with the normal view and read the tags back for checking. */
932 vma
= i915_gem_object_ggtt_pin(obj
, NULL
, 0, 0, PIN_MAPPABLE
);
938 GEM_BUG_ON(vma
->ggtt_view
.type
!= I915_GGTT_VIEW_NORMAL
);
940 map
= i915_vma_pin_iomap(vma
);
947 for (y
= 0 ; y
< p
->height
; y
++) {
948 for (x
= 0 ; x
< p
->width
; x
++) {
949 unsigned int offset
, src_idx
;
950 u32 exp
= y
<< 16 | x
;
953 if (*t
== I915_GGTT_VIEW_ROTATED
)
954 src_idx
= rotated_index(&view
.rotated
, 0, x
, y
);
956 src_idx
= remapped_index(&view
.remapped
, 0, x
, y
);
957 offset
= src_idx
* PAGE_SIZE
;
959 val
= ioread32(&map
[offset
/ sizeof(*map
)]);
961 pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
962 *t
== I915_GGTT_VIEW_ROTATED
? "Rotated" : "Remapped",
964 i915_vma_unpin_iomap(vma
);
969 i915_vma_unpin_iomap(vma
);
976 intel_runtime_pm_put(&i915
->runtime_pm
, wakeref
);
977 i915_gem_object_put(obj
);
982 int i915_vma_live_selftests(struct drm_i915_private
*i915
)
984 static const struct i915_subtest tests
[] = {
985 SUBTEST(igt_vma_remapped_gtt
),
988 return i915_subtests(tests
, i915
);