/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2016 Intel Corporation
 */
8 #include "i915_selftest.h"
10 #include "mock_dmabuf.h"
11 #include "selftests/mock_gem_device.h"
13 static int igt_dmabuf_export(void *arg)
15 struct drm_i915_private *i915 = arg;
16 struct drm_i915_gem_object *obj;
17 struct dma_buf *dmabuf;
19 obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
23 dmabuf = i915_gem_prime_export(&obj->base, 0);
24 i915_gem_object_put(obj);
26 pr_err("i915_gem_prime_export failed with err=%d\n",
27 (int)PTR_ERR(dmabuf));
28 return PTR_ERR(dmabuf);
35 static int igt_dmabuf_import_self(void *arg)
37 struct drm_i915_private *i915 = arg;
38 struct drm_i915_gem_object *obj, *import_obj;
39 struct drm_gem_object *import;
40 struct dma_buf *dmabuf;
43 obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
47 dmabuf = i915_gem_prime_export(&obj->base, 0);
49 pr_err("i915_gem_prime_export failed with err=%d\n",
50 (int)PTR_ERR(dmabuf));
51 err = PTR_ERR(dmabuf);
55 import = i915_gem_prime_import(&i915->drm, dmabuf);
57 pr_err("i915_gem_prime_import failed with err=%d\n",
58 (int)PTR_ERR(import));
59 err = PTR_ERR(import);
63 if (import != &obj->base) {
64 pr_err("i915_gem_prime_import created a new object!\n");
68 import_obj = to_intel_bo(import);
70 i915_gem_object_lock(import_obj, NULL);
71 err = __i915_gem_object_get_pages(import_obj);
72 i915_gem_object_unlock(import_obj);
74 pr_err("Same object dma-buf get_pages failed!\n");
80 i915_gem_object_put(import_obj);
84 i915_gem_object_put(obj);
88 static int igt_dmabuf_import_same_driver_lmem(void *arg)
90 struct drm_i915_private *i915 = arg;
91 struct intel_memory_region *lmem = i915->mm.regions[INTEL_REGION_LMEM];
92 struct drm_i915_gem_object *obj;
93 struct drm_gem_object *import;
94 struct dma_buf *dmabuf;
100 force_different_devices = true;
102 obj = __i915_gem_object_create_user(i915, PAGE_SIZE, &lmem, 1);
104 pr_err("__i915_gem_object_create_user failed with err=%ld\n",
110 dmabuf = i915_gem_prime_export(&obj->base, 0);
111 if (IS_ERR(dmabuf)) {
112 pr_err("i915_gem_prime_export failed with err=%ld\n",
114 err = PTR_ERR(dmabuf);
119 * We expect an import of an LMEM-only object to fail with
120 * -EOPNOTSUPP because it can't be migrated to SMEM.
122 import = i915_gem_prime_import(&i915->drm, dmabuf);
123 if (!IS_ERR(import)) {
124 drm_gem_object_put(import);
125 pr_err("i915_gem_prime_import succeeded when it shouldn't have\n");
127 } else if (PTR_ERR(import) != -EOPNOTSUPP) {
128 pr_err("i915_gem_prime_import failed with the wrong err=%ld\n",
130 err = PTR_ERR(import);
135 i915_gem_object_put(obj);
137 force_different_devices = false;
141 static int igt_dmabuf_import_same_driver(struct drm_i915_private *i915,
142 struct intel_memory_region **regions,
143 unsigned int num_regions)
145 struct drm_i915_gem_object *obj, *import_obj;
146 struct drm_gem_object *import;
147 struct dma_buf *dmabuf;
148 struct dma_buf_attachment *import_attach;
153 force_different_devices = true;
155 obj = __i915_gem_object_create_user(i915, PAGE_SIZE,
156 regions, num_regions);
158 pr_err("__i915_gem_object_create_user failed with err=%ld\n",
164 dmabuf = i915_gem_prime_export(&obj->base, 0);
165 if (IS_ERR(dmabuf)) {
166 pr_err("i915_gem_prime_export failed with err=%ld\n",
168 err = PTR_ERR(dmabuf);
172 import = i915_gem_prime_import(&i915->drm, dmabuf);
173 if (IS_ERR(import)) {
174 pr_err("i915_gem_prime_import failed with err=%ld\n",
176 err = PTR_ERR(import);
180 if (import == &obj->base) {
181 pr_err("i915_gem_prime_import reused gem object!\n");
186 import_obj = to_intel_bo(import);
188 i915_gem_object_lock(import_obj, NULL);
189 err = __i915_gem_object_get_pages(import_obj);
191 pr_err("Different objects dma-buf get_pages failed!\n");
192 i915_gem_object_unlock(import_obj);
197 * If the exported object is not in system memory, something
198 * weird is going on. TODO: When p2p is supported, this is no
199 * longer considered weird.
201 if (obj->mm.region != i915->mm.regions[INTEL_REGION_SMEM]) {
202 pr_err("Exported dma-buf is not in system memory\n");
206 i915_gem_object_unlock(import_obj);
208 /* Now try a fake an importer */
209 import_attach = dma_buf_attach(dmabuf, obj->base.dev->dev);
210 if (IS_ERR(import_attach)) {
211 err = PTR_ERR(import_attach);
215 st = dma_buf_map_attachment(import_attach, DMA_BIDIRECTIONAL);
221 timeout = dma_resv_wait_timeout(dmabuf->resv, false, true, 5 * HZ);
223 pr_err("dmabuf wait for exclusive fence timed out.\n");
226 err = timeout > 0 ? 0 : timeout;
227 dma_buf_unmap_attachment(import_attach, st, DMA_BIDIRECTIONAL);
229 dma_buf_detach(dmabuf, import_attach);
231 i915_gem_object_put(import_obj);
235 i915_gem_object_put(obj);
237 force_different_devices = false;
241 static int igt_dmabuf_import_same_driver_smem(void *arg)
243 struct drm_i915_private *i915 = arg;
244 struct intel_memory_region *smem = i915->mm.regions[INTEL_REGION_SMEM];
246 return igt_dmabuf_import_same_driver(i915, &smem, 1);
249 static int igt_dmabuf_import_same_driver_lmem_smem(void *arg)
251 struct drm_i915_private *i915 = arg;
252 struct intel_memory_region *regions[2];
254 if (!i915->mm.regions[INTEL_REGION_LMEM])
257 regions[0] = i915->mm.regions[INTEL_REGION_LMEM];
258 regions[1] = i915->mm.regions[INTEL_REGION_SMEM];
259 return igt_dmabuf_import_same_driver(i915, regions, 2);
262 static int igt_dmabuf_import(void *arg)
264 struct drm_i915_private *i915 = arg;
265 struct drm_i915_gem_object *obj;
266 struct dma_buf *dmabuf;
267 void *obj_map, *dma_map;
268 struct dma_buf_map map;
269 u32 pattern[] = { 0, 0xaa, 0xcc, 0x55, 0xff };
272 dmabuf = mock_dmabuf(1);
274 return PTR_ERR(dmabuf);
276 obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
278 pr_err("i915_gem_prime_import failed with err=%d\n",
284 if (obj->base.dev != &i915->drm) {
285 pr_err("i915_gem_prime_import created a non-i915 object!\n");
290 if (obj->base.size != PAGE_SIZE) {
291 pr_err("i915_gem_prime_import is wrong size found %lld, expected %ld\n",
292 (long long)obj->base.size, PAGE_SIZE);
297 err = dma_buf_vmap(dmabuf, &map);
298 dma_map = err ? NULL : map.vaddr;
300 pr_err("dma_buf_vmap failed\n");
305 if (0) { /* Can not yet map dmabuf */
306 obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
307 if (IS_ERR(obj_map)) {
308 err = PTR_ERR(obj_map);
309 pr_err("i915_gem_object_pin_map failed with err=%d\n", err);
313 for (i = 0; i < ARRAY_SIZE(pattern); i++) {
314 memset(dma_map, pattern[i], PAGE_SIZE);
315 if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
317 pr_err("imported vmap not all set to %x!\n", pattern[i]);
318 i915_gem_object_unpin_map(obj);
323 for (i = 0; i < ARRAY_SIZE(pattern); i++) {
324 memset(obj_map, pattern[i], PAGE_SIZE);
325 if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
327 pr_err("exported vmap not all set to %x!\n", pattern[i]);
328 i915_gem_object_unpin_map(obj);
333 i915_gem_object_unpin_map(obj);
338 dma_buf_vunmap(dmabuf, &map);
340 i915_gem_object_put(obj);
346 static int igt_dmabuf_import_ownership(void *arg)
348 struct drm_i915_private *i915 = arg;
349 struct drm_i915_gem_object *obj;
350 struct dma_buf *dmabuf;
351 struct dma_buf_map map;
355 dmabuf = mock_dmabuf(1);
357 return PTR_ERR(dmabuf);
359 err = dma_buf_vmap(dmabuf, &map);
360 ptr = err ? NULL : map.vaddr;
362 pr_err("dma_buf_vmap failed\n");
367 memset(ptr, 0xc5, PAGE_SIZE);
368 dma_buf_vunmap(dmabuf, &map);
370 obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
372 pr_err("i915_gem_prime_import failed with err=%d\n",
380 err = i915_gem_object_pin_pages_unlocked(obj);
382 pr_err("i915_gem_object_pin_pages failed with err=%d\n", err);
387 i915_gem_object_unpin_pages(obj);
389 i915_gem_object_put(obj);
397 static int igt_dmabuf_export_vmap(void *arg)
399 struct drm_i915_private *i915 = arg;
400 struct drm_i915_gem_object *obj;
401 struct dma_buf *dmabuf;
402 struct dma_buf_map map;
406 obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
410 dmabuf = i915_gem_prime_export(&obj->base, 0);
411 if (IS_ERR(dmabuf)) {
412 pr_err("i915_gem_prime_export failed with err=%d\n",
413 (int)PTR_ERR(dmabuf));
414 err = PTR_ERR(dmabuf);
417 i915_gem_object_put(obj);
419 err = dma_buf_vmap(dmabuf, &map);
420 ptr = err ? NULL : map.vaddr;
422 pr_err("dma_buf_vmap failed\n");
427 if (memchr_inv(ptr, 0, dmabuf->size)) {
428 pr_err("Exported object not initialiased to zero!\n");
433 memset(ptr, 0xc5, dmabuf->size);
436 dma_buf_vunmap(dmabuf, &map);
442 i915_gem_object_put(obj);
446 int i915_gem_dmabuf_mock_selftests(void)
448 static const struct i915_subtest tests[] = {
449 SUBTEST(igt_dmabuf_export),
450 SUBTEST(igt_dmabuf_import_self),
451 SUBTEST(igt_dmabuf_import),
452 SUBTEST(igt_dmabuf_import_ownership),
453 SUBTEST(igt_dmabuf_export_vmap),
455 struct drm_i915_private *i915;
458 i915 = mock_gem_device();
462 err = i915_subtests(tests, i915);
464 mock_destroy_device(i915);
468 int i915_gem_dmabuf_live_selftests(struct drm_i915_private *i915)
470 static const struct i915_subtest tests[] = {
471 SUBTEST(igt_dmabuf_export),
472 SUBTEST(igt_dmabuf_import_same_driver_lmem),
473 SUBTEST(igt_dmabuf_import_same_driver_smem),
474 SUBTEST(igt_dmabuf_import_same_driver_lmem_smem),
477 return i915_subtests(tests, i915);