/*
 * Copyright © 2009,2012,2013 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eric Anholt <eric@anholt.net>
 *    Chris Wilson <chris@chris-wilson.co.uk>
 *    Daniel Vetter <daniel.vetter@ffwll.ch>
 *
 */

/** @file gem_concurrent.c
 *
 * This is a test of pread/pwrite/mmap behavior when writing to active
 * buffers.
 *
 * Based on gem_gtt_concurrent_blt.
 */

#include "igt.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <inttypes.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/wait.h>

#include <drm.h>

#include "intel_bufmgr.h"

IGT_TEST_DESCRIPTION("Test of pread/pwrite/mmap behavior when writing to active"
		     " buffers.");

int fd, devid, gen;
struct intel_batchbuffer *batch;
int all;

static void
nop_release_bo(drm_intel_bo *bo)
{
	drm_intel_bo_unreference(bo);
}

static void
prw_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	int size = width * height, i;
	uint32_t *tmp;

	tmp = malloc(4*size);
	if (tmp) {
		for (i = 0; i < size; i++)
			tmp[i] = val;
		drm_intel_bo_subdata(bo, 0, 4*size, tmp);
		free(tmp);
	} else {
		for (i = 0; i < size; i++)
			drm_intel_bo_subdata(bo, 4*i, 4, &val);
	}
}

static void
prw_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	int size = width * height, i;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(tmp, true));
	do_or_die(drm_intel_bo_get_subdata(bo, 0, 4*size, tmp->virtual));
	vaddr = tmp->virtual;
	for (i = 0; i < size; i++)
		igt_assert_eq_u32(vaddr[i], val);
	drm_intel_bo_unmap(tmp);
}

static drm_intel_bo *
create_normal_bo(drm_intel_bufmgr *bufmgr, uint64_t size)
{
	drm_intel_bo *bo;

	bo = drm_intel_bo_alloc(bufmgr, "bo", size, 0);
	igt_assert(bo);

	return bo;
}

static bool can_create_normal(void)
{
	return true;
}

static drm_intel_bo *
create_private_bo(drm_intel_bufmgr *bufmgr, uint64_t size)
{
	drm_intel_bo *bo;
	uint32_t handle;

	/* XXX gem_create_with_flags(fd, size, I915_CREATE_PRIVATE); */

	handle = gem_create(fd, size);
	bo = gem_handle_to_libdrm_bo(bufmgr, fd, "stolen", handle);
	gem_close(fd, handle);

	return bo;
}

static bool can_create_private(void)
{
	return false;
}

static drm_intel_bo *
create_stolen_bo(drm_intel_bufmgr *bufmgr, uint64_t size)
{
	drm_intel_bo *bo;
	uint32_t handle;

	/* XXX gem_create_with_flags(fd, size, I915_CREATE_STOLEN); */

	handle = gem_create(fd, size);
	bo = gem_handle_to_libdrm_bo(bufmgr, fd, "stolen", handle);
	gem_close(fd, handle);

	return bo;
}

static bool can_create_stolen(void)
{
	/* XXX check num_buffers against available stolen */
	return false;
}

static drm_intel_bo *
(*create_func)(drm_intel_bufmgr *bufmgr, uint64_t size);

static bool create_cpu_require(void)
{
	return create_func != create_stolen_bo;
}

static drm_intel_bo *
unmapped_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return create_func(bufmgr, (uint64_t)4*width*height);
}

static drm_intel_bo *
snoop_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	drm_intel_bo *bo;

	igt_skip_on(gem_has_llc(fd));

	bo = unmapped_create_bo(bufmgr, width, height);
	gem_set_caching(fd, bo->handle, I915_CACHING_CACHED);
	drm_intel_bo_disable_reuse(bo);

	return bo;
}

static void
gtt_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	uint32_t *vaddr = bo->virtual;
	int size = width * height;

	drm_intel_gem_bo_start_gtt_access(bo, true);
	while (size--)
		*vaddr++ = val;
}

static void
gtt_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	uint32_t *vaddr = bo->virtual;
	int y;

	/* GTT access is slow. So we just compare a few points */
	drm_intel_gem_bo_start_gtt_access(bo, false);
	for (y = 0; y < height; y++)
		igt_assert_eq_u32(vaddr[y*width+y], val);
}

static drm_intel_bo *
map_bo(drm_intel_bo *bo)
{
	/* gtt map doesn't have a write parameter, so just keep the mapping
	 * around (to avoid the set_domain with the gtt write domain set) and
	 * manually tell the kernel when we start accessing the gtt. */
	do_or_die(drm_intel_gem_bo_map_gtt(bo));

	return bo;
}

static drm_intel_bo *
tile_bo(drm_intel_bo *bo, int width)
{
	uint32_t tiling = I915_TILING_X;
	uint32_t stride = width * 4;

	do_or_die(drm_intel_bo_set_tiling(bo, &tiling, stride));

	return bo;
}

static drm_intel_bo *
gtt_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return map_bo(unmapped_create_bo(bufmgr, width, height));
}

static drm_intel_bo *
gttX_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return tile_bo(gtt_create_bo(bufmgr, width, height), width);
}

static drm_intel_bo *
wc_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	drm_intel_bo *bo;

	gem_require_mmap_wc(fd);

	bo = unmapped_create_bo(bufmgr, width, height);
	bo->virtual = __gem_mmap__wc(fd, bo->handle, 0, bo->size, PROT_READ | PROT_WRITE);
	return bo;
}

static void
wc_release_bo(drm_intel_bo *bo)
{
	munmap(bo->virtual, bo->size);
	bo->virtual = NULL;

	nop_release_bo(bo);
}

static drm_intel_bo *
gpu_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return unmapped_create_bo(bufmgr, width, height);
}

static drm_intel_bo *
gpuX_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return tile_bo(gpu_create_bo(bufmgr, width, height), width);
}

static void
cpu_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	int size = width * height;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(bo, true));
	vaddr = bo->virtual;
	while (size--)
		*vaddr++ = val;
	drm_intel_bo_unmap(bo);
}

static void
cpu_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	int size = width * height;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(bo, false));
	vaddr = bo->virtual;
	while (size--)
		igt_assert_eq_u32(*vaddr++, val);
	drm_intel_bo_unmap(bo);
}

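/* Fill the target bo with val on the GPU: hand-roll an XY_COLOR_BLT batch,
 * upload it with pwrite and submit it via execbuf on the blitter. */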
static void
gpu_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	struct drm_i915_gem_relocation_entry reloc[1];
	struct drm_i915_gem_exec_object2 gem_exec[2];
	struct drm_i915_gem_execbuffer2 execbuf;
	struct drm_i915_gem_pwrite gem_pwrite;
	struct drm_i915_gem_create create;
	uint32_t buf[10], *b;
	uint32_t tiling, swizzle;

	drm_intel_bo_get_tiling(bo, &tiling, &swizzle);

	memset(reloc, 0, sizeof(reloc));
	memset(gem_exec, 0, sizeof(gem_exec));
	memset(&execbuf, 0, sizeof(execbuf));

	b = buf;
	*b++ = XY_COLOR_BLT_CMD_NOLEN |
		((gen >= 8) ? 5 : 4) |
		COLOR_BLT_WRITE_ALPHA | XY_COLOR_BLT_WRITE_RGB;
	if (gen >= 4 && tiling) {
		b[-1] |= XY_COLOR_BLT_TILED;
		*b = width;
	} else
		*b = width << 2;
	*b++ |= 0xf0 << 16 | 1 << 25 | 1 << 24;
	*b++ = 0;
	*b++ = height << 16 | width;
	reloc[0].offset = (b - buf) * sizeof(uint32_t);
	reloc[0].target_handle = bo->handle;
	reloc[0].read_domains = I915_GEM_DOMAIN_RENDER;
	reloc[0].write_domain = I915_GEM_DOMAIN_RENDER;
	*b++ = 0;
	if (gen >= 8)
		*b++ = 0;
	*b++ = val;
	*b++ = MI_BATCH_BUFFER_END;
	if ((b - buf) & 1)
		*b++ = 0;

	gem_exec[0].handle = bo->handle;
	gem_exec[0].flags = EXEC_OBJECT_NEEDS_FENCE;

	create.handle = 0;
	create.size = 4096;
	drmIoctl(fd, DRM_IOCTL_I915_GEM_CREATE, &create);
	gem_exec[1].handle = create.handle;
	gem_exec[1].relocation_count = 1;
	gem_exec[1].relocs_ptr = (uintptr_t)reloc;

	execbuf.buffers_ptr = (uintptr_t)gem_exec;
	execbuf.buffer_count = 2;
	execbuf.batch_len = (b - buf) * sizeof(buf[0]);
	if (gen >= 6)
		execbuf.flags = I915_EXEC_BLT;

	gem_pwrite.handle = gem_exec[1].handle;
	gem_pwrite.offset = 0;
	gem_pwrite.size = execbuf.batch_len;
	gem_pwrite.data_ptr = (uintptr_t)buf;
	do_ioctl(fd, DRM_IOCTL_I915_GEM_PWRITE, &gem_pwrite);
	do_ioctl(fd, DRM_IOCTL_I915_GEM_EXECBUFFER2, &execbuf);

	drmIoctl(fd, DRM_IOCTL_GEM_CLOSE, &create.handle);
}

static void
gpu_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	intel_blt_copy(batch,
		       bo, 0, 0, 4*width,
		       tmp, 0, 0, 4*width,
		       width, height, 32);
	cpu_cmp_bo(tmp, val, width, height, NULL);
}

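/* Each access mode bundles one way of writing and reading back a buffer
 * (pread/pwrite, CPU or snooped mmap, GTT/WC mmap, or GPU blits) together
 * with its matching create and release hooks. */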
const struct access_mode {
	const char *name;
	bool (*require)(void);
	void (*set_bo)(drm_intel_bo *bo, uint32_t val, int w, int h);
	void (*cmp_bo)(drm_intel_bo *bo, uint32_t val, int w, int h, drm_intel_bo *tmp);
	drm_intel_bo *(*create_bo)(drm_intel_bufmgr *bufmgr, int width, int height);
	void (*release_bo)(drm_intel_bo *bo);
} access_modes[] = {
	{
		.name = "prw",
		.set_bo = prw_set_bo,
		.cmp_bo = prw_cmp_bo,
		.create_bo = unmapped_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "cpu",
		.require = create_cpu_require,
		.set_bo = cpu_set_bo,
		.cmp_bo = cpu_cmp_bo,
		.create_bo = unmapped_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "snoop",
		.require = create_cpu_require,
		.set_bo = cpu_set_bo,
		.cmp_bo = cpu_cmp_bo,
		.create_bo = snoop_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gtt",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = gtt_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gttX",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = gttX_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "wc",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = wc_create_bo,
		.release_bo = wc_release_bo,
	},
	{
		.name = "gpu",
		.set_bo = gpu_set_bo,
		.cmp_bo = gpu_cmp_bo,
		.create_bo = gpu_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gpuX",
		.set_bo = gpu_set_bo,
		.cmp_bo = gpu_cmp_bo,
		.create_bo = gpuX_create_bo,
		.release_bo = nop_release_bo,
	},
};

int num_buffers;
const int width = 512, height = 512;
igt_render_copyfunc_t rendercopy;

struct buffers {
	const struct access_mode *mode;
	drm_intel_bufmgr *bufmgr;
	drm_intel_bo **src, **dst;
	drm_intel_bo *dummy, *spare;
	int count;
};

static void *buffers_init(struct buffers *data,
			  const struct access_mode *mode,
			  int _fd)
{
	data->mode = mode;
	data->count = 0;

	data->bufmgr = drm_intel_bufmgr_gem_init(_fd, 4096);
	igt_assert(data->bufmgr);

	data->src = malloc(2*sizeof(drm_intel_bo *)*num_buffers);
	igt_assert(data->src);
	data->dst = data->src + num_buffers;

	drm_intel_bufmgr_gem_enable_reuse(data->bufmgr);
	return intel_batchbuffer_alloc(data->bufmgr, devid);
}

static void buffers_destroy(struct buffers *data)
{
	if (data->count == 0)
		return;

	for (int i = 0; i < data->count; i++) {
		data->mode->release_bo(data->src[i]);
		data->mode->release_bo(data->dst[i]);
	}
	data->mode->release_bo(data->dummy);
	data->mode->release_bo(data->spare);
	data->count = 0;
}

static void buffers_create(struct buffers *data,
			   int count)
{
	igt_assert(data->bufmgr);

	buffers_destroy(data);

	for (int i = 0; i < count; i++) {
		data->src[i] =
			data->mode->create_bo(data->bufmgr, width, height);
		data->dst[i] =
			data->mode->create_bo(data->bufmgr, width, height);
	}
	data->dummy = data->mode->create_bo(data->bufmgr, width, height);
	data->spare = data->mode->create_bo(data->bufmgr, width, height);
	data->count = count;
}

static void buffers_fini(struct buffers *data)
{
	if (data->bufmgr == NULL)
		return;

	buffers_destroy(data);

	free(data->src);
	data->src = NULL;
	data->dst = NULL;

	intel_batchbuffer_free(batch);
	drm_intel_bufmgr_destroy(data->bufmgr);
	data->bufmgr = NULL;
}

typedef void (*do_copy)(drm_intel_bo *dst, drm_intel_bo *src);
typedef struct igt_hang_ring (*do_hang)(void);

static void render_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	struct igt_buf d = {
		.bo = dst,
		.size = width * height * 4,
		.num_tiles = width * height * 4,
		.stride = width * 4,
	}, s = {
		.bo = src,
		.size = width * height * 4,
		.num_tiles = width * height * 4,
		.stride = width * 4,
	};
	uint32_t swizzle;

	drm_intel_bo_get_tiling(dst, &d.tiling, &swizzle);
	drm_intel_bo_get_tiling(src, &s.tiling, &swizzle);

	rendercopy(batch, NULL,
		   &s, 0, 0,
		   width, height,
		   &d, 0, 0);
}

static void blt_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	intel_blt_copy(batch,
		       src, 0, 0, 4*width,
		       dst, 0, 0, 4*width,
		       width, height, 32);
}

static void cpu_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_CPU, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_CPU, I915_GEM_DOMAIN_CPU);
	s = gem_mmap__cpu(fd, src->handle, 0, size, PROT_READ);
	d = gem_mmap__cpu(fd, dst->handle, 0, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static void gtt_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_GTT, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);

	s = gem_mmap__gtt(fd, src->handle, size, PROT_READ);
	d = gem_mmap__gtt(fd, dst->handle, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static void wc_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_GTT, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);

	s = gem_mmap__wc(fd, src->handle, 0, size, PROT_READ);
	d = gem_mmap__wc(fd, dst->handle, 0, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static struct igt_hang_ring no_hang(void)
{
	return (struct igt_hang_ring){0, 0};
}

static struct igt_hang_ring bcs_hang(void)
{
	return igt_hang_ring(fd, I915_EXEC_BLT);
}

static struct igt_hang_ring rcs_hang(void)
{
	return igt_hang_ring(fd, I915_EXEC_RENDER);
}

static void hang_require(void)
{
	igt_require_hang_ring(fd, -1);
}

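/* Wait for the GPU to go idle, then check (and reset) the missed-interrupt
 * counter exposed in debugfs. */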
static void check_gpu(void)
{
	unsigned missed_irq = 0;
	FILE *file;

	gem_quiescent_gpu(fd);

	file = igt_debugfs_fopen("i915_ring_missed_irq", "r");
	if (file) {
		fscanf(file, "%x", &missed_irq);
		fclose(file);
	}
	file = igt_debugfs_fopen("i915_ring_missed_irq", "w");
	if (file) {
		fwrite("0\n", 1, 2, file);
		fclose(file);
	}
	igt_assert_eq(missed_irq, 0);
}

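/* Basic sanity checks: copy between src and dst with the selected pipeline,
 * optionally injecting a hang, and verify the destination contents. */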
static void do_basic0(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	gem_quiescent_gpu(fd);

	buffers->mode->set_bo(buffers->src[0], 0xdeadbeef, width, height);
	for (int i = 0; i < buffers->count; i++) {
		struct igt_hang_ring hang = do_hang_func();

		do_copy_func(buffers->dst[i], buffers->src[0]);
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef, width, height, buffers->dummy);

		igt_post_hang_ring(fd, hang);
	}
}

static void do_basic1(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	gem_quiescent_gpu(fd);

	for (int i = 0; i < buffers->count; i++) {
		struct igt_hang_ring hang = do_hang_func();

		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);

		do_copy_func(buffers->dst[i], buffers->src[i]);
		usleep(0); /* let someone else claim the mutex */
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);

		igt_post_hang_ring(fd, hang);
	}
}

static void do_basicN(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	struct igt_hang_ring hang;

	gem_quiescent_gpu(fd);

	for (int i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}

	hang = do_hang_func();

	for (int i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		usleep(0); /* let someone else claim the mutex */
	}

	for (int i = 0; i < buffers->count; i++)
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);

	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source(struct buffers *buffers,
				do_copy do_copy_func,
				do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < buffers->count; i++)
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source_read(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func,
				     int do_rcs)
{
	const int half = buffers->count/2;
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < half; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
		buffers->mode->set_bo(buffers->dst[i+half], ~i, width, height);
	}
	for (i = 0; i < half; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		if (do_rcs)
			render_copy_bo(buffers->dst[i+half], buffers->src[i]);
		else
			blt_copy_bo(buffers->dst[i+half], buffers->src[i]);
	}
	hang = do_hang_func();
	for (i = half; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < half; i++) {
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
		buffers->mode->cmp_bo(buffers->dst[i+half], i, width, height, buffers->dummy);
	}
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source_read_bcs(struct buffers *buffers,
					 do_copy do_copy_func,
					 do_hang do_hang_func)
{
	do_overwrite_source_read(buffers, do_copy_func, do_hang_func, 0);
}

static void do_overwrite_source_read_rcs(struct buffers *buffers,
					 do_copy do_copy_func,
					 do_hang do_hang_func)
{
	do_overwrite_source_read(buffers, do_copy_func, do_hang_func, 1);
}

static void do_overwrite_source__rev(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = 0; i < buffers->count; i++)
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source__one(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func)
{
	struct igt_hang_ring hang;

	gem_quiescent_gpu(fd);
	buffers->mode->set_bo(buffers->src[0], 0, width, height);
	buffers->mode->set_bo(buffers->dst[0], ~0, width, height);
	do_copy_func(buffers->dst[0], buffers->src[0]);
	hang = do_hang_func();
	buffers->mode->set_bo(buffers->src[0], 0xdeadbeef, width, height);
	buffers->mode->cmp_bo(buffers->dst[0], 0, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_intermix(struct buffers *buffers,
			do_copy do_copy_func,
			do_hang do_hang_func,
			int do_rcs)
{
	const int half = buffers->count/2;
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef^~i, width, height);
		buffers->mode->set_bo(buffers->dst[i], i, width, height);
	}
	for (i = 0; i < half; i++) {
		if (do_rcs == 1 || (do_rcs == -1 && i & 1))
			render_copy_bo(buffers->dst[i], buffers->src[i]);
		else
			blt_copy_bo(buffers->dst[i], buffers->src[i]);

		do_copy_func(buffers->dst[i+half], buffers->src[i]);

		if (do_rcs == 1 || (do_rcs == -1 && (i & 1) == 0))
			render_copy_bo(buffers->dst[i], buffers->dst[i+half]);
		else
			blt_copy_bo(buffers->dst[i], buffers->dst[i+half]);

		do_copy_func(buffers->dst[i+half], buffers->src[i+half]);
	}
	hang = do_hang_func();
	for (i = 0; i < 2*half; i++)
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef^~i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_intermix_rcs(struct buffers *buffers,
			    do_copy do_copy_func,
			    do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, 1);
}

static void do_intermix_bcs(struct buffers *buffers,
			    do_copy do_copy_func,
			    do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, 0);
}

static void do_intermix_both(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, -1);
}

static void do_early_read(struct buffers *buffers,
			  do_copy do_copy_func,
			  do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_read_read_bcs(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		blt_copy_bo(buffers->spare, buffers->src[i]);
	}
	cpu_cmp_bo(buffers->spare, 0xdeadbeef^(buffers->count-1), width, height, NULL);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_write_read_bcs(struct buffers *buffers,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		blt_copy_bo(buffers->spare, buffers->src[i]);
		do_copy_func(buffers->dst[i], buffers->spare);
	}
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_read_read_rcs(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		render_copy_bo(buffers->spare, buffers->src[i]);
	}
	cpu_cmp_bo(buffers->spare, 0xdeadbeef^(buffers->count-1), width, height, NULL);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_write_read_rcs(struct buffers *buffers,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		render_copy_bo(buffers->spare, buffers->src[i]);
		do_copy_func(buffers->dst[i], buffers->spare);
	}
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_gpu_read_after_write(struct buffers *buffers,
				    do_copy do_copy_func,
				    do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xabcdabcd, width, height);
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	for (i = buffers->count; i--; )
		do_copy_func(buffers->dummy, buffers->dst[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xabcdabcd, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

typedef void (*do_test)(struct buffers *buffers,
			do_copy do_copy_func,
			do_hang do_hang_func);

typedef void (*run_wrap)(struct buffers *buffers,
			 do_test do_test_func,
			 do_copy do_copy_func,
			 do_hang do_hang_func);

static void run_single(struct buffers *buffers,
		       do_test do_test_func,
		       do_copy do_copy_func,
		       do_hang do_hang_func)
{
	do_test_func(buffers, do_copy_func, do_hang_func);
	check_gpu();
}

static void run_interruptible(struct buffers *buffers,
			      do_test do_test_func,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	int loop;

	for (loop = 0; loop < 10; loop++)
		do_test_func(buffers, do_copy_func, do_hang_func);
	check_gpu();
}

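/* Divide the buffer budget between child processes; each child reopens the
 * device, rebuilds its own bufmgr and batch, and repeats the test. */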
static void __run_forked(struct buffers *buffers,
			 int num_children, int loops,
			 do_test do_test_func,
			 do_copy do_copy_func,
			 do_hang do_hang_func)

{
	const int old_num_buffers = num_buffers;

	num_buffers /= num_children;
	num_buffers += 2;

	igt_fork(child, num_children) {
		/* recreate process local variables */
		buffers->count = 0;
		fd = drm_open_driver(DRIVER_INTEL);

		batch = buffers_init(buffers, buffers->mode, fd);

		buffers_create(buffers, num_buffers);
		for (int loop = 0; loop < loops; loop++)
			do_test_func(buffers, do_copy_func, do_hang_func);

		buffers_fini(buffers);
	}

	igt_waitchildren();
	check_gpu();

	num_buffers = old_num_buffers;
}

static void run_forked(struct buffers *buffers,
		       do_test do_test_func,
		       do_copy do_copy_func,
		       do_hang do_hang_func)
{
	__run_forked(buffers, sysconf(_SC_NPROCESSORS_ONLN), 10,
		     do_test_func, do_copy_func, do_hang_func);
}

static void run_bomb(struct buffers *buffers,
		     do_test do_test_func,
		     do_copy do_copy_func,
		     do_hang do_hang_func)
{
	__run_forked(buffers, 8*sysconf(_SC_NPROCESSORS_ONLN), 10,
		     do_test_func, do_copy_func, do_hang_func);
}

static void bit17_require(void)
{
	struct drm_i915_gem_get_tiling2 {
		uint32_t handle;
		uint32_t tiling_mode;
		uint32_t swizzle_mode;
		uint32_t phys_swizzle_mode;
	} arg;
#define DRM_IOCTL_I915_GEM_GET_TILING2 DRM_IOWR (DRM_COMMAND_BASE + DRM_I915_GEM_GET_TILING, struct drm_i915_gem_get_tiling2)

	memset(&arg, 0, sizeof(arg));
	arg.handle = gem_create(fd, 4096);
	gem_set_tiling(fd, arg.handle, I915_TILING_X, 512);

	do_ioctl(fd, DRM_IOCTL_I915_GEM_GET_TILING2, &arg);
	gem_close(fd, arg.handle);
	igt_require(arg.phys_swizzle_mode == arg.swizzle_mode);
}

static void cpu_require(void)
{
	bit17_require();
}

static void gtt_require(void)
{
}

static void wc_require(void)
{
	bit17_require();
	gem_require_mmap_wc(fd);
}

static void bcs_require(void)
{
}

static void rcs_require(void)
{
	igt_require(rendercopy);
}

static void no_require(void)
{
}

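/* Enumerate every combination of copy pipeline (cpu/gtt/wc/blt/render) and
 * hang injection, and register one subtest per scenario and wrapper. */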
static void
run_basic_modes(const char *prefix,
		const struct access_mode *mode,
		const char *suffix,
		run_wrap run_wrap_func)
{
	const struct {
		const char *prefix;
		do_copy copy;
		void (*require)(void);
	} pipelines[] = {
		{ "cpu", cpu_copy_bo, cpu_require },
		{ "gtt", gtt_copy_bo, gtt_require },
		{ "wc", wc_copy_bo, wc_require },
		{ "blt", blt_copy_bo, bcs_require },
		{ "render", render_copy_bo, rcs_require },
		{ NULL, NULL }
	}, *pskip = pipelines + 3, *p;
	const struct {
		const char *suffix;
		do_hang hang;
		void (*require)(void);
	} hangs[] = {
		{ "", no_hang, no_require },
		{ "-hang-blt", bcs_hang, hang_require },
		{ "-hang-render", rcs_hang, hang_require },
		{ NULL, NULL },
	}, *h;
	struct buffers buffers;

	for (h = hangs; h->suffix; h++) {
		if (!all && *h->suffix)
			continue;

		for (p = all ? pipelines : pskip; p->prefix; p++) {
			igt_fixture {
				batch = buffers_init(&buffers, mode, fd);
			}

			igt_subtest_f("%s-%s-%s-sanitycheck0%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basic0,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-sanitycheck1%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basic1,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-sanitycheckN%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basicN,
					      p->copy, h->hang);
			}

			/* try to overwrite the source values */
			igt_subtest_f("%s-%s-%s-overwrite-source-one%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source__one,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source_read_bcs,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source_read_rcs,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-rev%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source__rev,
					      p->copy, h->hang);
			}

			/* try to intermix copies with GPU copies */
			igt_subtest_f("%s-%s-%s-intermix-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_rcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-intermix-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-intermix-both%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_both,
					      p->copy, h->hang);
			}

			/* try to read the results before the copy completes */
			igt_subtest_f("%s-%s-%s-early-read%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_early_read,
					      p->copy, h->hang);
			}

			/* concurrent reads */
			igt_subtest_f("%s-%s-%s-read-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_read_read_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-read-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_read_read_rcs,
					      p->copy, h->hang);
			}

			/* split copying between rings */
			igt_subtest_f("%s-%s-%s-write-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_write_read_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-write-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_write_read_rcs,
					      p->copy, h->hang);
			}

			/* and finally try to trick the kernel into losing the pending write */
			igt_subtest_f("%s-%s-%s-gpu-read-after-write%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_gpu_read_after_write,
					      p->copy, h->hang);
			}

			igt_fixture {
				buffers_fini(&buffers);
			}
		}
	}
}

static void
run_modes(const char *style, const struct access_mode *mode, unsigned allow_mem)
{
	if (mode->require && !mode->require())
		return;

	igt_debug("%s: using 2x%d buffers, each 1MiB\n",
		  style, num_buffers);
	if (!__intel_check_memory(2*num_buffers, 1024*1024, allow_mem,
				  NULL, NULL))
		return;

	run_basic_modes(style, mode, "", run_single);
	run_basic_modes(style, mode, "-forked", run_forked);

	igt_fork_signal_helper();
	run_basic_modes(style, mode, "-interruptible", run_interruptible);
	run_basic_modes(style, mode, "-bomb", run_bomb);
	igt_stop_signal_helper();
}

igt_main
{
	const struct {
		const char *name;
		drm_intel_bo *(*create)(drm_intel_bufmgr *, uint64_t size);
		bool (*require)(void);
	} create[] = {
		{ "", create_normal_bo, can_create_normal },
		{ "private-", create_private_bo, can_create_private },
		{ "stolen-", create_stolen_bo, can_create_stolen },
		{ NULL, NULL }
	}, *c;
	uint64_t pin_sz = 0;
	void *pinned = NULL;
	int i;

	igt_skip_on_simulation();

	if (strstr(igt_test_name(), "all"))
		all = true;

	igt_fixture {
		fd = drm_open_driver(DRIVER_INTEL);
		devid = intel_get_drm_devid(fd);
		gen = intel_gen(devid);
		rendercopy = igt_get_render_copyfunc(devid);
	}

	for (c = create; c->name; c++) {
		char name[80];

		create_func = c->create;

		igt_fixture {
			num_buffers = gem_mappable_aperture_size() / (1024 * 1024) / 4;
		}

		if (c->require()) {
			snprintf(name, sizeof(name), "%s%s", c->name, "small");
			for (i = 0; i < ARRAY_SIZE(access_modes); i++)
				run_modes(name, &access_modes[i], CHECK_RAM);
		}

		igt_fixture {
			num_buffers = gem_mappable_aperture_size() / (1024 * 1024);
		}

		if (c->require()) {
			snprintf(name, sizeof(name), "%s%s", c->name, "thrash");
			for (i = 0; i < ARRAY_SIZE(access_modes); i++)
				run_modes(name, &access_modes[i], CHECK_RAM);
		}

		igt_fixture {
			num_buffers = gem_aperture_size(fd) / (1024 * 1024);
		}

		if (c->require()) {
			snprintf(name, sizeof(name), "%s%s", c->name, "full");
			for (i = 0; i < ARRAY_SIZE(access_modes); i++)
				run_modes(name, &access_modes[i], CHECK_RAM);
		}

		igt_fixture {
			num_buffers = gem_mappable_aperture_size() / (1024 * 1024);
			pin_sz = intel_get_avail_ram_mb() - num_buffers;

			igt_debug("Pinning %ld MiB\n", pin_sz);
			pin_sz *= 1024 * 1024;

			if (posix_memalign(&pinned, 4096, pin_sz) ||
			    mlock(pinned, pin_sz) ||
			    madvise(pinned, pin_sz, MADV_DONTFORK)) {
				free(pinned);
				pinned = NULL;
			}
			igt_require(pinned);
		}

		if (c->require()) {
			snprintf(name, sizeof(name), "%s%s", c->name, "swap");
			for (i = 0; i < ARRAY_SIZE(access_modes); i++)
				run_modes(name, &access_modes[i], CHECK_RAM | CHECK_SWAP);
		}

		igt_fixture {
			if (pinned) {
				munlock(pinned, pin_sz);
				free(pinned);
				pinned = NULL;
			}
		}
	}
}