/*
 * Copyright © 2009,2012,2013 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eric Anholt <eric@anholt.net>
 *    Chris Wilson <chris@chris-wilson.co.uk>
 *    Daniel Vetter <daniel.vetter@ffwll.ch>
 *
 */

/** @file gem_concurrent.c
 *
 * This is a test of pread/pwrite/mmap behavior when writing to active
 * buffers.
 *
 * Based on gem_gtt_concurrent_blt.
 */

#include "igt.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <inttypes.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/wait.h>

#include <drm.h>

#include "intel_bufmgr.h"

IGT_TEST_DESCRIPTION("Test of pread/pwrite/mmap behavior when writing to active"
		     " buffers.");

int fd, devid, gen;
struct intel_batchbuffer *batch;
int all;

static void
nop_release_bo(drm_intel_bo *bo)
{
	drm_intel_bo_unreference(bo);
}

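/*
 * Each access mode supplies four hooks: set_bo() fills a buffer with a
 * 32-bit value, cmp_bo() verifies the expected value (optionally via a
 * scratch buffer), create_bo() allocates the object and release_bo()
 * tears it down.  The prw_* variants below use pread/pwrite.
 */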
static void
prw_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	int size = width * height, i;
	uint32_t *tmp;

	tmp = malloc(4*size);
	if (tmp) {
		for (i = 0; i < size; i++)
			tmp[i] = val;
		drm_intel_bo_subdata(bo, 0, 4*size, tmp);
		free(tmp);
	} else {
		for (i = 0; i < size; i++)
			drm_intel_bo_subdata(bo, 4*i, 4, &val);
	}
}

static void
prw_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	int size = width * height, i;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(tmp, true));
	do_or_die(drm_intel_bo_get_subdata(bo, 0, 4*size, tmp->virtual));
	vaddr = tmp->virtual;
	for (i = 0; i < size; i++)
		igt_assert_eq_u32(vaddr[i], val);
	drm_intel_bo_unmap(tmp);
}

static drm_intel_bo *
unmapped_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	drm_intel_bo *bo;

	bo = drm_intel_bo_alloc(bufmgr, "bo", 4*width*height, 0);
	igt_assert(bo);

	return bo;
}

static drm_intel_bo *
snoop_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	drm_intel_bo *bo;

	igt_skip_on(gem_has_llc(fd));

	bo = unmapped_create_bo(bufmgr, width, height);
	gem_set_caching(fd, bo->handle, I915_CACHING_CACHED);
	drm_intel_bo_disable_reuse(bo);

	return bo;
}

static void
gtt_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	uint32_t *vaddr = bo->virtual;
	int size = width * height;

	drm_intel_gem_bo_start_gtt_access(bo, true);
	while (size--)
		*vaddr++ = val;
}

static void
gtt_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	uint32_t *vaddr = bo->virtual;
	int y;

	/* GTT access is slow. So we just compare a few points */
	drm_intel_gem_bo_start_gtt_access(bo, false);
	for (y = 0; y < height; y++)
		igt_assert_eq_u32(vaddr[y*width+y], val);
}

static drm_intel_bo *
map_bo(drm_intel_bo *bo)
{
	/* gtt map doesn't have a write parameter, so just keep the mapping
	 * around (to avoid the set_domain with the gtt write domain set) and
	 * manually tell the kernel when we start accessing the gtt. */
	do_or_die(drm_intel_gem_bo_map_gtt(bo));

	return bo;
}

static drm_intel_bo *
tile_bo(drm_intel_bo *bo, int width)
{
	uint32_t tiling = I915_TILING_X;
	uint32_t stride = width * 4;

	do_or_die(drm_intel_bo_set_tiling(bo, &tiling, stride));

	return bo;
}

static drm_intel_bo *
gtt_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return map_bo(unmapped_create_bo(bufmgr, width, height));
}

static drm_intel_bo *
gttX_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return tile_bo(gtt_create_bo(bufmgr, width, height), width);
}

static drm_intel_bo *
wc_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	drm_intel_bo *bo;

	gem_require_mmap_wc(fd);

	bo = unmapped_create_bo(bufmgr, width, height);
	bo->virtual = __gem_mmap__wc(fd, bo->handle, 0, bo->size, PROT_READ | PROT_WRITE);
	return bo;
}

static void
wc_release_bo(drm_intel_bo *bo)
{
	munmap(bo->virtual, bo->size);
	bo->virtual = NULL;

	nop_release_bo(bo);
}

static drm_intel_bo *
gpu_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return unmapped_create_bo(bufmgr, width, height);
}

static drm_intel_bo *
gpuX_create_bo(drm_intel_bufmgr *bufmgr, int width, int height)
{
	return tile_bo(gpu_create_bo(bufmgr, width, height), width);
}

static void
cpu_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	int size = width * height;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(bo, true));
	vaddr = bo->virtual;
	while (size--)
		*vaddr++ = val;
	drm_intel_bo_unmap(bo);
}

static void
cpu_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	int size = width * height;
	uint32_t *vaddr;

	do_or_die(drm_intel_bo_map(bo, false));
	vaddr = bo->virtual;
	while (size--)
		igt_assert_eq_u32(*vaddr++, val);
	drm_intel_bo_unmap(bo);
}

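/*
 * Fill a buffer using the GPU: hand-roll an XY_COLOR_BLT batch with a
 * relocation pointing at the target, upload it via pwrite into a fresh
 * 4KiB batch object and submit it with EXECBUFFER2 (on the blitter ring
 * for gen6+).
 */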
static void
gpu_set_bo(drm_intel_bo *bo, uint32_t val, int width, int height)
{
	struct drm_i915_gem_relocation_entry reloc[1];
	struct drm_i915_gem_exec_object2 gem_exec[2];
	struct drm_i915_gem_execbuffer2 execbuf;
	struct drm_i915_gem_pwrite gem_pwrite;
	struct drm_i915_gem_create create;
	uint32_t buf[10], *b;
	uint32_t tiling, swizzle;

	drm_intel_bo_get_tiling(bo, &tiling, &swizzle);

	memset(reloc, 0, sizeof(reloc));
	memset(gem_exec, 0, sizeof(gem_exec));
	memset(&execbuf, 0, sizeof(execbuf));

	b = buf;
	*b++ = XY_COLOR_BLT_CMD_NOLEN |
		((gen >= 8) ? 5 : 4) |
		COLOR_BLT_WRITE_ALPHA | XY_COLOR_BLT_WRITE_RGB;
	if (gen >= 4 && tiling) {
		b[-1] |= XY_COLOR_BLT_TILED;
		*b = width;
	} else
		*b = width << 2;
	*b++ |= 0xf0 << 16 | 1 << 25 | 1 << 24;
	*b++ = 0;
	*b++ = height << 16 | width;
	reloc[0].offset = (b - buf) * sizeof(uint32_t);
	reloc[0].target_handle = bo->handle;
	reloc[0].read_domains = I915_GEM_DOMAIN_RENDER;
	reloc[0].write_domain = I915_GEM_DOMAIN_RENDER;
	*b++ = 0;
	if (gen >= 8)
		*b++ = 0;
	*b++ = val;
	*b++ = MI_BATCH_BUFFER_END;
	if ((b - buf) & 1)
		*b++ = 0;

	gem_exec[0].handle = bo->handle;
	gem_exec[0].flags = EXEC_OBJECT_NEEDS_FENCE;

	create.handle = 0;
	create.size = 4096;
	drmIoctl(fd, DRM_IOCTL_I915_GEM_CREATE, &create);
	gem_exec[1].handle = create.handle;
	gem_exec[1].relocation_count = 1;
	gem_exec[1].relocs_ptr = (uintptr_t)reloc;

	execbuf.buffers_ptr = (uintptr_t)gem_exec;
	execbuf.buffer_count = 2;
	execbuf.batch_len = (b - buf) * sizeof(buf[0]);
	if (gen >= 6)
		execbuf.flags = I915_EXEC_BLT;

	gem_pwrite.handle = gem_exec[1].handle;
	gem_pwrite.offset = 0;
	gem_pwrite.size = execbuf.batch_len;
	gem_pwrite.data_ptr = (uintptr_t)buf;
	do_ioctl(fd, DRM_IOCTL_I915_GEM_PWRITE, &gem_pwrite);
	do_ioctl(fd, DRM_IOCTL_I915_GEM_EXECBUFFER2, &execbuf);

	drmIoctl(fd, DRM_IOCTL_GEM_CLOSE, &create.handle);
}

static void
gpu_cmp_bo(drm_intel_bo *bo, uint32_t val, int width, int height, drm_intel_bo *tmp)
{
	intel_blt_copy(batch,
		       bo, 0, 0, 4*width,
		       tmp, 0, 0, 4*width,
		       width, height, 32);
	cpu_cmp_bo(tmp, val, width, height, NULL);
}

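/*
 * The access modes under test: pread/pwrite, CPU mmap, snooped CPU mmap,
 * GTT mmap (linear and X-tiled), WC mmap, and GPU-only access (linear and
 * X-tiled) via the blitter fill/copy above.
 */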
const struct access_mode {
	const char *name;
	void (*set_bo)(drm_intel_bo *bo, uint32_t val, int w, int h);
	void (*cmp_bo)(drm_intel_bo *bo, uint32_t val, int w, int h, drm_intel_bo *tmp);
	drm_intel_bo *(*create_bo)(drm_intel_bufmgr *bufmgr, int width, int height);
	void (*release_bo)(drm_intel_bo *bo);
} access_modes[] = {
	{
		.name = "prw",
		.set_bo = prw_set_bo,
		.cmp_bo = prw_cmp_bo,
		.create_bo = unmapped_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "cpu",
		.set_bo = cpu_set_bo,
		.cmp_bo = cpu_cmp_bo,
		.create_bo = unmapped_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "snoop",
		.set_bo = cpu_set_bo,
		.cmp_bo = cpu_cmp_bo,
		.create_bo = snoop_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gtt",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = gtt_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gttX",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = gttX_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "wc",
		.set_bo = gtt_set_bo,
		.cmp_bo = gtt_cmp_bo,
		.create_bo = wc_create_bo,
		.release_bo = wc_release_bo,
	},
	{
		.name = "gpu",
		.set_bo = gpu_set_bo,
		.cmp_bo = gpu_cmp_bo,
		.create_bo = gpu_create_bo,
		.release_bo = nop_release_bo,
	},
	{
		.name = "gpuX",
		.set_bo = gpu_set_bo,
		.cmp_bo = gpu_cmp_bo,
		.create_bo = gpuX_create_bo,
		.release_bo = nop_release_bo,
	},
};

int num_buffers;
const int width = 512, height = 512;
igt_render_copyfunc_t rendercopy;

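/*
 * Working set for a single test run: paired arrays of source and
 * destination objects plus a dummy and a spare buffer, all created with
 * the selected access mode from one bufmgr.
 */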
struct buffers {
	const struct access_mode *mode;
	drm_intel_bufmgr *bufmgr;
	drm_intel_bo **src, **dst;
	drm_intel_bo *dummy, *spare;
	int count;
};

static void *buffers_init(struct buffers *data,
			  const struct access_mode *mode,
			  int _fd)
{
	data->mode = mode;
	data->count = 0;

	data->bufmgr = drm_intel_bufmgr_gem_init(_fd, 4096);
	igt_assert(data->bufmgr);

	data->src = malloc(2*sizeof(drm_intel_bo *)*num_buffers);
	igt_assert(data->src);
	data->dst = data->src + num_buffers;

	drm_intel_bufmgr_gem_enable_reuse(data->bufmgr);
	return intel_batchbuffer_alloc(data->bufmgr, devid);
}

static void buffers_destroy(struct buffers *data)
{
	if (data->count == 0)
		return;

	for (int i = 0; i < data->count; i++) {
		data->mode->release_bo(data->src[i]);
		data->mode->release_bo(data->dst[i]);
	}
	data->mode->release_bo(data->dummy);
	data->mode->release_bo(data->spare);
	data->count = 0;
}

static void buffers_create(struct buffers *data,
			   int count)
{
	igt_assert(data->bufmgr);

	buffers_destroy(data);

	for (int i = 0; i < count; i++) {
		data->src[i] =
			data->mode->create_bo(data->bufmgr, width, height);
		data->dst[i] =
			data->mode->create_bo(data->bufmgr, width, height);
	}
	data->dummy = data->mode->create_bo(data->bufmgr, width, height);
	data->spare = data->mode->create_bo(data->bufmgr, width, height);
	data->count = count;
}

static void buffers_fini(struct buffers *data)
{
	if (data->bufmgr == NULL)
		return;

	buffers_destroy(data);

	free(data->src);
	data->src = NULL;
	data->dst = NULL;

	intel_batchbuffer_free(batch);
	drm_intel_bufmgr_destroy(data->bufmgr);
	data->bufmgr = NULL;
}

typedef void (*do_copy)(drm_intel_bo *dst, drm_intel_bo *src);
typedef struct igt_hang_ring (*do_hang)(void);

static void render_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	struct igt_buf d = {
		.bo = dst,
		.size = width * height * 4,
		.num_tiles = width * height * 4,
		.stride = width * 4,
	}, s = {
		.bo = src,
		.size = width * height * 4,
		.num_tiles = width * height * 4,
		.stride = width * 4,
	};
	uint32_t swizzle;

	drm_intel_bo_get_tiling(dst, &d.tiling, &swizzle);
	drm_intel_bo_get_tiling(src, &s.tiling, &swizzle);

	rendercopy(batch, NULL,
		   &s, 0, 0,
		   width, height,
		   &d, 0, 0);
}

static void blt_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	intel_blt_copy(batch,
		       src, 0, 0, 4*width,
		       dst, 0, 0, 4*width,
		       width, height, 32);
}

static void cpu_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_CPU, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_CPU, I915_GEM_DOMAIN_CPU);
	s = gem_mmap__cpu(fd, src->handle, 0, size, PROT_READ);
	d = gem_mmap__cpu(fd, dst->handle, 0, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static void gtt_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_GTT, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);

	s = gem_mmap__gtt(fd, src->handle, size, PROT_READ);
	d = gem_mmap__gtt(fd, dst->handle, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static void wc_copy_bo(drm_intel_bo *dst, drm_intel_bo *src)
{
	const int size = width * height * sizeof(uint32_t);
	void *d, *s;

	gem_set_domain(fd, src->handle, I915_GEM_DOMAIN_GTT, 0);
	gem_set_domain(fd, dst->handle, I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);

	s = gem_mmap__wc(fd, src->handle, 0, size, PROT_READ);
	d = gem_mmap__wc(fd, dst->handle, 0, size, PROT_WRITE);

	memcpy(d, s, size);

	munmap(d, size);
	munmap(s, size);
}

static struct igt_hang_ring no_hang(void)
{
	return (struct igt_hang_ring){0, 0};
}

static struct igt_hang_ring bcs_hang(void)
{
	return igt_hang_ring(fd, I915_EXEC_BLT);
}

static struct igt_hang_ring rcs_hang(void)
{
	return igt_hang_ring(fd, I915_EXEC_RENDER);
}

static void hang_require(void)
{
	igt_require_hang_ring(fd, -1);
}

static void check_gpu(void)
{
	unsigned missed_irq = 0;
	FILE *file;

	gem_quiescent_gpu(fd);

	file = igt_debugfs_fopen("i915_ring_missed_irq", "r");
	if (file) {
		fscanf(file, "%x", &missed_irq);
		fclose(file);
	}
	file = igt_debugfs_fopen("i915_ring_missed_irq", "w");
	if (file) {
		fwrite("0\n", 1, 2, file);
		fclose(file);
	}
	igt_assert_eq(missed_irq, 0);
}

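/*
 * Sanity checks: do_basic0 copies a single source into every destination,
 * do_basic1 copies pairwise and verifies each copy immediately, and
 * do_basicN queues every copy before checking any of the results.
 */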
static void do_basic0(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	gem_quiescent_gpu(fd);

	buffers->mode->set_bo(buffers->src[0], 0xdeadbeef, width, height);
	for (int i = 0; i < buffers->count; i++) {
		struct igt_hang_ring hang = do_hang_func();

		do_copy_func(buffers->dst[i], buffers->src[0]);
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef, width, height, buffers->dummy);

		igt_post_hang_ring(fd, hang);
	}
}

static void do_basic1(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	gem_quiescent_gpu(fd);

	for (int i = 0; i < buffers->count; i++) {
		struct igt_hang_ring hang = do_hang_func();

		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);

		do_copy_func(buffers->dst[i], buffers->src[i]);
		usleep(0); /* let someone else claim the mutex */
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);

		igt_post_hang_ring(fd, hang);
	}
}

static void do_basicN(struct buffers *buffers,
		      do_copy do_copy_func,
		      do_hang do_hang_func)
{
	struct igt_hang_ring hang;

	gem_quiescent_gpu(fd);

	for (int i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}

	hang = do_hang_func();

	for (int i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		usleep(0); /* let someone else claim the mutex */
	}

	for (int i = 0; i < buffers->count; i++)
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);

	igt_post_hang_ring(fd, hang);
}

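/*
 * The overwrite-source family queues all copies first, then rewrites the
 * sources with 0xdeadbeef and checks that each destination still receives
 * the value the source held when its copy was queued.
 */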
static void do_overwrite_source(struct buffers *buffers,
				do_copy do_copy_func,
				do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < buffers->count; i++)
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source_read(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func,
				     int do_rcs)
{
	const int half = buffers->count/2;
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < half; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
		buffers->mode->set_bo(buffers->dst[i+half], ~i, width, height);
	}
	for (i = 0; i < half; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		if (do_rcs)
			render_copy_bo(buffers->dst[i+half], buffers->src[i]);
		else
			blt_copy_bo(buffers->dst[i+half], buffers->src[i]);
	}
	hang = do_hang_func();
	for (i = half; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < half; i++) {
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
		buffers->mode->cmp_bo(buffers->dst[i+half], i, width, height, buffers->dummy);
	}
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source_read_bcs(struct buffers *buffers,
					 do_copy do_copy_func,
					 do_hang do_hang_func)
{
	do_overwrite_source_read(buffers, do_copy_func, do_hang_func, 0);
}

static void do_overwrite_source_read_rcs(struct buffers *buffers,
					 do_copy do_copy_func,
					 do_hang do_hang_func)
{
	do_overwrite_source_read(buffers, do_copy_func, do_hang_func, 1);
}

static void do_overwrite_source__rev(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], i, width, height);
		buffers->mode->set_bo(buffers->dst[i], ~i, width, height);
	}
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = 0; i < buffers->count; i++)
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_overwrite_source__one(struct buffers *buffers,
				     do_copy do_copy_func,
				     do_hang do_hang_func)
{
	struct igt_hang_ring hang;

	gem_quiescent_gpu(fd);
	buffers->mode->set_bo(buffers->src[0], 0, width, height);
	buffers->mode->set_bo(buffers->dst[0], ~0, width, height);
	do_copy_func(buffers->dst[0], buffers->src[0]);
	hang = do_hang_func();
	buffers->mode->set_bo(buffers->src[0], 0xdeadbeef, width, height);
	buffers->mode->cmp_bo(buffers->dst[0], 0, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

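/*
 * do_intermix() interleaves the pipeline copy with explicit blitter
 * and/or render copies over the same buffers; do_rcs selects render (1),
 * blitter (0) or an alternating mix of both (-1).
 */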
static void do_intermix(struct buffers *buffers,
			do_copy do_copy_func,
			do_hang do_hang_func,
			int do_rcs)
{
	const int half = buffers->count/2;
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = 0; i < buffers->count; i++) {
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef^~i, width, height);
		buffers->mode->set_bo(buffers->dst[i], i, width, height);
	}
	for (i = 0; i < half; i++) {
		if (do_rcs == 1 || (do_rcs == -1 && i & 1))
			render_copy_bo(buffers->dst[i], buffers->src[i]);
		else
			blt_copy_bo(buffers->dst[i], buffers->src[i]);

		do_copy_func(buffers->dst[i+half], buffers->src[i]);

		if (do_rcs == 1 || (do_rcs == -1 && (i & 1) == 0))
			render_copy_bo(buffers->dst[i], buffers->dst[i+half]);
		else
			blt_copy_bo(buffers->dst[i], buffers->dst[i+half]);

		do_copy_func(buffers->dst[i+half], buffers->src[i+half]);
	}
	hang = do_hang_func();
	for (i = 0; i < 2*half; i++)
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef^~i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_intermix_rcs(struct buffers *buffers,
			    do_copy do_copy_func,
			    do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, 1);
}

static void do_intermix_bcs(struct buffers *buffers,
			    do_copy do_copy_func,
			    do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, 0);
}

static void do_intermix_both(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	do_intermix(buffers, do_copy_func, do_hang_func, -1);
}

static void do_early_read(struct buffers *buffers,
			  do_copy do_copy_func,
			  do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef, width, height);
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_read_read_bcs(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		blt_copy_bo(buffers->spare, buffers->src[i]);
	}
	cpu_cmp_bo(buffers->spare, 0xdeadbeef^(buffers->count-1), width, height, NULL);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_write_read_bcs(struct buffers *buffers,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		blt_copy_bo(buffers->spare, buffers->src[i]);
		do_copy_func(buffers->dst[i], buffers->spare);
	}
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_read_read_rcs(struct buffers *buffers,
			     do_copy do_copy_func,
			     do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		do_copy_func(buffers->dst[i], buffers->src[i]);
		render_copy_bo(buffers->spare, buffers->src[i]);
	}
	cpu_cmp_bo(buffers->spare, 0xdeadbeef^(buffers->count-1), width, height, NULL);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_write_read_rcs(struct buffers *buffers,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xdeadbeef ^ i, width, height);
	for (i = 0; i < buffers->count; i++) {
		render_copy_bo(buffers->spare, buffers->src[i]);
		do_copy_func(buffers->dst[i], buffers->spare);
	}
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xdeadbeef ^ i, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

static void do_gpu_read_after_write(struct buffers *buffers,
				    do_copy do_copy_func,
				    do_hang do_hang_func)
{
	struct igt_hang_ring hang;
	int i;

	gem_quiescent_gpu(fd);
	for (i = buffers->count; i--; )
		buffers->mode->set_bo(buffers->src[i], 0xabcdabcd, width, height);
	for (i = 0; i < buffers->count; i++)
		do_copy_func(buffers->dst[i], buffers->src[i]);
	for (i = buffers->count; i--; )
		do_copy_func(buffers->dummy, buffers->dst[i]);
	hang = do_hang_func();
	for (i = buffers->count; i--; )
		buffers->mode->cmp_bo(buffers->dst[i], 0xabcdabcd, width, height, buffers->dummy);
	igt_post_hang_ring(fd, hang);
}

typedef void (*do_test)(struct buffers *buffers,
			do_copy do_copy_func,
			do_hang do_hang_func);

typedef void (*run_wrap)(struct buffers *buffers,
			 do_test do_test_func,
			 do_copy do_copy_func,
			 do_hang do_hang_func);

static void run_single(struct buffers *buffers,
		       do_test do_test_func,
		       do_copy do_copy_func,
		       do_hang do_hang_func)
{
	do_test_func(buffers, do_copy_func, do_hang_func);
	check_gpu();
}

static void run_interruptible(struct buffers *buffers,
			      do_test do_test_func,
			      do_copy do_copy_func,
			      do_hang do_hang_func)
{
	int loop;

	for (loop = 0; loop < 10; loop++)
		do_test_func(buffers, do_copy_func, do_hang_func);
	check_gpu();
}

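/*
 * Forked runs split the buffer budget between several child processes;
 * each child reopens the device, builds its own bufmgr and working set,
 * and loops the test independently of its siblings.
 */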
static void __run_forked(struct buffers *buffers,
			 int num_children, int loops,
			 do_test do_test_func,
			 do_copy do_copy_func,
			 do_hang do_hang_func)
{
	const int old_num_buffers = num_buffers;

	num_buffers /= num_children;
	num_buffers += 2;

	igt_fork(child, num_children) {
		/* recreate process local variables */
		buffers->count = 0;
		fd = drm_open_driver(DRIVER_INTEL);

		batch = buffers_init(buffers, buffers->mode, fd);

		buffers_create(buffers, num_buffers);
		for (int loop = 0; loop < loops; loop++)
			do_test_func(buffers, do_copy_func, do_hang_func);

		buffers_fini(buffers);
	}

	igt_waitchildren();
	check_gpu();

	num_buffers = old_num_buffers;
}

static void run_forked(struct buffers *buffers,
		       do_test do_test_func,
		       do_copy do_copy_func,
		       do_hang do_hang_func)
{
	__run_forked(buffers, sysconf(_SC_NPROCESSORS_ONLN), 10,
		     do_test_func, do_copy_func, do_hang_func);
}

static void run_bomb(struct buffers *buffers,
		     do_test do_test_func,
		     do_copy do_copy_func,
		     do_hang do_hang_func)
{
	__run_forked(buffers, 8*sysconf(_SC_NPROCESSORS_ONLN), 10,
		     do_test_func, do_copy_func, do_hang_func);
}

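/*
 * Skip unless the physical swizzle mode reported by GET_TILING matches
 * the logical one; the cpu and wc copy pipelines depend on this.
 */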
static void bit17_require(void)
{
	struct drm_i915_gem_get_tiling2 {
		uint32_t handle;
		uint32_t tiling_mode;
		uint32_t swizzle_mode;
		uint32_t phys_swizzle_mode;
	} arg;
#define DRM_IOCTL_I915_GEM_GET_TILING2 DRM_IOWR (DRM_COMMAND_BASE + DRM_I915_GEM_GET_TILING, struct drm_i915_gem_get_tiling2)

	memset(&arg, 0, sizeof(arg));
	arg.handle = gem_create(fd, 4096);
	gem_set_tiling(fd, arg.handle, I915_TILING_X, 512);

	do_ioctl(fd, DRM_IOCTL_I915_GEM_GET_TILING2, &arg);
	gem_close(fd, arg.handle);
	igt_require(arg.phys_swizzle_mode == arg.swizzle_mode);
}

static void cpu_require(void)
{
	bit17_require();
}

static void gtt_require(void)
{
}

static void wc_require(void)
{
	bit17_require();
	gem_require_mmap_wc(fd);
}

static void bcs_require(void)
{
}

static void rcs_require(void)
{
	igt_require(rendercopy);
}

static void no_require(void)
{
}

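/*
 * Build the subtest matrix for one access mode: every copy pipeline
 * (cpu, gtt, wc, blt, render) combined with every hang variant, wrapped
 * by the given runner.  Unless this binary is the "all" flavour, only the
 * blt/render pipelines and the no-hang variant are generated.
 */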
static void
run_basic_modes(const char *prefix,
		const struct access_mode *mode,
		const char *suffix,
		run_wrap run_wrap_func)
{
	const struct {
		const char *prefix;
		do_copy copy;
		void (*require)(void);
	} pipelines[] = {
		{ "cpu", cpu_copy_bo, cpu_require },
		{ "gtt", gtt_copy_bo, gtt_require },
		{ "wc", wc_copy_bo, wc_require },
		{ "blt", blt_copy_bo, bcs_require },
		{ "render", render_copy_bo, rcs_require },
		{ NULL, NULL }
	}, *pskip = pipelines + 3, *p;
	const struct {
		const char *suffix;
		do_hang hang;
		void (*require)(void);
	} hangs[] = {
		{ "", no_hang, no_require },
		{ "-hang-blt", bcs_hang, hang_require },
		{ "-hang-render", rcs_hang, hang_require },
		{ NULL, NULL },
	}, *h;
	struct buffers buffers;

	for (h = hangs; h->suffix; h++) {
		if (!all && *h->suffix)
			continue;

		for (p = all ? pipelines : pskip; p->prefix; p++) {
			igt_fixture {
				batch = buffers_init(&buffers, mode, fd);
			}

			igt_subtest_f("%s-%s-%s-sanitycheck0%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basic0,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-sanitycheck1%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basic1,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-sanitycheckN%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers, do_basicN,
					      p->copy, h->hang);
			}

			/* try to overwrite the source values */
			igt_subtest_f("%s-%s-%s-overwrite-source-one%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source__one,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source_read_bcs,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source_read_rcs,
					      p->copy, h->hang);
			}

			igt_subtest_f("%s-%s-%s-overwrite-source-rev%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_overwrite_source__rev,
					      p->copy, h->hang);
			}

			/* try to intermix the pipeline copies with GPU (blt/render) copies */
			igt_subtest_f("%s-%s-%s-intermix-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_rcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-intermix-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-intermix-both%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_intermix_both,
					      p->copy, h->hang);
			}

			/* try to read the results before the copy completes */
			igt_subtest_f("%s-%s-%s-early-read%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_early_read,
					      p->copy, h->hang);
			}

			/* concurrent reads */
			igt_subtest_f("%s-%s-%s-read-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_read_read_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-read-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_read_read_rcs,
					      p->copy, h->hang);
			}

			/* split copying between rings */
			igt_subtest_f("%s-%s-%s-write-read-bcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_write_read_bcs,
					      p->copy, h->hang);
			}
			igt_subtest_f("%s-%s-%s-write-read-rcs%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				igt_require(rendercopy);
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_write_read_rcs,
					      p->copy, h->hang);
			}

			/* and finally try to trick the kernel into losing the pending write */
			igt_subtest_f("%s-%s-%s-gpu-read-after-write%s%s", prefix, mode->name, p->prefix, suffix, h->suffix) {
				h->require();
				p->require();
				buffers_create(&buffers, num_buffers);
				run_wrap_func(&buffers,
					      do_gpu_read_after_write,
					      p->copy, h->hang);
			}

			igt_fixture {
				buffers_fini(&buffers);
			}
		}
	}
}

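/*
 * Check that enough RAM is available for the working set, then run the
 * matrix: single and interruptible wrappers only for the "all" binary,
 * forked and bomb wrappers always, with the signal helper injecting
 * interruptions around the latter runs.
 */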
static void
run_modes(const char *style, const struct access_mode *mode)
{
	igt_debug("%s: using 2x%d buffers, each 1MiB\n", style, num_buffers);
	intel_require_memory(2*num_buffers, 1024*1024, CHECK_RAM);

	if (all) {
		run_basic_modes(style, mode, "", run_single);

		igt_fork_signal_helper();
		run_basic_modes(style, mode, "-interruptible", run_interruptible);
		igt_stop_signal_helper();
	}

	igt_fork_signal_helper();
	run_basic_modes(style, mode, "-forked", run_forked);
	run_basic_modes(style, mode, "-bomb", run_bomb);
	igt_stop_signal_helper();
}

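/*
 * The whole matrix is run three times with growing working sets, sized in
 * 1MiB buffers: "small" uses a quarter of the mappable aperture, "thrash"
 * the whole mappable aperture and "full" the whole GGTT aperture.
 */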
igt_main
{
	int i;

	igt_skip_on_simulation();

	if (strstr(igt_test_name(), "all"))
		all = true;

	igt_fixture {
		fd = drm_open_driver(DRIVER_INTEL);
		devid = intel_get_drm_devid(fd);
		gen = intel_gen(devid);
		rendercopy = igt_get_render_copyfunc(devid);
	}

	igt_fixture {
		num_buffers = gem_mappable_aperture_size() / (1024 * 1024) / 4;
	}

	for (i = 0; i < ARRAY_SIZE(access_modes); i++)
		run_modes("small", &access_modes[i]);

	igt_fixture {
		num_buffers = gem_mappable_aperture_size() / (1024 * 1024);
	}

	for (i = 0; i < ARRAY_SIZE(access_modes); i++)
		run_modes("thrash", &access_modes[i]);

	igt_fixture {
		num_buffers = gem_aperture_size(fd) / (1024 * 1024);
	}

	for (i = 0; i < ARRAY_SIZE(access_modes); i++)
		run_modes("full", &access_modes[i]);
}