Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2007, 2011, 2013, 2014 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | * |
| 23 | * Authors: |
| 24 | * Eric Anholt <eric@anholt.net> |
| 25 | * Daniel Vetter <daniel.vetter@ffwll.ch> |
| 26 | * Tvrtko Ursulin <tvrtko.ursulin@intel.com> |
| 27 | * |
| 28 | */ |
| 29 | |
Thomas Wood | 804e11f | 2015-08-17 17:57:43 +0100 | [diff] [blame] | 30 | #include "igt.h" |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 31 | #include <stdlib.h> |
| 32 | |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 33 | |
/*
 * Backend vtable for the eviction scenarios below, so the same stress
 * patterns can be driven over different buffer-object implementations.
 * All entry points take the drm fd and operate on GEM handles.
 */
struct igt_eviction_test_ops {
	/* Allocate a buffer object of @size bytes, returning its handle. */
	uint32_t (*create)(int fd, uint64_t size);
	/* Optional (may be NULL, see forking_evictions): notify the backend
	 * that @old_handle was reopened via flink as @new_handle.
	 */
	void (*flink)(uint32_t old_handle, uint32_t new_handle);
	/* Release the buffer object @bo. */
	void (*close)(int fd, uint32_t bo);
	/* Copy @src into @dst while referencing all @nr_bos handles in
	 * @all_bo (forcing them to be bound); returns 0 on success or a
	 * negative errno (callers check for -ENOSPC on aperture overflow).
	 */
	int (*copy)(int fd, uint32_t dst, uint32_t src,
		    uint32_t *all_bo, int nr_bos);
	/* Write over the whole @size bytes of @bo, dirtying its pages. */
	void (*clear)(int fd, uint32_t bo, uint64_t size);
};
| 42 | |
/* Flag bits selecting the stress variants of forking_evictions(). */
#define FORKING_EVICTIONS_INTERRUPTIBLE  (1 << 0) /* run with the signal helper injecting interrupts */
#define FORKING_EVICTIONS_SWAPPING       (1 << 1) /* size the working set to spill into swap */
#define FORKING_EVICTIONS_DUP_DRMFD      (1 << 2) /* each child reopens the device and flinks the bos over */
#define FORKING_EVICTIONS_MEMORY_PRESSURE (1 << 3) /* clear every surface each pass for extra pressure */
#define ALL_FORKING_EVICTIONS	(FORKING_EVICTIONS_INTERRUPTIBLE | \
				 FORKING_EVICTIONS_SWAPPING | \
				 FORKING_EVICTIONS_DUP_DRMFD | \
				 FORKING_EVICTIONS_MEMORY_PRESSURE)
| 51 | |
/*
 * Swap elements @i and @j of a uint32_t array; callback shape matches
 * igt_permute_array()'s exchange function.
 */
static void exchange_uint32_t(void *array, unsigned i, unsigned j)
{
	uint32_t *elems = array;
	uint32_t tmp;

	tmp = elems[i];
	elems[i] = elems[j];
	elems[j] = tmp;
}
| 58 | |
| 59 | static int minor_evictions(int fd, struct igt_eviction_test_ops *ops, |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 60 | uint64_t surface_size, |
| 61 | uint64_t nr_surfaces) |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 62 | { |
| 63 | uint32_t *bo, *sel; |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 64 | uint64_t n, m, total_surfaces; |
| 65 | int pass, fail; |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 66 | |
Tvrtko Ursulin | 42bcd05 | 2014-02-03 10:59:41 +0000 | [diff] [blame] | 67 | /* Make sure nr_surfaces is not divisible by seven |
| 68 | * to avoid duplicates in the selection loop below. |
| 69 | */ |
| 70 | nr_surfaces /= 7; |
| 71 | nr_surfaces *= 7; |
| 72 | nr_surfaces += 3; |
| 73 | |
Chris Wilson | a1a8aa1 | 2014-06-05 13:19:39 +0100 | [diff] [blame] | 74 | total_surfaces = gem_aperture_size(fd) / surface_size + 1; |
Chris Wilson | 8c475e0 | 2014-02-26 12:01:47 +0000 | [diff] [blame] | 75 | igt_require(nr_surfaces < total_surfaces); |
Daniel Vetter | a535cde | 2014-11-17 14:43:33 +0100 | [diff] [blame] | 76 | intel_require_memory(total_surfaces, surface_size, CHECK_RAM); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 77 | |
Chris Wilson | 8c475e0 | 2014-02-26 12:01:47 +0000 | [diff] [blame] | 78 | bo = malloc((nr_surfaces + total_surfaces)*sizeof(*bo)); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 79 | igt_assert(bo); |
| 80 | |
Chris Wilson | 8c475e0 | 2014-02-26 12:01:47 +0000 | [diff] [blame] | 81 | for (n = 0; n < total_surfaces; n++) |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 82 | bo[n] = ops->create(fd, surface_size); |
| 83 | |
| 84 | sel = bo + n; |
| 85 | for (fail = 0, m = 0; fail < 10; fail++) { |
Chris Wilson | a384e55 | 2014-06-03 07:31:49 +0100 | [diff] [blame] | 86 | int ret; |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 87 | for (pass = 0; pass < 100; pass++) { |
| 88 | for (n = 0; n < nr_surfaces; n++, m += 7) |
Chris Wilson | 8c475e0 | 2014-02-26 12:01:47 +0000 | [diff] [blame] | 89 | sel[n] = bo[m%total_surfaces]; |
Chris Wilson | a384e55 | 2014-06-03 07:31:49 +0100 | [diff] [blame] | 90 | ret = ops->copy(fd, sel[0], sel[1], sel, nr_surfaces); |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 91 | igt_assert_eq(ret, 0); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 92 | } |
Chris Wilson | a384e55 | 2014-06-03 07:31:49 +0100 | [diff] [blame] | 93 | ret = ops->copy(fd, bo[0], bo[0], bo, total_surfaces); |
Chris Wilson | 91d3780 | 2016-09-30 17:41:01 +0100 | [diff] [blame] | 94 | igt_assert_eq(ret, -ENOSPC); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 95 | } |
| 96 | |
Chris Wilson | 8c475e0 | 2014-02-26 12:01:47 +0000 | [diff] [blame] | 97 | for (n = 0; n < total_surfaces; n++) |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 98 | ops->close(fd, bo[n]); |
| 99 | free(bo); |
| 100 | |
| 101 | return 0; |
| 102 | } |
| 103 | |
| 104 | static int major_evictions(int fd, struct igt_eviction_test_ops *ops, |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 105 | uint64_t surface_size, uint64_t nr_surfaces) |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 106 | { |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 107 | uint64_t n, m; |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 108 | uint32_t *bo; |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 109 | int ret, loop; |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 110 | |
Daniel Vetter | a535cde | 2014-11-17 14:43:33 +0100 | [diff] [blame] | 111 | intel_require_memory(nr_surfaces, surface_size, CHECK_RAM); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 112 | |
| 113 | bo = malloc(nr_surfaces*sizeof(*bo)); |
| 114 | igt_assert(bo); |
| 115 | |
| 116 | for (n = 0; n < nr_surfaces; n++) |
| 117 | bo[n] = ops->create(fd, surface_size); |
| 118 | |
| 119 | for (loop = 0, m = 0; loop < 100; loop++, m += 17) { |
| 120 | n = m % nr_surfaces; |
Chris Wilson | a384e55 | 2014-06-03 07:31:49 +0100 | [diff] [blame] | 121 | ret = ops->copy(fd, bo[n], bo[n], &bo[n], 1); |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 122 | igt_assert_eq(ret, 0); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 123 | } |
| 124 | |
| 125 | for (n = 0; n < nr_surfaces; n++) |
| 126 | ops->close(fd, bo[n]); |
| 127 | free(bo); |
| 128 | |
| 129 | return 0; |
| 130 | } |
| 131 | |
/*
 * Run evictions while almost all system RAM is pinned with mlock(), so the
 * kernel has nearly nothing to give: first probe (in a throwaway child)
 * that pinning that much is even possible, then in a second child pin the
 * surplus RAM and drive copies over an ever-shrinking surface set while
 * growing the mlocked hog.  Children may die from OOM; failures propagate
 * via exit status to igt_waitchildren().
 */
static void mlocked_evictions(int fd, struct igt_eviction_test_ops *ops,
			      uint64_t surface_size,
			      uint64_t surface_count)
{
	unsigned int *can_mlock;
	uint64_t sz, pin;

	intel_require_memory(surface_count, surface_size, CHECK_RAM);

	/* Pin everything except what the surfaces themselves need. */
	sz = surface_size*surface_count;
	pin = intel_get_avail_ram_mb();
	pin *= 1024 * 1024;
	igt_require(pin > sz);
	pin -= sz;

	igt_debug("Pinning [%'lld, %'lld] MiB\n",
		  (long long)pin/(1024*1024),
		  (long long)(pin + sz)/(1024*1024));

	/* Shared page so the probe child can report back to the parent. */
	can_mlock = mmap(NULL, 4096, PROT_WRITE, MAP_SHARED | MAP_ANON, -1, 0);
	igt_assert(can_mlock != MAP_FAILED);

	/* Probe in a child: if mlock(pin + sz) kills it (OOM) or fails,
	 * the flag stays 0 and we skip rather than take down the parent.
	 */
	igt_fork(child, 1) {
		void *locked;

		locked = malloc(pin + sz);
		if (locked != NULL && !mlock(locked, pin + sz))
			*can_mlock = 1;
	}
	igt_waitchildren();
	igt_require(*can_mlock);
	munmap(can_mlock, 4096);

	igt_fork(child, 1) {
		void *locked;
		uint32_t *bo;
		uint64_t n;
		int ret;

		bo = malloc(surface_count*sizeof(*bo));
		igt_assert(bo);

		/* Pin the surplus; the child's exit code carries failure. */
		locked = malloc(pin);
		if (locked == NULL || mlock(locked, pin))
			exit(ENOSPC);

		for (n = 0; n < surface_count; n++)
			bo[n] = ops->create(fd, surface_size);

		/* Each round uses one fewer surface, freeing surface_size
		 * bytes of bo pages for the hog below to lock down.
		 */
		for (n = 0; n < surface_count - 2; n++) {
			igt_permute_array(bo, surface_count, exchange_uint32_t);
			ret = ops->copy(fd, bo[0], bo[1], bo, surface_count-n);
			if (ret)
				exit(ret);

			/* Having used the surfaces (and so pulled out of
			 * our pages into memory), start a memory hog to
			 * force evictions.
			 */

			/* Deliberately leaked if mlock succeeds: the hog
			 * must stay locked until the child exits.
			 */
			locked = malloc(surface_size);
			if (locked == NULL || mlock(locked, surface_size))
				free(locked);
		}

		for (n = 0; n < surface_count; n++)
			ops->close(fd, bo[n]);
	}

	/* Asserts that the child exited successfully. */
	igt_waitchildren();
}
| 203 | |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 204 | static int swapping_evictions(int fd, struct igt_eviction_test_ops *ops, |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 205 | uint64_t surface_size, |
| 206 | uint64_t working_surfaces, |
| 207 | uint64_t trash_surfaces) |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 208 | { |
| 209 | uint32_t *bo; |
Chris Wilson | 0e20714 | 2016-01-25 13:51:00 +0000 | [diff] [blame] | 210 | uint64_t i, n; |
| 211 | int pass, ret; |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 212 | |
Daniel Vetter | a535cde | 2014-11-17 14:43:33 +0100 | [diff] [blame] | 213 | intel_require_memory(working_surfaces, surface_size, CHECK_RAM); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 214 | |
| 215 | if (trash_surfaces < working_surfaces) |
| 216 | trash_surfaces = working_surfaces; |
| 217 | |
Daniel Vetter | a535cde | 2014-11-17 14:43:33 +0100 | [diff] [blame] | 218 | intel_require_memory(trash_surfaces, surface_size, CHECK_RAM | CHECK_SWAP); |
Chris Wilson | 321273f | 2014-05-28 09:01:56 +0100 | [diff] [blame] | 219 | |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 220 | bo = malloc(trash_surfaces*sizeof(*bo)); |
| 221 | igt_assert(bo); |
| 222 | |
| 223 | for (n = 0; n < trash_surfaces; n++) |
| 224 | bo[n] = ops->create(fd, surface_size); |
| 225 | |
| 226 | for (i = 0; i < trash_surfaces/32; i++) { |
| 227 | igt_permute_array(bo, trash_surfaces, exchange_uint32_t); |
| 228 | |
| 229 | for (pass = 0; pass < 100; pass++) { |
Chris Wilson | a384e55 | 2014-06-03 07:31:49 +0100 | [diff] [blame] | 230 | ret = ops->copy(fd, bo[0], bo[1], bo, working_surfaces); |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 231 | igt_assert_eq(ret, 0); |
Tvrtko Ursulin | e1dea7e | 2014-02-03 10:59:40 +0000 | [diff] [blame] | 232 | } |
| 233 | } |
| 234 | |
| 235 | for (n = 0; n < trash_surfaces; n++) |
| 236 | ops->close(fd, bo[n]); |
| 237 | free(bo); |
| 238 | |
| 239 | return 0; |
| 240 | } |
| 241 | |
/*
 * Multi-process eviction stress: every child hammers copies over a shared
 * set of buffer objects, optionally (per @flags) while being interrupted
 * by signals, while swapping, through a duplicated drm fd, and/or under
 * extra memory pressure from clearing the surfaces each pass.
 */
static int forking_evictions(int fd, struct igt_eviction_test_ops *ops,
			     uint64_t surface_size,
			     uint64_t working_surfaces,
			     uint64_t trash_surfaces,
			     unsigned flags)
{
	const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
	uint64_t bo_count, n, l;
	uint32_t *bo;
	int pass, ret;

	intel_require_memory(working_surfaces, surface_size, CHECK_RAM);

	/* With swapping enabled, allocate the (larger) trash set so the
	 * total spills out of RAM; otherwise just the working set.
	 */
	if (flags & FORKING_EVICTIONS_SWAPPING) {
		bo_count = trash_surfaces;
		if (bo_count < working_surfaces)
			bo_count = working_surfaces;

	} else
		bo_count = working_surfaces;

	igt_assert_lte(working_surfaces, bo_count);
	intel_require_memory(bo_count, surface_size, CHECK_RAM | CHECK_SWAP);

	bo = malloc(bo_count*sizeof(*bo));
	igt_assert(bo);

	for (n = 0; n < bo_count; n++)
		bo[n] = ops->create(fd, surface_size);

	/* Up to 12 children, 4 per CPU; `i` is the child index bound by
	 * the igt_fork macro.
	 */
	igt_fork(i, min(num_threads * 4, 12)) {
		int realfd = fd;
		/* Fewer passes when swapping: each pass is much slower. */
		int num_passes = flags & FORKING_EVICTIONS_SWAPPING ? 10 : 100;

		/* Every fork should have a different permutation! */
		srand(i * 63);

		if (flags & FORKING_EVICTIONS_INTERRUPTIBLE)
			igt_fork_signal_helper();

		igt_permute_array(bo, bo_count, exchange_uint32_t);

		if (flags & FORKING_EVICTIONS_DUP_DRMFD) {
			realfd = drm_open_driver(DRIVER_INTEL);

			/* We can overwrite the bo array since we're forked. */
			for (l = 0; l < bo_count; l++) {
				uint32_t handle = bo[l];
				/* Re-import each bo on the new fd via its
				 * flink name (done on the original fd).
				 */
				uint32_t flink = gem_flink(fd, bo[l]);

				bo[l] = gem_open(realfd, flink);
				if (ops->flink)
					ops->flink(handle, bo[l]);
			}
		}

		for (pass = 0; pass < num_passes; pass++) {
			ret = ops->copy(realfd, bo[0], bo[1], bo, working_surfaces);
			igt_assert_eq(ret, 0);

			/* Optionally dirty every surface to add memory
			 * pressure (loop body only runs with the flag set).
			 */
			for (l = 0; l < working_surfaces &&
			     (flags & FORKING_EVICTIONS_MEMORY_PRESSURE);
			     l++) {
				ops->clear(realfd, bo[l], surface_size);
			}
		}

		if (flags & FORKING_EVICTIONS_INTERRUPTIBLE)
			igt_stop_signal_helper();

		/* drmfd closing will take care of additional bo refs */
		if (flags & FORKING_EVICTIONS_DUP_DRMFD)
			close(realfd);
	}

	igt_waitchildren();

	for (n = 0; n < bo_count; n++)
		ops->close(fd, bo[n]);
	free(bo);

	return 0;
}