/* weird use of API tests */
| 2 | |
| 3 | /* test1- export buffer from intel, import same fd twice into nouveau, |
| 4 | check handles match |
| 5 | test2 - export buffer from intel, import fd once, close fd, try import again |
| 6 | fail if it succeeds |
| 7 | test3 - export buffer from intel, import twice on nouveau, check handle is the same |
| 8 | test4 - export handle twice from intel, import into nouveau twice, check handle is the same |
| 9 | */ |
| 10 | |
Thomas Wood | 804e11f | 2015-08-17 17:57:43 +0100 | [diff] [blame] | 11 | #include "igt.h" |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 12 | #include <stdio.h> |
| 13 | #include <stdlib.h> |
| 14 | #include <unistd.h> |
| 15 | #include <fcntl.h> |
| 16 | #include <sys/stat.h> |
| 17 | |
| 18 | #include "intel_bufmgr.h" |
| 19 | #include "nouveau.h" |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 20 | |
| 21 | #define BO_SIZE (256*1024) |
| 22 | |
| 23 | int intel_fd = -1, intel_fd2 = -1, nouveau_fd = -1, nouveau_fd2 = -1; |
| 24 | drm_intel_bufmgr *bufmgr; |
| 25 | drm_intel_bufmgr *bufmgr2; |
| 26 | struct nouveau_device *ndev, *ndev2; |
| 27 | struct nouveau_client *nclient, *nclient2; |
| 28 | uint32_t devid; |
| 29 | struct intel_batchbuffer *intel_batch; |
| 30 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 31 | static void find_and_open_devices(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 32 | { |
| 33 | int i; |
| 34 | char path[80]; |
| 35 | struct stat buf; |
| 36 | FILE *fl; |
| 37 | char vendor_id[8]; |
| 38 | int venid; |
| 39 | for (i = 0; i < 9; i++) { |
Imre Deak | 0bf5fd8 | 2012-10-10 16:04:44 +0300 | [diff] [blame] | 40 | char *ret; |
| 41 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 42 | sprintf(path, "/sys/class/drm/card%d/device/vendor", i); |
| 43 | if (stat(path, &buf)) |
| 44 | break; |
| 45 | |
| 46 | fl = fopen(path, "r"); |
| 47 | if (!fl) |
| 48 | break; |
| 49 | |
Imre Deak | 0bf5fd8 | 2012-10-10 16:04:44 +0300 | [diff] [blame] | 50 | ret = fgets(vendor_id, 8, fl); |
Daniel Vetter | 8344095 | 2013-08-13 12:35:58 +0200 | [diff] [blame] | 51 | igt_assert(ret); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 52 | fclose(fl); |
| 53 | |
| 54 | venid = strtoul(vendor_id, NULL, 16); |
| 55 | sprintf(path, "/dev/dri/card%d", i); |
| 56 | if (venid == 0x8086) { |
| 57 | intel_fd = open(path, O_RDWR); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 58 | igt_assert(intel_fd); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 59 | intel_fd2 = open(path, O_RDWR); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 60 | igt_assert(intel_fd2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 61 | } else if (venid == 0x10de) { |
| 62 | nouveau_fd = open(path, O_RDWR); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 63 | igt_assert(nouveau_fd); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 64 | nouveau_fd2 = open(path, O_RDWR); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 65 | igt_assert(nouveau_fd2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 66 | } |
| 67 | } |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 68 | } |
| 69 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 70 | static void test_i915_nv_import_twice(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 71 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 72 | drm_intel_bo *test_intel_bo; |
| 73 | int prime_fd; |
| 74 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 75 | |
| 76 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
| 77 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 78 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 79 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 80 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
| 81 | igt_assert(nouveau_bo_prime_handle_ref(ndev2, prime_fd, &nvbo2) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 82 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 83 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 84 | nouveau_bo_ref(NULL, &nvbo2); |
| 85 | nouveau_bo_ref(NULL, &nvbo); |
| 86 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 87 | } |
| 88 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 89 | static void test_i915_nv_import_twice_check_flink_name(void) |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 90 | { |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 91 | drm_intel_bo *test_intel_bo; |
| 92 | int prime_fd; |
| 93 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 94 | uint32_t flink_name1, flink_name2; |
| 95 | |
| 96 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
| 97 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 98 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 99 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 100 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
| 101 | igt_assert(nouveau_bo_prime_handle_ref(ndev2, prime_fd, &nvbo2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 102 | close(prime_fd); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 103 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 104 | igt_assert(nouveau_bo_name_get(nvbo, &flink_name1) == 0); |
| 105 | igt_assert(nouveau_bo_name_get(nvbo2, &flink_name2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 106 | |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 107 | igt_assert_eq_u32(flink_name1, flink_name2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 108 | |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 109 | nouveau_bo_ref(NULL, &nvbo2); |
| 110 | nouveau_bo_ref(NULL, &nvbo); |
| 111 | drm_intel_bo_unreference(test_intel_bo); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 112 | } |
| 113 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 114 | static void test_i915_nv_reimport_twice_check_flink_name(void) |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 115 | { |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 116 | drm_intel_bo *test_intel_bo; |
| 117 | int prime_fd; |
| 118 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 119 | uint32_t flink_name1, flink_name2; |
| 120 | |
| 121 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
| 122 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 123 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 124 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 125 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 126 | |
| 127 | /* create a new dma-buf */ |
| 128 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 129 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 130 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 131 | igt_assert(nouveau_bo_prime_handle_ref(ndev2, prime_fd, &nvbo2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 132 | close(prime_fd); |
| 133 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 134 | igt_assert(nouveau_bo_name_get(nvbo, &flink_name1) == 0); |
| 135 | igt_assert(nouveau_bo_name_get(nvbo2, &flink_name2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 136 | |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 137 | igt_assert_eq_u32(flink_name1, flink_name2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 138 | |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 139 | nouveau_bo_ref(NULL, &nvbo2); |
| 140 | nouveau_bo_ref(NULL, &nvbo); |
| 141 | drm_intel_bo_unreference(test_intel_bo); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 142 | } |
| 143 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 144 | static void test_nv_i915_import_twice_check_flink_name(void) |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 145 | { |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 146 | drm_intel_bo *intel_bo = NULL, *intel_bo2 = NULL; |
| 147 | int prime_fd; |
| 148 | struct nouveau_bo *nvbo = NULL; |
| 149 | uint32_t flink_name1, flink_name2; |
| 150 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 151 | igt_assert(nouveau_bo_new(ndev, NOUVEAU_BO_GART | NOUVEAU_BO_MAP, |
| 152 | 0, BO_SIZE, NULL, &nvbo) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 153 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 154 | igt_assert(nouveau_bo_set_prime(nvbo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 155 | |
| 156 | intel_bo = drm_intel_bo_gem_create_from_prime(bufmgr, prime_fd, BO_SIZE); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 157 | igt_assert(intel_bo); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 158 | |
| 159 | intel_bo2 = drm_intel_bo_gem_create_from_prime(bufmgr2, prime_fd, BO_SIZE); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 160 | igt_assert(intel_bo2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 161 | close(prime_fd); |
| 162 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 163 | igt_assert(drm_intel_bo_flink(intel_bo, &flink_name1) == 0); |
| 164 | igt_assert(drm_intel_bo_flink(intel_bo2, &flink_name2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 165 | |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 166 | igt_assert_eq_u32(flink_name1, flink_name2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 167 | |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 168 | nouveau_bo_ref(NULL, &nvbo); |
| 169 | drm_intel_bo_unreference(intel_bo); |
| 170 | drm_intel_bo_unreference(intel_bo2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 171 | } |
| 172 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 173 | static void test_nv_i915_reimport_twice_check_flink_name(void) |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 174 | { |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 175 | drm_intel_bo *intel_bo = NULL, *intel_bo2 = NULL; |
| 176 | int prime_fd; |
| 177 | struct nouveau_bo *nvbo = NULL; |
| 178 | uint32_t flink_name1, flink_name2; |
| 179 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 180 | igt_assert(nouveau_bo_new(ndev, NOUVEAU_BO_GART | NOUVEAU_BO_MAP, |
| 181 | 0, BO_SIZE, NULL, &nvbo) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 182 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 183 | igt_assert(nouveau_bo_set_prime(nvbo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 184 | |
| 185 | intel_bo = drm_intel_bo_gem_create_from_prime(bufmgr, prime_fd, BO_SIZE); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 186 | igt_assert(intel_bo); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 187 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 188 | igt_assert(nouveau_bo_set_prime(nvbo, &prime_fd) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 189 | |
| 190 | intel_bo2 = drm_intel_bo_gem_create_from_prime(bufmgr2, prime_fd, BO_SIZE); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 191 | igt_assert(intel_bo2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 192 | close(prime_fd); |
| 193 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 194 | igt_assert(drm_intel_bo_flink(intel_bo, &flink_name1) == 0); |
| 195 | igt_assert(drm_intel_bo_flink(intel_bo2, &flink_name2) == 0); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 196 | |
Matt Roper | 07be8fe | 2015-03-05 15:01:00 -0800 | [diff] [blame] | 197 | igt_assert_eq_u32(flink_name1, flink_name2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 198 | |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 199 | nouveau_bo_ref(NULL, &nvbo); |
| 200 | drm_intel_bo_unreference(intel_bo); |
| 201 | drm_intel_bo_unreference(intel_bo2); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 202 | } |
| 203 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 204 | static void test_i915_nv_import_vs_close(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 205 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 206 | drm_intel_bo *test_intel_bo; |
| 207 | int prime_fd; |
| 208 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 209 | |
| 210 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 211 | igt_assert(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 212 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 213 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 214 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 215 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 216 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 217 | igt_assert(nouveau_bo_prime_handle_ref(ndev2, prime_fd, &nvbo2) < 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 218 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 219 | nouveau_bo_ref(NULL, &nvbo2); |
| 220 | nouveau_bo_ref(NULL, &nvbo); |
| 221 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 222 | } |
| 223 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 224 | /* import handle twice on one driver */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 225 | static void test_i915_nv_double_import(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 226 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 227 | drm_intel_bo *test_intel_bo; |
| 228 | int prime_fd; |
| 229 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 230 | |
| 231 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 232 | igt_assert(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 233 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 234 | igt_assert(drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 235 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 236 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
| 237 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo2) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 238 | close(prime_fd); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 239 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 240 | igt_assert(nvbo->handle == nvbo2->handle); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 241 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 242 | nouveau_bo_ref(NULL, &nvbo2); |
| 243 | nouveau_bo_ref(NULL, &nvbo); |
| 244 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 245 | } |
| 246 | |
| 247 | /* export handle twice from one driver - import twice |
| 248 | see if we get same object */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 249 | static void test_i915_nv_double_export(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 250 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 251 | drm_intel_bo *test_intel_bo; |
| 252 | int prime_fd, prime_fd2; |
| 253 | struct nouveau_bo *nvbo = NULL, *nvbo2 = NULL; |
| 254 | |
| 255 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 256 | igt_assert(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 257 | |
| 258 | drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd); |
| 259 | |
| 260 | drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd2); |
| 261 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 262 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 263 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 264 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd2, &nvbo2) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 265 | close(prime_fd2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 266 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 267 | igt_assert(nvbo->handle == nvbo2->handle); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 268 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 269 | nouveau_bo_ref(NULL, &nvbo2); |
| 270 | nouveau_bo_ref(NULL, &nvbo); |
| 271 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 272 | } |
| 273 | |
| 274 | /* export handle from intel driver - reimport to intel driver |
| 275 | see if you get same object */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 276 | static void test_i915_self_import(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 277 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 278 | drm_intel_bo *test_intel_bo, *test_intel_bo2; |
| 279 | int prime_fd; |
| 280 | |
| 281 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
| 282 | |
| 283 | drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd); |
| 284 | |
| 285 | test_intel_bo2 = drm_intel_bo_gem_create_from_prime(bufmgr, prime_fd, BO_SIZE); |
| 286 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 287 | igt_assert(test_intel_bo2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 288 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 289 | igt_assert(test_intel_bo->handle == test_intel_bo2->handle); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 290 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 291 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 292 | } |
| 293 | |
| 294 | /* nouveau export reimport test */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 295 | static void test_nv_self_import(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 296 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 297 | int prime_fd; |
| 298 | struct nouveau_bo *nvbo, *nvbo2; |
| 299 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 300 | igt_assert(nouveau_bo_new(ndev, NOUVEAU_BO_GART | NOUVEAU_BO_MAP, |
| 301 | 0, BO_SIZE, NULL, &nvbo) == 0); |
| 302 | igt_assert(nouveau_bo_set_prime(nvbo, &prime_fd) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 303 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 304 | igt_assert(nouveau_bo_prime_handle_ref(ndev, prime_fd, &nvbo2) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 305 | close(prime_fd); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 306 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 307 | igt_assert(nvbo->handle == nvbo2->handle); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 308 | nouveau_bo_ref(NULL, &nvbo); |
| 309 | nouveau_bo_ref(NULL, &nvbo2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 310 | } |
| 311 | |
| 312 | /* export handle from intel driver - reimport to another intel driver bufmgr |
| 313 | see if you get same object */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 314 | static void test_i915_self_import_to_different_fd(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 315 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 316 | drm_intel_bo *test_intel_bo, *test_intel_bo2; |
| 317 | int prime_fd; |
| 318 | |
| 319 | test_intel_bo = drm_intel_bo_alloc(bufmgr, "test bo", BO_SIZE, 4096); |
| 320 | |
| 321 | drm_intel_bo_gem_export_to_prime(test_intel_bo, &prime_fd); |
| 322 | |
| 323 | test_intel_bo2 = drm_intel_bo_gem_create_from_prime(bufmgr2, prime_fd, BO_SIZE); |
| 324 | close(prime_fd); |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 325 | igt_assert(test_intel_bo2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 326 | |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 327 | drm_intel_bo_unreference(test_intel_bo2); |
| 328 | drm_intel_bo_unreference(test_intel_bo); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 329 | } |
| 330 | |
| 331 | /* nouveau export reimport to other driver test */ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 332 | static void test_nv_self_import_to_different_fd(void) |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 333 | { |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 334 | int prime_fd; |
| 335 | struct nouveau_bo *nvbo, *nvbo2; |
| 336 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 337 | igt_assert(nouveau_bo_new(ndev, NOUVEAU_BO_GART | NOUVEAU_BO_MAP, |
| 338 | 0, BO_SIZE, NULL, &nvbo) == 0); |
| 339 | igt_assert(nouveau_bo_set_prime(nvbo, &prime_fd) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 340 | |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 341 | igt_assert(nouveau_bo_prime_handle_ref(ndev2, prime_fd, &nvbo2) == 0); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 342 | close(prime_fd); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 343 | |
| 344 | /* not sure what to test for, just make sure we don't explode */ |
| 345 | nouveau_bo_ref(NULL, &nvbo); |
| 346 | nouveau_bo_ref(NULL, &nvbo2); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 347 | } |
| 348 | |
Daniel Vetter | 071e9ca | 2013-10-31 16:23:26 +0100 | [diff] [blame] | 349 | igt_main |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 350 | { |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 351 | igt_fixture { |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 352 | find_and_open_devices(); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 353 | |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 354 | igt_require(nouveau_fd != -1); |
| 355 | igt_require(nouveau_fd2 != -1); |
| 356 | igt_require(intel_fd != -1); |
| 357 | igt_require(intel_fd2 != -1); |
| 358 | |
| 359 | /* set up intel bufmgr */ |
| 360 | bufmgr = drm_intel_bufmgr_gem_init(intel_fd, 4096); |
| 361 | igt_assert(bufmgr); |
| 362 | /* Do not enable reuse, we share (almost) all buffers. */ |
| 363 | //drm_intel_bufmgr_gem_enable_reuse(bufmgr); |
| 364 | |
| 365 | bufmgr2 = drm_intel_bufmgr_gem_init(intel_fd2, 4096); |
Daniel Vetter | ec834c9 | 2013-08-14 22:24:43 +0200 | [diff] [blame] | 366 | igt_assert(bufmgr2); |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 367 | drm_intel_bufmgr_gem_enable_reuse(bufmgr2); |
| 368 | |
| 369 | /* set up nouveau bufmgr */ |
| 370 | igt_assert(nouveau_device_wrap(nouveau_fd, 0, &ndev) >= 0); |
| 371 | igt_assert(nouveau_client_new(ndev, &nclient) >= 0); |
| 372 | |
| 373 | /* set up nouveau bufmgr */ |
| 374 | igt_assert(nouveau_device_wrap(nouveau_fd2, 0, &ndev2) >= 0); |
| 375 | |
| 376 | igt_assert(nouveau_client_new(ndev2, &nclient2) >= 0);; |
| 377 | |
| 378 | /* set up an intel batch buffer */ |
| 379 | devid = intel_get_drm_devid(intel_fd); |
| 380 | intel_batch = intel_batchbuffer_alloc(bufmgr, devid); |
| 381 | igt_assert(intel_batch); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 382 | } |
| 383 | |
Daniel Vetter | 4a9d50d | 2013-07-15 11:04:20 +0200 | [diff] [blame] | 384 | #define xtest(name) \ |
Daniel Vetter | 1caaf0a | 2013-08-12 12:17:35 +0200 | [diff] [blame] | 385 | igt_subtest(#name) \ |
Daniel Vetter | d502ae6 | 2014-05-14 10:44:16 +0200 | [diff] [blame] | 386 | test_##name(); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 387 | |
Daniel Vetter | 4a9d50d | 2013-07-15 11:04:20 +0200 | [diff] [blame] | 388 | xtest(i915_nv_import_twice); |
Daniel Vetter | 05cc515 | 2013-07-15 11:55:09 +0200 | [diff] [blame] | 389 | xtest(i915_nv_import_twice_check_flink_name); |
| 390 | xtest(i915_nv_reimport_twice_check_flink_name); |
| 391 | xtest(nv_i915_import_twice_check_flink_name); |
| 392 | xtest(nv_i915_reimport_twice_check_flink_name); |
Daniel Vetter | 4a9d50d | 2013-07-15 11:04:20 +0200 | [diff] [blame] | 393 | xtest(i915_nv_import_vs_close); |
| 394 | xtest(i915_nv_double_import); |
| 395 | xtest(i915_nv_double_export); |
| 396 | xtest(i915_self_import); |
| 397 | xtest(nv_self_import); |
| 398 | xtest(i915_self_import_to_different_fd); |
| 399 | xtest(nv_self_import_to_different_fd); |
| 400 | |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 401 | igt_fixture { |
| 402 | intel_batchbuffer_free(intel_batch); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 403 | |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 404 | nouveau_device_del(&ndev); |
| 405 | drm_intel_bufmgr_destroy(bufmgr); |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 406 | |
Daniel Vetter | b3880d3 | 2013-08-14 18:02:46 +0200 | [diff] [blame] | 407 | close(intel_fd); |
| 408 | close(nouveau_fd); |
| 409 | } |
Maarten Lankhorst | 4520025 | 2012-08-13 15:57:57 +0200 | [diff] [blame] | 410 | } |