// Copyright 2019 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.
//
// Auto-generated file. Do not edit!
//   Specification: test/u8-ibilinear.yaml
//   Generator: tools/generate-ibilinear-test.py


#include <gtest/gtest.h>

#include <xnnpack/common.h>
#include <xnnpack/isa-checks.h>

#include <xnnpack/ibilinear.h>
#include "ibilinear-microkernel-tester.h"

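// Note: every microkernel below is exercised with channel counts equal to, below, above,
// and at multiples of the channel tile implied by its suffix (_c1 ... _c16), with
// multi-pixel inputs, and with non-default input offsets and output strides.
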
TEST(U8_IBILINEAR__SCALAR_C1, channels_eq_1) {
  IBilinearMicrokernelTester()
    .pixels(1)
    .channels(1)
    .Test(xnn_u8_ibilinear_ukernel__scalar_c1);
}

TEST(U8_IBILINEAR__SCALAR_C1, channels_gt_1) {
  for (size_t channels = 2; channels < 10; channels++) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c1);
  }
}

TEST(U8_IBILINEAR__SCALAR_C1, pixels_gt_1) {
  for (size_t pixels = 2; pixels < 3; pixels++) {
    for (size_t channels = 1; channels <= 5; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c1);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C1, input_offset) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 5; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .input_offset(7)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c1);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C1, output_stride) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 5; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .output_stride(7)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c1);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, channels_eq_2) {
  IBilinearMicrokernelTester()
    .pixels(1)
    .channels(2)
    .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
}

TEST(U8_IBILINEAR__SCALAR_C2, channels_div_2) {
  for (size_t channels = 4; channels < 20; channels += 2) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, channels_lt_2) {
  for (size_t channels = 1; channels < 2; channels++) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, channels_gt_2) {
  for (size_t channels = 3; channels < 4; channels++) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, pixels_gt_1) {
  for (size_t pixels = 2; pixels < 3; pixels++) {
    for (size_t channels = 1; channels <= 10; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, input_offset) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 10; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .input_offset(13)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C2, output_stride) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 10; channels += 1) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .output_stride(13)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c2);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, channels_eq_4) {
  IBilinearMicrokernelTester()
    .pixels(1)
    .channels(4)
    .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
}

TEST(U8_IBILINEAR__SCALAR_C4, channels_div_4) {
  for (size_t channels = 8; channels < 40; channels += 4) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, channels_lt_4) {
  for (size_t channels = 1; channels < 4; channels++) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, channels_gt_4) {
  for (size_t channels = 5; channels < 8; channels++) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(channels)
      .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, pixels_gt_1) {
  for (size_t pixels = 2; pixels < 3; pixels++) {
    for (size_t channels = 1; channels <= 20; channels += 3) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, input_offset) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 20; channels += 3) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .input_offset(23)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
    }
  }
}

TEST(U8_IBILINEAR__SCALAR_C4, output_stride) {
  for (size_t pixels = 1; pixels < 5; pixels += 1) {
    for (size_t channels = 1; channels <= 20; channels += 3) {
      IBilinearMicrokernelTester()
        .pixels(pixels)
        .channels(channels)
        .output_stride(23)
        .Test(xnn_u8_ibilinear_ukernel__scalar_c4);
    }
  }
}

#if XNN_ARCH_ARM || XNN_ARCH_ARM64
  TEST(U8_IBILINEAR__NEON_C8, channels_eq_8) {
    TEST_REQUIRES_ARM_NEON;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(8)
      .Test(xnn_u8_ibilinear_ukernel__neon_c8);
  }

  TEST(U8_IBILINEAR__NEON_C8, channels_div_8) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 16; channels < 80; channels += 8) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c8);
    }
  }

  TEST(U8_IBILINEAR__NEON_C8, channels_lt_8) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 1; channels < 8; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c8);
    }
  }

  TEST(U8_IBILINEAR__NEON_C8, channels_gt_8) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 9; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c8);
    }
  }

  TEST(U8_IBILINEAR__NEON_C8, pixels_gt_1) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__neon_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__NEON_C8, input_offset) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(43)
          .Test(xnn_u8_ibilinear_ukernel__neon_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__NEON_C8, output_stride) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(43)
          .Test(xnn_u8_ibilinear_ukernel__neon_c8);
      }
    }
  }
#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64


#if XNN_ARCH_ARM || XNN_ARCH_ARM64
  TEST(U8_IBILINEAR__NEON_C16, channels_eq_16) {
    TEST_REQUIRES_ARM_NEON;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(16)
      .Test(xnn_u8_ibilinear_ukernel__neon_c16);
  }

  TEST(U8_IBILINEAR__NEON_C16, channels_div_16) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 32; channels < 160; channels += 16) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c16);
    }
  }

  TEST(U8_IBILINEAR__NEON_C16, channels_lt_16) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 1; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c16);
    }
  }

  TEST(U8_IBILINEAR__NEON_C16, channels_gt_16) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t channels = 17; channels < 32; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__neon_c16);
    }
  }

  TEST(U8_IBILINEAR__NEON_C16, pixels_gt_1) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__neon_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__NEON_C16, input_offset) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(83)
          .Test(xnn_u8_ibilinear_ukernel__neon_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__NEON_C16, output_stride) {
    TEST_REQUIRES_ARM_NEON;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(83)
          .Test(xnn_u8_ibilinear_ukernel__neon_c16);
      }
    }
  }
#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64


#if XNN_ARCH_X86 || XNN_ARCH_X86_64
  TEST(U8_IBILINEAR__SSE2_C8, channels_eq_8) {
    TEST_REQUIRES_X86_SSE2;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(8)
      .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
  }

  TEST(U8_IBILINEAR__SSE2_C8, channels_div_8) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 16; channels < 80; channels += 8) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C8, channels_lt_8) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 1; channels < 8; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C8, channels_gt_8) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 9; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C8, pixels_gt_1) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE2_C8, input_offset) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(43)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE2_C8, output_stride) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(43)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c8);
      }
    }
  }
#endif  // XNN_ARCH_X86 || XNN_ARCH_X86_64


#if XNN_ARCH_X86 || XNN_ARCH_X86_64
  TEST(U8_IBILINEAR__SSE2_C16, channels_eq_16) {
    TEST_REQUIRES_X86_SSE2;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(16)
      .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
  }

  TEST(U8_IBILINEAR__SSE2_C16, channels_div_16) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 32; channels < 160; channels += 16) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C16, channels_lt_16) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 1; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C16, channels_gt_16) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t channels = 17; channels < 32; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE2_C16, pixels_gt_1) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE2_C16, input_offset) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(83)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE2_C16, output_stride) {
    TEST_REQUIRES_X86_SSE2;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(83)
          .Test(xnn_u8_ibilinear_ukernel__sse2_c16);
      }
    }
  }
#endif  // XNN_ARCH_X86 || XNN_ARCH_X86_64


#if XNN_ARCH_X86 || XNN_ARCH_X86_64
  TEST(U8_IBILINEAR__SSE41_C8, channels_eq_8) {
    TEST_REQUIRES_X86_SSE41;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(8)
      .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
  }

  TEST(U8_IBILINEAR__SSE41_C8, channels_div_8) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 16; channels < 80; channels += 8) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C8, channels_lt_8) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 1; channels < 8; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C8, channels_gt_8) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 9; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C8, pixels_gt_1) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE41_C8, input_offset) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(43)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE41_C8, output_stride) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(43)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c8);
      }
    }
  }
#endif  // XNN_ARCH_X86 || XNN_ARCH_X86_64


#if XNN_ARCH_X86 || XNN_ARCH_X86_64
  TEST(U8_IBILINEAR__SSE41_C16, channels_eq_16) {
    TEST_REQUIRES_X86_SSE41;
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(16)
      .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
  }

  TEST(U8_IBILINEAR__SSE41_C16, channels_div_16) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 32; channels < 160; channels += 16) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C16, channels_lt_16) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 1; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C16, channels_gt_16) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t channels = 17; channels < 32; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
    }
  }

  TEST(U8_IBILINEAR__SSE41_C16, pixels_gt_1) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE41_C16, input_offset) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(83)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__SSE41_C16, output_stride) {
    TEST_REQUIRES_X86_SSE41;
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(83)
          .Test(xnn_u8_ibilinear_ukernel__sse41_c16);
      }
    }
  }
#endif  // XNN_ARCH_X86 || XNN_ARCH_X86_64


#if XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD
  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, channels_eq_8) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(8)
      .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, channels_div_8) {
    for (size_t channels = 16; channels < 80; channels += 8) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, channels_lt_8) {
    for (size_t channels = 1; channels < 8; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, channels_gt_8) {
    for (size_t channels = 9; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, pixels_gt_1) {
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, input_offset) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(43)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C8, output_stride) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(43)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c8);
      }
    }
  }
#endif  // XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD


#if XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD
  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, channels_eq_16) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(16)
      .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, channels_div_16) {
    for (size_t channels = 32; channels < 160; channels += 16) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, channels_lt_16) {
    for (size_t channels = 1; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, channels_gt_16) {
    for (size_t channels = 17; channels < 32; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, pixels_gt_1) {
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, input_offset) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(83)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_DOT16X2_C16, output_stride) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(83)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_dot16x2_c16);
      }
    }
  }
#endif  // XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD


#if XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD
  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, channels_eq_8) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(8)
      .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, channels_div_8) {
    for (size_t channels = 16; channels < 80; channels += 8) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, channels_lt_8) {
    for (size_t channels = 1; channels < 8; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, channels_gt_8) {
    for (size_t channels = 9; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, pixels_gt_1) {
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, input_offset) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(43)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C8, output_stride) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 40; channels += 7) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(43)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c8);
      }
    }
  }
#endif  // XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD


#if XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD
  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, channels_eq_16) {
    IBilinearMicrokernelTester()
      .pixels(1)
      .channels(16)
      .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, channels_div_16) {
    for (size_t channels = 32; channels < 160; channels += 16) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, channels_lt_16) {
    for (size_t channels = 1; channels < 16; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, channels_gt_16) {
    for (size_t channels = 17; channels < 32; channels++) {
      IBilinearMicrokernelTester()
        .pixels(1)
        .channels(channels)
        .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, pixels_gt_1) {
    for (size_t pixels = 2; pixels < 3; pixels++) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, input_offset) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .input_offset(83)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
      }
    }
  }

  TEST(U8_IBILINEAR__WASMSIMD_MUL32_C16, output_stride) {
    for (size_t pixels = 1; pixels < 5; pixels += 1) {
      for (size_t channels = 1; channels <= 80; channels += 15) {
        IBilinearMicrokernelTester()
          .pixels(pixels)
          .channels(channels)
          .output_stride(83)
          .Test(xnn_u8_ibilinear_ukernel__wasmsimd_mul32_c16);
      }
    }
  }
#endif  // XNN_ARCH_WASMSIMD || XNN_ARCH_WASMRELAXEDSIMD