// Copyright (c) Facebook, Inc. and its affiliates.
// All rights reserved.
//
// Copyright 2019 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include <pthread.h>

#ifndef __EMSCRIPTEN__
  #include <cpuinfo.h>
#endif

#include <xnnpack.h>
#include <xnnpack/argmaxpool.h>
#include <xnnpack/avgpool.h>
#include <xnnpack/bilinear.h>
#include <xnnpack/clamp.h>
#include <xnnpack/common.h>
#include <xnnpack/conv.h>
#include <xnnpack/dwconv.h>
#include <xnnpack/gavgpool.h>
#include <xnnpack/gemm.h>
#include <xnnpack/hswish.h>
#include <xnnpack/igemm.h>
#include <xnnpack/log.h>
#include <xnnpack/lut.h>
#include <xnnpack/maxpool.h>
#include <xnnpack/memory.h>
#include <xnnpack/pad.h>
#include <xnnpack/params.h>
#include <xnnpack/pavgpool.h>
#include <xnnpack/prelu.h>
#include <xnnpack/rmax.h>
#include <xnnpack/spmm.h>
#include <xnnpack/unpool.h>
#include <xnnpack/vadd.h>
#include <xnnpack/vbinary.h>
#include <xnnpack/vmulcaddc.h>
#include <xnnpack/vunary.h>
#include <xnnpack/zip.h>

#ifndef XNN_ENABLE_ASSEMBLY
  #define XNN_ENABLE_ASSEMBLY 1
#endif

static pthread_once_t init_guard = PTHREAD_ONCE_INIT;

struct xnn_parameters xnn_params = {
  .initialized = false
};
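// init() below fills xnn_params with the microkernels best suited to the CPU detected at run
// time. init_guard is the pthread_once guard that is expected to ensure init() runs exactly
// once (typically from xnn_initialize()).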

#if XNN_ARCH_PNACL || XNN_ARCH_ASMJS || XNN_ARCH_WASM || XNN_ARCH_WASMSIMD
  extern uint32_t xnn_stub_wasm_f32_sub(uint32_t a, uint32_t b);
#endif
#if XNN_ARCH_PNACL || XNN_ARCH_WASM || XNN_ARCH_WASMSIMD
  extern uint32_t xnn_stub_wasm_f32_min(uint32_t a, uint32_t b);
#endif
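// These stubs are resolved outside of this translation unit; xnn_stub_wasm_f32_sub is used
// below to probe the host's floating-point NaN behavior.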

static void init(void) {
#if XNN_ARCH_ARM
  if (!cpuinfo_has_arm_neon()) {
    xnn_log_error("XNNPACK initialization failed: NEON is not supported");
    return;
  }

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_4x8__neon,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_4x8__neon,
      .mr = 4,
      .nr = 8,
    };

    #if XNN_ENABLE_ASSEMBLY
      xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__aarch32_neon,
        .cr = 8,
        .mr = 9,
      };
    #else
      xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__neon,
        .cr = 8,
        .mr = 9,
      };
    #endif
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__neon;
  #endif  // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__neon_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__neon;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__neon;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
  #endif  // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__neon,
    };
  #endif  // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
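    // With assembly microkernels enabled, the F32 GEMM/IGEMM is chosen per detected core
    // microarchitecture; otherwise a generic NEON variant is used.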
    #if XNN_ENABLE_ASSEMBLY
      switch (cpuinfo_get_core(0)->uarch) {
        case cpuinfo_uarch_cortex_a53:
        case cpuinfo_uarch_cortex_a55:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_cortex_a53,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;

        case cpuinfo_uarch_cortex_a57:
        case cpuinfo_uarch_cortex_a72:
        case cpuinfo_uarch_cortex_a73:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_pld_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;

        default:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;
      }
    #else  // XNN_ENABLE_ASSEMBLY
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__neon_lane_ld128,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
        .mr = 4,
        .nr = 8,
      };
    #endif  // XNN_ENABLE_ASSEMBLY
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__neon_lane_ld64,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd,
      .cr = 4,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__neon,
      .cr = 4,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__neon,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__neon_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__neon;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__neon_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__neon_rr2_lut64_p2_nr2recps_x8;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__neon_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__scalar_x2,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__scalar_x2,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__scalar_x2,
      .element_tile = 2,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__neon_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
  #endif  // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__neon,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__neon,
    };
  #endif  // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_ARM64

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_8x8__neon,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_8x8__neon,
      .mr = 8,
      .nr = 8,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__neon,
      .cr = 8,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__neon;
  #endif  // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__neon_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__neon;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__neon;
  #endif  // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__neon,
    };
  #endif  // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
    #if XNN_ENABLE_ASSEMBLY
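      // On AArch64 the F32 GEMM/IGEMM assembly microkernels are selected per detected core
      // microarchitecture (Kryo, Cortex-A5x/A7x, Exynos M-series), with a generic default.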
      switch (cpuinfo_get_core(0)->uarch) {
        case cpuinfo_uarch_kryo:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a57:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a57,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a57,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a57,
            .mr = 6,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a72:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a75:
        case cpuinfo_uarch_cortex_a76:
        case cpuinfo_uarch_exynos_m3:
        case cpuinfo_uarch_exynos_m4:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 6,
            .nr = 8,
          };
          break;

        case cpuinfo_uarch_exynos_m1:
        case cpuinfo_uarch_exynos_m2:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8s4__neonfma,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8s4__neonfma,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8s4__neonfma,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8s4__neonfma,
            .mr = 6,
            .nr = 8,
            .log2_sr = 2,
          };
          break;

        case cpuinfo_uarch_cortex_a53:
        case cpuinfo_uarch_cortex_a55:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a53,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a53,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a53,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a53,
            .mr = 6,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a73:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a73,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a73,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 6,
            .nr = 8,
          };
          break;
        default:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
      }
    #else  // XNN_ENABLE_ASSEMBLY
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__neonfma_lane_ld64,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__neonfma_lane_ld64,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neonfma_lane_ld64,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neonfma_lane_ld64,
        .mr = 6,
        .nr = 8,
      };
    #endif  // XNN_ENABLE_ASSEMBLY

    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__neonfma_lane_ld64,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd,
      .cr = 4,
      .mr = 4,
    };
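    // The 9-tap (3x3) F32 depthwise convolution is also tuned per core: Kryo keeps the
    // 4-channel NEONFMA variant, Cortex-A53/A55 get a dedicated assembly microkernel when it
    // is enabled, and everything else uses the 8-channel NEONFMA variant.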
    switch (cpuinfo_get_core(0)->uarch) {
      case cpuinfo_uarch_kryo:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__neonfma,
          .cr = 4,
          .mr = 9,
        };
        break;
#if XNN_ENABLE_ASSEMBLY
      case cpuinfo_uarch_cortex_a53:
      case cpuinfo_uarch_cortex_a55:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__aarch64_neonfma_cortex_a55,
          .cr = 4,
          .mr = 9,
        };
        break;
#endif
      default:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x9__neonfma,
          .cr = 8,
          .mr = 9,
        };
        break;
    }
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__neon,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__neonfma_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__neon;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__neonfma_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__neonfma_rr1_lut64_p2_nr2recps_x16;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__neon_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__neonfma_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
    #ifndef XNN_NO_NCHW_OPERATORS
      xnn_params.f32.spmm = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x1__neonfma_pipelined,
        .mr = 16,
        .nr = 1,
      };
      xnn_params.f32.spmm2 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x2__neonfma,
        .mr = 16,
        .nr = 2,
      };
      xnn_params.f32.spmm4 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x4__neonfma,
        .mr = 16,
        .nr = 4,
      };
      xnn_params.f32.hwc2spchw_dconv3x3c3s2 = (struct hwc2spchw_dconv_parameters) {
        .ukernel_with_symm_padding =
          (xnn_conv_hwc2spchw_ukernel_function) xnn_f32_conv_hwc2spchw_ukernel_3x3s2p1c3x4__neonfma_2x2,
        .output_channel_tile = 4,
        .output_height_tile = 2,
        .output_width_tile = 2,
      };
      xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 3,
      };
      xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5p2__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 3,
      };
      xnn_params.f32.spchw_dwconv5x5s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5s2p2__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
        .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__neon_x4,
        .channel_tile = 4,
      };
    #endif  // XNN_NO_NCHW_OPERATORS
  #endif  // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__neon,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__neon,
    };
  #endif  // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_X86 || XNN_ARCH_X86_64
  if (!cpuinfo_has_x86_sse2()) {
    xnn_log_error("XNNPACK initialization failed: SSE2 is not supported");
    return;
  }

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_4x4c2__sse2,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_4x4c2__sse2,
      .mr = 4,
      .nr = 4,
      .log2_kr = 1,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__sse2,
      .cr = 8,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__sse2,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__sse2,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__sse2,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__sse2,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__sse2;
  #endif  // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__sse2_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__sse2;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__sse2;
  #endif  // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__sse2,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__sse2,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__sse2,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__sse2,
    };
  #endif  // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
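    // On x86/x86-64 the F32 microkernels are selected by the best ISA extension available at
    // run time: AVX512F, then FMA3 (with a GEMM variant dedicated to AMD Zen), then AVX,
    // then SSE.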
    if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_7x16__avx512f_broadcast,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_7x16__avx512f_broadcast,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__avx512f_broadcast,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__avx512f_broadcast,
        .mr = 7,
        .nr = 16,
      };
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
      switch (cpuinfo_get_core(0)->uarch) {
        case cpuinfo_uarch_zen:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x16s4__fma3_broadcast,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x16s4__fma3_broadcast,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16s4__fma3_broadcast,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16s4__fma3_broadcast,
            .mr = 4,
            .nr = 16,
            .log2_sr = 2,
          };
          break;
        default:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_5x16__fma3_broadcast,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_5x16__fma3_broadcast,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__fma3_broadcast,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__fma3_broadcast,
            .mr = 5,
            .nr = 16,
          };
          break;
      }
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_5x16__avx_broadcast,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_5x16__avx_broadcast,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__avx_broadcast,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__avx_broadcast,
        .mr = 5,
        .nr = 16,
      };
    } else {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__sse_load1,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__sse_load1,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__sse_load1,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__sse_load1,
        .mr = 4,
        .nr = 8,
      };
    }
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2c4__sse,
      .mr = 4,
      .nr = 2,
      .log2_kr = 2,
    };
    if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
      xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__avx512f,
        .cr = 16,
        .mr = 4,
      };
      xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__avx512f,
        .cr = 16,
        .mr = 9,
      };
      xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x25__avx512f,
        .cr = 16,
        .mr = 25,
      };
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
      xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__fma3,
        .cr = 16,
        .mr = 4,
      };
      xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__fma3,
        .cr = 16,
        .mr = 9,
      };
      xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__fma3,
        .cr = 8,
        .mr = 25,
      };
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
      xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__avx,
        .cr = 16,
        .mr = 4,
      };
      xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__avx,
        .cr = 16,
        .mr = 9,
      };
      xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__avx,
        .cr = 8,
        .mr = 25,
      };
    } else {
      xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x4__sse,
        .cr = 8,
        .mr = 4,
      };
      xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x9__sse,
        .cr = 8,
        .mr = 9,
      };
      xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__sse,
        .cr = 8,
        .mr = 25,
      };
    }
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__sse,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__sse,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__sse,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__sse,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__sse,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__sse,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__sse_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__sse2_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__sse2_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__sse2_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__sse_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
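    // The element-wise microkernels below follow the same run-time ISA selection; sigmoid and
    // PReLU currently use the SSE2 implementations unconditionally.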
    if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
      xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__avx512f;
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
      xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__avx;
    } else {
      xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__sse;
    }
    if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
      xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__avx512f_x32;
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
      xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__fma3_x16;
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
      xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__avx_x16;
    } else {
      xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__sse_x8;
    }
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__sse2_p5_div_x16;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__sse2_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
      xnn_params.f32.vadd = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
      xnn_params.f32.vdiv = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
      xnn_params.f32.vmax = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
      xnn_params.f32.vmin = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
      xnn_params.f32.vmul = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
      xnn_params.f32.vsub = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__avx512f_x32,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__avx512f_x32,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__avx512f_x32,
        .element_tile = 32,
      };
    } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
      xnn_params.f32.vadd = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx_x16,
        .element_tile = 16,
      };
      xnn_params.f32.vdiv = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__avx_x16,
        .element_tile = 16,
      };
      xnn_params.f32.vmax = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx_x16,
        .element_tile = 16,
      };
      xnn_params.f32.vmin = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx_x16,
        .element_tile = 16,
      };
      xnn_params.f32.vmul = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx_x16,
        .element_tile = 16,
      };
      xnn_params.f32.vsub = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__avx_x16,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__avx_x16,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__avx_x16,
        .element_tile = 16,
      };
    } else {
      xnn_params.f32.vadd = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__sse_x8,
        .element_tile = 8,
      };
      xnn_params.f32.vdiv = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__sse_x8,
        .element_tile = 8,
      };
      xnn_params.f32.vmax = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__sse_x8,
        .element_tile = 8,
      };
      xnn_params.f32.vmin = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__sse_x8,
        .element_tile = 8,
      };
      xnn_params.f32.vmul = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__sse_x8,
        .element_tile = 8,
      };
      xnn_params.f32.vsub = (struct vbinary_parameters) {
        .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__sse_x8,
        .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__sse_x8,
        .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__sse_x8,
        .element_tile = 8,
      };
    }
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__sse_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
    #ifndef XNN_NO_NCHW_OPERATORS
      xnn_params.f32.spmm = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_4x1__sse,
        .mr = 4,
        .nr = 1,
      };
      xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__sse,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__sse,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
        .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__sse_x4,
        .channel_tile = 4,
      };
    #endif  // XNN_NO_NCHW_OPERATORS
  #endif  // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__sse2,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__sse2,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__sse2,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__sse2,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__sse2,
    };
  #endif  // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_PNACL || XNN_ARCH_WASMSIMD
  // Unlike most other architectures, on x86/x86-64, when a floating-point instruction has no
  // NaN arguments but produces a NaN output, the output NaN has its sign bit set. We use this
  // to distinguish x86/x86-64 from other architectures by subtracting two infinities (which
  // must produce a NaN per the IEEE 754 standard).
  static volatile uint32_t minus_inf = UINT32_C(0xFF800000);
  const bool is_wasm_x86 = (int32_t) xnn_stub_wasm_f32_sub(minus_inf, minus_inf) < 0;
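  // is_wasm_x86 steers some of the microkernel choices below (e.g. the F32 GEMM tile) toward
  // variants tuned for x86-class hosts.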

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_2x2__scalar,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_2x2__scalar,
      .mr = 2,
      .nr = 2,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up1x9__scalar,
      .cr = 1,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__scalar,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__scalar,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__scalar,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__scalar,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__scalar;
  #endif // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__scalar_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__scalar;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__scalar;
  #endif // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__scalar,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__scalar,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__scalar,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__scalar,
    };
  #endif // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
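    // In the parameter structures below, .mr and .nr give the number of output rows and columns
    // that one GEMM/IGEMM microkernel invocation produces, and .log2_kr/.log2_sr (where present)
    // encode additional packing factors as base-2 logarithms. Different microkernel shapes are
    // selected depending on whether the WebAssembly host was detected as x86/x86-64 above.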
    if (is_wasm_x86) {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__psimd_splat,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__psimd_splat,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__psimd_splat,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__psimd_splat,
        .mr = 4,
        .nr = 8,
      };
    } else {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8s4__psimd,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8s4__psimd,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8s4__psimd,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8s4__psimd,
        .mr = 6,
        .nr = 8,
        .log2_sr = 2,
      };
    }
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2c4__psimd,
      .mr = 4,
      .nr = 2,
      .log2_kr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd_acc2,
      .cr = 4,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__psimd_acc2,
      .cr = 4,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd_acc2,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__psimd,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__psimd,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__psimd,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__psimd,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__psimd,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__psimd,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__psimd_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__psimd;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__psimd_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__psimd_p5_div_x16;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__psimd_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__psimd_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__psimd_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__psimd_x4,
      .element_tile = 4,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__psimd_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
  #endif // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__psimd,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__psimd,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__psimd,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__psimd,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__psimd,
    };
  #endif // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_WASM || XNN_ARCH_ASMJS
  // Unlike most other architectures, on x86/x86-64 a floating-point instruction that takes no NaN
  // arguments but produces a NaN result returns that NaN with the sign bit set. We use this to
  // distinguish x86/x86-64 from other architectures by subtracting two infinities, which must
  // produce a NaN per the IEEE 754 standard.
  static volatile uint32_t minus_inf = UINT32_C(0xFF800000);
  const bool is_wasm_x86 = (int32_t) xnn_stub_wasm_f32_sub(minus_inf, minus_inf) < 0;
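  // Descriptive note: minus_inf is declared volatile so the subtraction above cannot be
  // constant-folded at compile time; the check has to observe the floating-point behavior of the
  // machine that actually executes the WebAssembly/asm.js code.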

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_2x2__scalar,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_2x2__scalar,
      .mr = 2,
      .nr = 2,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up1x9__scalar,
      .cr = 1,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__scalar,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__scalar,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__scalar,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__scalar,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__scalar;
  #endif // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__scalar_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__scalar;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__scalar;
  #endif // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__scalar,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__scalar,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__scalar,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__scalar,
    };
  #endif // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
    if (is_wasm_x86) {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_2x4__scalar,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_2x4__scalar,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x4__wasm,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x4__wasm,
        .mr = 2,
        .nr = 4,
      };
    } else {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x4__wasm,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x4__wasm,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x4__wasm,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x4__wasm,
        .mr = 4,
        .nr = 4,
      };
    }
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__wasm,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x4__wasm_acc2,
      .cr = 1,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x9__wasm_acc2,
      .cr = 1,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x25__wasm_acc2,
      .cr = 1,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__wasm,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__wasm,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__wasm,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__wasm,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__wasm,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__wasm,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__wasm_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__scalar_c1,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__scalar_c1,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__scalar_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__scalar_c2,
      .pixel_tile = 1,
      .channel_tile = 2,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__wasm;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__wasm_x4;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__wasm_2x4,
      .row_tile = 4,
      .channel_tile = 4,
    };
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__wasm_x2,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__wasm_x2,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__wasm_x2,
      .element_tile = 2,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c1__wasm_2x,
      .channel_tile = 1,
      .row_tile = 2,
    };
    #ifndef XNN_NO_NCHW_OPERATORS
      xnn_params.f32.spmm = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x1__scalar,
        .mr = 8,
        .nr = 1,
      };
      xnn_params.f32.spmm2 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x2__scalar,
        .mr = 8,
        .nr = 2,
      };
      xnn_params.f32.spmm4 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x4__scalar,
        .mr = 8,
        .nr = 4,
      };
      xnn_params.f32.hwc2spchw_dconv3x3c3s2 = (struct hwc2spchw_dconv_parameters) {
        .ukernel_with_symm_padding =
          (xnn_conv_hwc2spchw_ukernel_function) xnn_f32_conv_hwc2spchw_ukernel_3x3s2p1c3x4__scalar_1x1,
        .output_channel_tile = 4,
        .output_height_tile = 1,
        .output_width_tile = 1,
      };
      xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5p2__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5s2p2__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
        .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__scalar_x1,
        .channel_tile = 1,
      };
    #endif // XNN_NO_NCHW_OPERATORS
  #endif // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__scalar,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__scalar;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__scalar,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__scalar,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__scalar,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__scalar,
    };
  #endif // XNN_NO_X32_OPERATORS

#else
  #error "Unsupported architecture"
#endif
  xnn_params.initialized = true;
}

enum xnn_status xnn_initialize(const struct xnn_allocator* allocator) {
  #ifndef __EMSCRIPTEN__
    if (!cpuinfo_initialize()) {
      return xnn_status_out_of_memory;
    }
  #endif
  pthread_once(&init_guard, &init);
  if (xnn_params.initialized) {
    if (allocator != NULL) {
      memcpy(&xnn_params.allocator, allocator, sizeof(struct xnn_allocator));
    } else {
      xnn_params.allocator.allocate = &xnn_allocate;
      xnn_params.allocator.reallocate = &xnn_reallocate;
      xnn_params.allocator.deallocate = &xnn_deallocate;
      xnn_params.allocator.aligned_allocate = &xnn_aligned_allocate;
      xnn_params.allocator.aligned_deallocate = &xnn_aligned_deallocate;
    }
    return xnn_status_success;
  } else {
    return xnn_status_unsupported_hardware;
  }
}

enum xnn_status xnn_deinitialize(void) {
  #ifndef __EMSCRIPTEN__
    cpuinfo_deinitialize();
  #endif
  return xnn_status_success;
}
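
// Usage sketch (illustrative only, not part of the library): a caller initializes XNNPACK once
// before creating operators and may tear it down when finished. Passing NULL selects the default
// allocator configured above; alternatively, a caller-provided struct xnn_allocator is copied
// into xnn_params.allocator.
//
//   #include <xnnpack.h>
//
//   int main(void) {
//     if (xnn_initialize(NULL /* use default allocator */) != xnn_status_success) {
//       return 1;  // required ISA missing, or cpuinfo failed to initialize
//     }
//     // ... create, set up, and run XNNPACK operators ...
//     xnn_deinitialize();
//     return 0;
//   }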