// Copyright (c) Facebook, Inc. and its affiliates.
// All rights reserved.
//
// Copyright 2019 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include <pthread.h>

#ifndef __EMSCRIPTEN__
  #include <cpuinfo.h>
#endif

#include <xnnpack.h>
#include <xnnpack/argmaxpool.h>
#include <xnnpack/avgpool.h>
#include <xnnpack/bilinear.h>
#include <xnnpack/clamp.h>
#include <xnnpack/common.h>
#include <xnnpack/conv.h>
#include <xnnpack/dwconv.h>
#include <xnnpack/gavgpool.h>
#include <xnnpack/gemm.h>
#include <xnnpack/hswish.h>
#include <xnnpack/igemm.h>
#include <xnnpack/log.h>
#include <xnnpack/lut.h>
#include <xnnpack/maxpool.h>
#include <xnnpack/memory.h>
#include <xnnpack/pad.h>
#include <xnnpack/params.h>
#include <xnnpack/pavgpool.h>
#include <xnnpack/prelu.h>
#include <xnnpack/raddstoreexpminusmax.h>
#include <xnnpack/rmax.h>
#include <xnnpack/spmm.h>
#include <xnnpack/unpool.h>
#include <xnnpack/vadd.h>
#include <xnnpack/vbinary.h>
#include <xnnpack/vmulcaddc.h>
#include <xnnpack/vunary.h>
#include <xnnpack/zip.h>

#ifndef XNN_ENABLE_ASSEMBLY
  #define XNN_ENABLE_ASSEMBLY 1
#endif

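// One-time initialization guard for init() below (intended for use with pthread_once).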
static pthread_once_t init_guard = PTHREAD_ONCE_INIT;

struct xnn_parameters xnn_params = {
  .initialized = false
};

#if XNN_ARCH_PNACL || XNN_ARCH_ASMJS || XNN_ARCH_WASM || XNN_ARCH_WASMSIMD
  extern uint32_t xnn_stub_wasm_f32_sub(uint32_t a, uint32_t b);
#endif
#if XNN_ARCH_PNACL || XNN_ARCH_WASM || XNN_ARCH_WASMSIMD
  extern uint32_t xnn_stub_wasm_f32_min(uint32_t a, uint32_t b);
#endif

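// Detects CPU features (via cpuinfo where available) and populates xnn_params
// with microkernels selected for the target architecture and microarchitecture.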
static void init(void) {
#if XNN_ARCH_ARM
  if (!cpuinfo_has_arm_neon()) {
    xnn_log_error("XNNPACK initialization failed: NEON is not supported");
    return;
  }

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_4x8__neon,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_4x8__neon,
      .mr = 4,
      .nr = 8,
    };

    #if XNN_ENABLE_ASSEMBLY
      xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__aarch32_neon,
        .cr = 8,
        .mr = 9,
      };
    #else
      xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
        .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__neon,
        .cr = 8,
        .mr = 9,
      };
    #endif
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__neon;
  #endif // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__neon_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__neon;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__neon;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
  #endif // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__neon,
    };
  #endif // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
    #if XNN_ENABLE_ASSEMBLY
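      // Select AArch32 F32 GEMM/IGEMM microkernels based on the core microarchitecture reported by cpuinfo.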
      switch (cpuinfo_get_core(0)->uarch) {
        case cpuinfo_uarch_cortex_a53:
        case cpuinfo_uarch_cortex_a55:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_cortex_a53,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;

        case cpuinfo_uarch_cortex_a57:
        case cpuinfo_uarch_cortex_a72:
        case cpuinfo_uarch_cortex_a73:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_pld_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;

        default:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch32_neon_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
            .mr = 4,
            .nr = 8,
          };
          break;
      }
    #else // XNN_ENABLE_ASSEMBLY
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__neon_lane_ld128,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__neon_lane_ld128,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neon_lane_ld64,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neon_lane_ld64,
        .mr = 4,
        .nr = 8,
      };
    #endif // XNN_ENABLE_ASSEMBLY
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__neon_lane_ld64,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd,
      .cr = 4,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__neon,
      .cr = 4,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__neon,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__neon_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__neon;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__neon_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__neon_rr2_lut64_p2_nr2recps_x8;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__neon_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.raddstoreexpminusmax = xnn_f32_raddstoreexpminusmax_ukernel__neon_lut64_p2_x8;
    xnn_params.f32.rmax = xnn_f32_rmax_ukernel__neon;
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__scalar_x2,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__scalar_x2,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__scalar_x2,
      .element_tile = 2,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__neon_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
  #endif // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__neon,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__neon,
    };
  #endif // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_ARM64

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_8x8__neon,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_8x8__neon,
      .mr = 8,
      .nr = 8,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__neon,
      .cr = 8,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__neon;
  #endif // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__neon_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__neon;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__neon;
  #endif // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__neon,
    };
  #endif // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
    #if XNN_ENABLE_ASSEMBLY
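      // As on AArch32, pick F32 GEMM/IGEMM microkernels tuned for the detected core microarchitecture.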
      switch (cpuinfo_get_core(0)->uarch) {
        case cpuinfo_uarch_kryo:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a57:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a57,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a57,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a57,
            .mr = 6,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a72:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a77:
        case cpuinfo_uarch_exynos_m5:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a75:
        case cpuinfo_uarch_cortex_a76:
        case cpuinfo_uarch_exynos_m3:
        case cpuinfo_uarch_exynos_m4:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a75,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 6,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_exynos_m1:
        case cpuinfo_uarch_exynos_m2:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8s4__neonfma,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8s4__neonfma,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8s4__neonfma,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8s4__neonfma,
            .mr = 6,
            .nr = 8,
            .log2_sr = 2,
          };
          break;

        case cpuinfo_uarch_cortex_a53:
        case cpuinfo_uarch_cortex_a55:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a53,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a53,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a53,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a53,
            .mr = 6,
            .nr = 8,
          };
          break;
        case cpuinfo_uarch_cortex_a73:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__aarch64_neonfma_cortex_a73,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__aarch64_neonfma_cortex_a73,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 6,
            .nr = 8,
          };
          break;
        default:
          xnn_params.f32.gemm = (struct gemm_parameters) {
            .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__aarch64_neonfma_cortex_a57,
            .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__aarch64_neonfma_cortex_a75,
            .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__aarch64_neonfma_cortex_a75,
            .mr = 4,
            .nr = 8,
          };
          break;
      }
    #else // XNN_ENABLE_ASSEMBLY
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8__neonfma_lane_ld64,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8__neonfma_lane_ld64,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__neonfma_lane_ld64,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__neonfma_lane_ld64,
        .mr = 6,
        .nr = 8,
      };
    #endif // XNN_ENABLE_ASSEMBLY

    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__neonfma_lane_ld64,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd,
      .cr = 4,
      .mr = 4,
    };
    switch (cpuinfo_get_core(0)->uarch) {
      case cpuinfo_uarch_kryo:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__neonfma,
          .cr = 4,
          .mr = 9,
        };
        break;
#if XNN_ENABLE_ASSEMBLY
      case cpuinfo_uarch_cortex_a53:
      case cpuinfo_uarch_cortex_a55:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__aarch64_neonfma_cortex_a55,
          .cr = 4,
          .mr = 9,
        };
        break;
#endif
      default:
        xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
          .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x9__neonfma,
          .cr = 8,
          .mr = 9,
        };
        break;
    }
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__neon,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__neon,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__neon,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__neon,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__neon,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__neonfma_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__neon;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__neonfma_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__neonfma_rr1_lut64_p2_nr2recps_x16;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__neon_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.raddstoreexpminusmax = xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x16;
    xnn_params.f32.rmax = xnn_f32_rmax_ukernel__neon;
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__neon_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__neon_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__neon_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__neonfma_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
    #ifndef XNN_NO_NCHW_OPERATORS
      xnn_params.f32.spmm = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x1__neonfma_pipelined,
        .mr = 16,
        .nr = 1,
      };
      xnn_params.f32.spmm2 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x2__neonfma,
        .mr = 16,
        .nr = 2,
      };
      xnn_params.f32.spmm4 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_16x4__neonfma,
        .mr = 16,
        .nr = 4,
      };
      xnn_params.f32.hwc2spchw_dconv3x3c3s2 = (struct hwc2spchw_dconv_parameters) {
        .ukernel_with_symm_padding =
          (xnn_conv_hwc2spchw_ukernel_function) xnn_f32_conv_hwc2spchw_ukernel_3x3s2p1c3x4__neonfma_2x2,
        .output_channel_tile = 4,
        .output_height_tile = 2,
        .output_width_tile = 2,
      };
      xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 3,
      };
      xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5p2__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 3,
      };
      xnn_params.f32.spchw_dwconv5x5s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5s2p2__neonfma,
        .input_width_tile = 4,
        .output_width_tile = 4,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
        .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__neon_x4,
        .channel_tile = 4,
      };
    #endif // XNN_NO_NCHW_OPERATORS
  #endif // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__neon,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__neon,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__neon,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__neon,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__neon,
    };
  #endif // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_X86 || XNN_ARCH_X86_64
  if (!cpuinfo_has_x86_sse2()) {
    xnn_log_error("XNNPACK initialization failed: SSE2 is not supported");
    return;
  }

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_4x4c2__sse2,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_4x4c2__sse2,
      .mr = 4,
      .nr = 4,
      .log2_kr = 1,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up8x9__sse2,
      .cr = 8,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__sse2,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__sse2,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__sse2,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__sse2,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__sse2;
  #endif // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__sse2_c16,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__sse2;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__sse2;
  #endif // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__sse2,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__sse2,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__sse2,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__sse2,
    };
  #endif // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
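    // Dispatch on the best available ISA extension: AVX512F, then FMA3, then AVX, then SSE.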
Marat Dukhan0f349c42019-11-27 11:58:54 -0800738 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
739 xnn_params.f32.gemm = (struct gemm_parameters) {
740 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_7x16__avx512f_broadcast,
741 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_7x16__avx512f_broadcast,
742 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__avx512f_broadcast,
743 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__avx512f_broadcast,
744 .mr = 7,
745 .nr = 16,
746 };
747 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
Marat Dukhan27121322019-12-09 14:57:40 -0800748 switch (cpuinfo_get_core(0)->uarch) {
749 case cpuinfo_uarch_zen:
750 xnn_params.f32.gemm = (struct gemm_parameters) {
751 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x16s4__fma3_broadcast,
752 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x16s4__fma3_broadcast,
753 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16s4__fma3_broadcast,
754 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16s4__fma3_broadcast,
755 .mr = 4,
756 .nr = 16,
757 .log2_sr = 2,
758 };
759 break;
760 default:
761 xnn_params.f32.gemm = (struct gemm_parameters) {
762 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_5x16__fma3_broadcast,
763 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_5x16__fma3_broadcast,
764 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__fma3_broadcast,
765 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__fma3_broadcast,
766 .mr = 5,
767 .nr = 16,
768 };
769 break;
770 }
Marat Dukhan1025ea32019-11-21 16:01:08 -0800771 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
772 xnn_params.f32.gemm = (struct gemm_parameters) {
Marat Dukhaneccfd712019-12-08 16:49:27 -0800773 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_5x16__avx_broadcast,
774 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_5x16__avx_broadcast,
775 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x16__avx_broadcast,
776 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x16__avx_broadcast,
777 .mr = 5,
778 .nr = 16,
Marat Dukhan1025ea32019-11-21 16:01:08 -0800779 };
780 } else {
781 xnn_params.f32.gemm = (struct gemm_parameters) {
782 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__sse_load1,
783 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__sse_load1,
784 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__sse_load1,
785 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__sse_load1,
786 .mr = 4,
787 .nr = 8,
788 };
789 }
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700790 xnn_params.f32.gemm2 = (struct gemm_parameters) {
791 .gemm = NULL,
792 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2c4__sse,
793 .mr = 4,
794 .nr = 2,
795 .log2_kr = 2,
796 };
Marat Dukhan479f87e2019-11-27 15:17:06 -0800797 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
798 xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
799 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__avx512f,
800 .cr = 16,
801 .mr = 4,
802 };
803 xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
804 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__avx512f,
805 .cr = 16,
806 .mr = 9,
807 };
808 xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
809 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x25__avx512f,
810 .cr = 16,
811 .mr = 25,
812 };
813 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
Marat Dukhan17ec5f32019-11-22 13:34:16 -0800814 xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
815 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__fma3,
816 .cr = 16,
817 .mr = 4,
818 };
819 xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
820 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__fma3,
821 .cr = 16,
822 .mr = 9,
823 };
824 xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
825 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__fma3,
826 .cr = 8,
827 .mr = 25,
828 };
829 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
830 xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
831 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x4__avx,
832 .cr = 16,
833 .mr = 4,
834 };
835 xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
836 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up16x9__avx,
837 .cr = 16,
838 .mr = 9,
839 };
840 xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
841 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__avx,
842 .cr = 8,
843 .mr = 25,
844 };
845 } else {
846 xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
847 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x4__sse,
848 .cr = 8,
849 .mr = 4,
850 };
851 xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
852 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x9__sse,
853 .cr = 8,
854 .mr = 9,
855 };
856 xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
857 .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up8x25__sse,
858 .cr = 8,
859 .mr = 25,
860 };
861 }
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700862 xnn_params.f32.avgpool = (struct avgpool_parameters) {
863 .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__sse,
864 .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__sse,
865 .mr = 9,
866 .qr = 8,
867 };
868 xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
869 .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__sse,
870 .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__sse,
871 .mr = 9,
872 .qr = 8,
873 };
874 xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
875 .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__sse,
876 .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__sse,
877 .mr = 7,
878 };
879 xnn_params.f32.maxpool = (struct maxpool_parameters) {
Marat Dukhan329da642019-11-19 21:44:39 -0800880 .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__sse_c4,
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700881 .mr = 9,
882 .qr = 8,
883 };
884 xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
Marat Dukhan329da642019-11-19 21:44:39 -0800885 .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__sse2_c4,
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700886 .mr = 4,
887 };
888 xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
Marat Dukhan329da642019-11-19 21:44:39 -0800889 .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__sse2_c4,
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700890 .mr = 9,
891 };
892 xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
Marat Dukhan329da642019-11-19 21:44:39 -0800893 .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__sse2_c4,
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700894 .mr = 9,
895 .qr = 8,
896 };
Marat Dukhan69722492019-11-11 19:55:50 -0800897 xnn_params.f32.bilinear = (struct bilinear_parameters) {
898 .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__sse_c8,
899 .pixel_tile = 1,
900 .channel_tile = 8,
901 };
Marat Dukhane2c3f292019-11-27 15:40:54 -0800902 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
903 xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__avx512f;
904 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
905 xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__avx;
906 } else {
907 xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__sse;
908 }
Marat Dukhan662faa02019-12-09 22:48:16 -0800909 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
910 xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__avx512f_x32;
911 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_fma3()) {
912 xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__fma3_x16;
913 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
914 xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__avx_x16;
915 } else {
916 xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__sse_x8;
917 }
Marat Dukhanfa0a4322020-01-06 16:14:29 -0800918 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx2()) {
919 xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x40;
920 } else {
921 xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__sse2_p5_div_x16;
922 }
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700923 xnn_params.f32.prelu = (struct prelu_parameters) {
Marat Dukhan69c3f2c2019-11-06 12:30:01 -0800924 .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__sse2_2x8,
925 .row_tile = 2,
926 .channel_tile = 8,
Marat Dukhan8fe54e42019-10-10 14:12:59 -0700927 };
Marat Dukhan1edc4542020-01-27 12:40:13 -0800928 xnn_params.f32.raddstoreexpminusmax = xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x20_acc2;
929 xnn_params.f32.rmax = xnn_f32_rmax_ukernel__sse;
Marat Dukhan9a88efe2019-12-10 15:54:24 -0800930 if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx512f()) {
931 xnn_params.f32.vadd = (struct vbinary_parameters) {
932 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__avx512f_x32,
933 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx512f_x32,
934 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx512f_x32,
935 .element_tile = 32,
936 };
937 xnn_params.f32.vdiv = (struct vbinary_parameters) {
938 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__avx512f_x32,
939 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__avx512f_x32,
940 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__avx512f_x32,
941 .element_tile = 32,
942 };
943 xnn_params.f32.vmax = (struct vbinary_parameters) {
944 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__avx512f_x32,
945 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx512f_x32,
946 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx512f_x32,
947 .element_tile = 32,
948 };
949 xnn_params.f32.vmin = (struct vbinary_parameters) {
950 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__avx512f_x32,
951 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx512f_x32,
952 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx512f_x32,
953 .element_tile = 32,
954 };
955 xnn_params.f32.vmul = (struct vbinary_parameters) {
956 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__avx512f_x32,
957 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx512f_x32,
958 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx512f_x32,
959 .element_tile = 32,
960 };
961 xnn_params.f32.vsub = (struct vbinary_parameters) {
962 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__avx512f_x32,
963 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__avx512f_x32,
964 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__avx512f_x32,
965 .element_tile = 32,
966 };
967 } else if (!XNN_PLATFORM_MOBILE && cpuinfo_has_x86_avx()) {
968 xnn_params.f32.vadd = (struct vbinary_parameters) {
969 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__avx_x16,
970 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx_x16,
971 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__avx_x16,
972 .element_tile = 16,
973 };
974 xnn_params.f32.vdiv = (struct vbinary_parameters) {
975 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__avx_x16,
976 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__avx_x16,
977 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__avx_x16,
978 .element_tile = 16,
979 };
980 xnn_params.f32.vmax = (struct vbinary_parameters) {
981 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__avx_x16,
982 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx_x16,
983 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__avx_x16,
984 .element_tile = 16,
985 };
986 xnn_params.f32.vmin = (struct vbinary_parameters) {
987 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__avx_x16,
988 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx_x16,
989 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__avx_x16,
990 .element_tile = 16,
991 };
992 xnn_params.f32.vmul = (struct vbinary_parameters) {
993 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__avx_x16,
994 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx_x16,
995 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__avx_x16,
996 .element_tile = 16,
997 };
998 xnn_params.f32.vsub = (struct vbinary_parameters) {
999 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__avx_x16,
1000 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__avx_x16,
1001 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__avx_x16,
1002 .element_tile = 16,
1003 };
1004 } else {
1005 xnn_params.f32.vadd = (struct vbinary_parameters) {
1006 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__sse_x8,
1007 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__sse_x8,
1008 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__sse_x8,
1009 .element_tile = 8,
1010 };
1011 xnn_params.f32.vdiv = (struct vbinary_parameters) {
1012 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__sse_x8,
1013 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__sse_x8,
1014 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__sse_x8,
1015 .element_tile = 8,
1016 };
1017 xnn_params.f32.vmax = (struct vbinary_parameters) {
1018 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__sse_x8,
1019 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__sse_x8,
1020 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__sse_x8,
1021 .element_tile = 8,
1022 };
1023 xnn_params.f32.vmin = (struct vbinary_parameters) {
1024 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__sse_x8,
1025 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__sse_x8,
1026 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__sse_x8,
1027 .element_tile = 8,
1028 };
1029 xnn_params.f32.vmul = (struct vbinary_parameters) {
1030 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__sse_x8,
1031 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__sse_x8,
1032 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__sse_x8,
1033 .element_tile = 8,
1034 };
1035 xnn_params.f32.vsub = (struct vbinary_parameters) {
1036 .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__sse_x8,
1037 .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__sse_x8,
1038 .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__sse_x8,
1039 .element_tile = 8,
1040 };
1041 }
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001042 xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
Marat Dukhan49e6ee92019-11-06 15:55:29 -08001043 .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__sse_2x,
1044 .channel_tile = 4,
1045 .row_tile = 2,
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001046 };
Marat Dukhanefc47b82019-11-18 09:25:38 -08001047 #ifndef XNN_NO_NCHW_OPERATORS
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001048 xnn_params.f32.spmm = (struct spmm_parameters) {
1049 .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_4x1__sse,
1050 .mr = 4,
1051 .nr = 1,
1052 };
1053 xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
1054 .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__sse,
1055 .input_width_tile = 4,
1056 .output_width_tile = 4,
1057 .output_height_tile = 1,
1058 };
1059 xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
1060 .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__sse,
1061 .input_width_tile = 4,
1062 .output_width_tile = 4,
1063 .output_height_tile = 1,
1064 };
1065 xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
1066 .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__sse_x4,
1067 .channel_tile = 4,
1068 };
Marat Dukhanefc47b82019-11-18 09:25:38 -08001069 #endif // XNN_NO_NCHW_OPERATORS
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001070 #endif // XNN_NO_F32_OPERATORS
XNNPACK Teamb455b122019-09-27 18:10:33 -07001071
1072 /**************************** X32 micro-kernels ****************************/
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001073 #ifndef XNN_NO_X32_OPERATORS
1074 xnn_params.x32.pad = (struct pad_parameters) {
1075 .ukernel = xnn_x32_pad_x2__sse2,
1076 .mr = 2,
1077 };
1078 xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
1079 xnn_params.x32.zip = (struct zip_parameters) {
1080 .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__sse2,
1081 .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__sse2,
1082 .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__sse2,
1083 .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__sse2,
1084 };
1085 #endif // XNN_NO_X32_OPERATORS
XNNPACK Teamb455b122019-09-27 18:10:33 -07001086
Marat Dukhan1dadbf72019-10-01 10:46:20 -07001087#elif XNN_ARCH_PNACL || XNN_ARCH_WASMSIMD
  // Unlike most other architectures, on x86/x86-64, when floating-point instructions have no NaN
  // arguments but produce a NaN output, the output NaN has its sign bit set.  We use this to
  // distinguish x86/x86-64 from other architectures by subtracting two infinities of the same
  // sign (which must produce a NaN per the IEEE 754 standard).
  static volatile uint32_t minus_inf = UINT32_C(0xFF800000);
  const bool is_wasm_x86 = (int32_t) xnn_stub_wasm_f32_sub(minus_inf, minus_inf) < 0;
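  // A minimal standalone sketch of the same detection, for illustration only (not used by the
  // library).  It assumes the typical default quiet-NaN encodings: x86/x86-64 produces 0xFFC00000
  // (sign bit set) for Inf - Inf, while most other targets produce 0x7FC00000 (sign bit clear);
  // exact payloads are implementation-defined, and the helper name below is ours, not XNNPACK's:
  //
  //   #include <math.h>      // INFINITY
  //   #include <stdbool.h>
  //   #include <stdint.h>
  //   #include <string.h>    // memcpy
  //
  //   static bool nan_sign_bit_is_set(void) {
  //     volatile float minus_infinity = -INFINITY;
  //     const float nan_result = minus_infinity - minus_infinity;  // invalid op -> quiet NaN
  //     int32_t bits;
  //     memcpy(&bits, &nan_result, sizeof(bits));  // reinterpret the NaN's bit pattern
  //     return bits < 0;                           // sign bit set <=> x86-style default NaN
  //   }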
1094
XNNPACK Teamb455b122019-09-27 18:10:33 -07001095 /**************************** Q8 micro-kernels ****************************/
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001096 #ifndef XNN_NO_Q8_OPERATORS
1097 xnn_params.q8.gemm = (struct gemm_parameters) {
1098 .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_2x2__scalar,
1099 .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_2x2__scalar,
1100 .mr = 2,
1101 .nr = 2,
1102 };
1103 xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
1104 .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up1x9__scalar,
1105 .cr = 1,
1106 .mr = 9,
1107 };
1108 xnn_params.q8.avgpool = (struct avgpool_parameters) {
1109 .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__scalar,
1110 .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__scalar,
1111 .mr = 9,
1112 .qr = 8,
1113 };
1114 xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
1115 .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__scalar,
1116 .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__scalar,
1117 .mr = 7,
1118 };
1119 xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__scalar;
1120 #endif // XNN_NO_Q8_OPERATORS
XNNPACK Teamb455b122019-09-27 18:10:33 -07001121
1122 /**************************** U8 micro-kernels ****************************/
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001123 #ifndef XNN_NO_U8_OPERATORS
1124 xnn_params.u8.maxpool = (struct maxpool_parameters) {
Marat Dukhan329da642019-11-19 21:44:39 -08001125 .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__scalar_c1,
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001126 .mr = 9,
1127 .qr = 8,
1128 };
1129 xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__scalar;
1130 xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
1131 xnn_params.u8.rmax = xnn_u8_rmax_ukernel__scalar;
1132 #endif // XNN_NO_U8_OPERATORS
XNNPACK Teamb455b122019-09-27 18:10:33 -07001133
1134 /**************************** X8 micro-kernels ****************************/
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001135 #ifndef XNN_NO_X8_OPERATORS
1136 xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
1137 xnn_params.x8.zip = (struct zip_parameters) {
1138 .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__scalar,
1139 .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__scalar,
1140 .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__scalar,
1141 .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__scalar,
1142 };
1143 #endif // XNN_NO_X8_OPERATORS
XNNPACK Teamb455b122019-09-27 18:10:33 -07001144
1145 /**************************** F32 micro-kernels ****************************/
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001146 #ifndef XNN_NO_F32_OPERATORS
1147 if (is_wasm_x86) {
1148 xnn_params.f32.gemm = (struct gemm_parameters) {
Marat Dukhancb801972019-10-23 02:10:33 -07001149 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x8__psimd_splat,
1150 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x8__psimd_splat,
1151 .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8__psimd_splat,
1152 .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8__psimd_splat,
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001153 .mr = 4,
1154 .nr = 8,
Marat Dukhan8fe54e42019-10-10 14:12:59 -07001155 };
1156 } else {
1157 xnn_params.f32.gemm = (struct gemm_parameters) {
Marat Dukhancd945c62019-10-25 11:59:50 -07001158 .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_6x8s4__psimd,
1159 .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_6x8s4__psimd,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x8s4__psimd,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x8s4__psimd,
        .mr = 6,
        .nr = 8,
        .log2_sr = 2,
      };
    }
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2c4__psimd,
      .mr = 4,
      .nr = 2,
      .log2_kr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x4__psimd_acc2,
      .cr = 4,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x9__psimd_acc2,
      .cr = 4,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up4x25__psimd_acc2,
      .cr = 4,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__psimd,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__psimd,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__psimd,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__psimd,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__psimd,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__psimd,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__psimd_c4,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__psimd_c4,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__psimd_c4,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__psimd_c8,
      .pixel_tile = 1,
      .channel_tile = 8,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__psimd;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__psimd_x8;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__psimd_p5_div_x16;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__psimd_2x8,
      .row_tile = 2,
      .channel_tile = 8,
    };
    xnn_params.f32.raddstoreexpminusmax = xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x16_acc2;
    xnn_params.f32.rmax = xnn_f32_rmax_ukernel__psimd;
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__psimd_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__psimd_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__psimd_x4,
      .element_tile = 4,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__psimd_x8,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__psimd_x8,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__psimd_x8,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c4__psimd_2x,
      .channel_tile = 4,
      .row_tile = 2,
    };
  #endif  // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__psimd,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__psimd;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__psimd,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__psimd,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__psimd,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__psimd,
    };
  #endif  // XNN_NO_X32_OPERATORS

#elif XNN_ARCH_WASM || XNN_ARCH_ASMJS
  // Unlike most other architectures, on x86/x86-64, when floating-point instructions have no NaN
  // arguments but produce a NaN output, the output NaN has its sign bit set.  We use this to
  // distinguish x86/x86-64 from other architectures by subtracting two infinities of the same
  // sign (which must produce a NaN per the IEEE 754 standard).
  static volatile uint32_t minus_inf = UINT32_C(0xFF800000);
  const bool is_wasm_x86 = (int32_t) xnn_stub_wasm_f32_sub(minus_inf, minus_inf) < 0;

  /**************************** Q8 micro-kernels ****************************/
  #ifndef XNN_NO_Q8_OPERATORS
    xnn_params.q8.gemm = (struct gemm_parameters) {
      .gemm = (xnn_gemm_ukernel_function) xnn_q8_gemm_ukernel_2x2__scalar,
      .igemm = (xnn_igemm_ukernel_function) xnn_q8_igemm_ukernel_2x2__scalar,
      .mr = 2,
      .nr = 2,
    };
    xnn_params.q8.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_q8_dwconv_ukernel_up1x9__scalar,
      .cr = 1,
      .mr = 9,
    };
    xnn_params.q8.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_q8_avgpool_ukernel_up9__scalar,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_q8_avgpool_ukernel_mp9p8q__scalar,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.q8.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_q8_gavgpool_ukernel_up7__scalar,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_q8_gavgpool_ukernel_mp7p7q__scalar,
      .mr = 7,
    };
    xnn_params.q8.vadd = (xnn_vadd_ukernel_function) xnn_q8_vadd_ukernel__scalar;
  #endif  // XNN_NO_Q8_OPERATORS

  /**************************** U8 micro-kernels ****************************/
  #ifndef XNN_NO_U8_OPERATORS
    xnn_params.u8.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_u8_maxpool_ukernel_9p8x__scalar_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.u8.clamp = (xnn_univector_ukernel_function) xnn_u8_clamp_ukernel__scalar;
    xnn_params.u8.lut32norm = xnn_u8_lut32norm_ukernel__scalar;
    xnn_params.u8.rmax = xnn_u8_rmax_ukernel__scalar;
  #endif  // XNN_NO_U8_OPERATORS

  /**************************** X8 micro-kernels ****************************/
  #ifndef XNN_NO_X8_OPERATORS
    xnn_params.x8.lut = xnn_x8_lut_ukernel__scalar;
    xnn_params.x8.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x8_zip_x2_ukernel__scalar,
      .x3 = (xnn_zipc_ukernel_function) xnn_x8_zip_x3_ukernel__scalar,
      .x4 = (xnn_zipc_ukernel_function) xnn_x8_zip_x4_ukernel__scalar,
      .xm = (xnn_zipv_ukernel_function) xnn_x8_zip_xm_ukernel__scalar,
    };
  #endif  // XNN_NO_X8_OPERATORS

  /**************************** F32 micro-kernels ****************************/
  #ifndef XNN_NO_F32_OPERATORS
    if (is_wasm_x86) {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_2x4__scalar,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_2x4__scalar,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x4__wasm,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x4__wasm,
        .mr = 2,
        .nr = 4,
      };
    } else {
      xnn_params.f32.gemm = (struct gemm_parameters) {
        .gemm = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_4x4__wasm,
        .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x4__wasm,
        .gemm1 = (xnn_gemm_ukernel_function) xnn_f32_gemm_ukernel_1x4__wasm,
        .igemm1 = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_1x4__wasm,
        .mr = 4,
        .nr = 4,
      };
    }
    xnn_params.f32.gemm2 = (struct gemm_parameters) {
      .gemm = NULL,
      .igemm = (xnn_igemm_ukernel_function) xnn_f32_igemm_ukernel_4x2__wasm,
      .mr = 4,
      .nr = 2,
    };
    xnn_params.f32.dwconv[0] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x4__wasm_acc2,
      .cr = 1,
      .mr = 4,
    };
    xnn_params.f32.dwconv[1] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x9__wasm_acc2,
      .cr = 1,
      .mr = 9,
    };
    xnn_params.f32.dwconv[2] = (struct dwconv_parameters) {
      .up = (xnn_dwconv_up_ukernel_function) xnn_f32_dwconv_ukernel_up1x25__wasm_acc2,
      .cr = 1,
      .mr = 25,
    };
    xnn_params.f32.avgpool = (struct avgpool_parameters) {
      .up = (xnn_avgpool_up_ukernel_function) xnn_f32_avgpool_ukernel_up9__wasm,
      .mp = (xnn_avgpool_mp_ukernel_function) xnn_f32_avgpool_ukernel_mp9p8q__wasm,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.pavgpool = (struct pavgpool_parameters) {
      .up = (xnn_pavgpool_up_ukernel_function) xnn_f32_pavgpool_ukernel_up9__wasm,
      .mp = (xnn_pavgpool_mp_ukernel_function) xnn_f32_pavgpool_ukernel_mp9p8q__wasm,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.gavgpool = (struct gavgpool_parameters) {
      .up = (xnn_gavgpool_up_ukernel_function) xnn_f32_gavgpool_ukernel_up7__wasm,
      .mp = (xnn_gavgpool_mp_ukernel_function) xnn_f32_gavgpool_ukernel_mp7p7q__wasm,
      .mr = 7,
    };
    xnn_params.f32.maxpool = (struct maxpool_parameters) {
      .ukernel = (xnn_maxpool_ukernel_function) xnn_f32_maxpool_ukernel_9p8x__wasm_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.argmaxpool[0] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_4x__scalar_c1,
      .mr = 4,
    };
    xnn_params.f32.argmaxpool[1] = (struct argmaxpool_parameters) {
      .up = (xnn_argmaxpool_up_ukernel_function) xnn_f32_argmaxpool_ukernel_9x__scalar_c1,
      .mr = 9,
    };
    xnn_params.f32.argmaxpool[2] = (struct argmaxpool_parameters) {
      .mp = (xnn_argmaxpool_mp_ukernel_function) xnn_f32_argmaxpool_ukernel_9p8x__scalar_c1,
      .mr = 9,
      .qr = 8,
    };
    xnn_params.f32.bilinear = (struct bilinear_parameters) {
      .ukernel = (xnn_bilinear_ukernel_function) xnn_f32_bilinear_ukernel__scalar_c2,
      .pixel_tile = 1,
      .channel_tile = 2,
    };
    xnn_params.f32.clamp = (xnn_univector_ukernel_function) xnn_f32_clamp_ukernel__wasm;
    xnn_params.f32.hswish = (xnn_univector_ukernel_function) xnn_f32_hswish_ukernel__wasm_x4;
    xnn_params.f32.sigmoid = (xnn_univector_ukernel_function) xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2;
    xnn_params.f32.prelu = (struct prelu_parameters) {
      .ukernel = (xnn_prelu_ukernel_function) xnn_f32_prelu_ukernel__wasm_2x4,
      .row_tile = 2,
      .channel_tile = 4,
    };
    xnn_params.f32.raddstoreexpminusmax = xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2;
    xnn_params.f32.rmax = xnn_f32_rmax_ukernel__scalar;
    xnn_params.f32.vadd = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vadd_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vaddc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vdiv = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdiv_ukernel__wasm_x2,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vdivc_ukernel__wasm_x2,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrdivc_ukernel__wasm_x2,
      .element_tile = 2,
    };
    xnn_params.f32.vmax = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmax_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmaxc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmin = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmin_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vminc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmul = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmul_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vmulc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vsub = (struct vbinary_parameters) {
      .op_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsub_ukernel__wasm_x4,
      .opc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vsubc_ukernel__wasm_x4,
      .ropc_ukernel = (xnn_vbinary_ukernel_function) xnn_f32_vrsubc_ukernel__wasm_x4,
      .element_tile = 8,
    };
    xnn_params.f32.vmulcaddc = (struct vmulcaddc_parameters) {
      .ukernel = (xnn_vmulcaddc_ukernel_function) xnn_f32_vmulcaddc_ukernel_c1__wasm_2x,
      .channel_tile = 1,
      .row_tile = 2,
    };
    #ifndef XNN_NO_NCHW_OPERATORS
      xnn_params.f32.spmm = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x1__scalar,
        .mr = 8,
        .nr = 1,
      };
      xnn_params.f32.spmm2 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x2__scalar,
        .mr = 8,
        .nr = 2,
      };
      xnn_params.f32.spmm4 = (struct spmm_parameters) {
        .ukernel = (xnn_spmm_ukernel_function) xnn_f32_spmm_ukernel_8x4__scalar,
        .mr = 8,
        .nr = 4,
      };
      xnn_params.f32.hwc2spchw_dconv3x3c3s2 = (struct hwc2spchw_dconv_parameters) {
        .ukernel_with_symm_padding =
          (xnn_conv_hwc2spchw_ukernel_function) xnn_f32_conv_hwc2spchw_ukernel_3x3s2p1c3x4__scalar_1x1,
        .output_channel_tile = 4,
        .output_height_tile = 1,
        .output_width_tile = 1,
      };
      xnn_params.f32.spchw_dwconv3x3 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3p1__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv3x3s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_3x3s2p1__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5p2__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_dwconv5x5s2 = (struct spchw_dwconv_parameters) {
        .ukernel = (xnn_dwconv_spchw_ukernel_function) xnn_f32_dwconv_spchw_ukernel_5x5s2p2__scalar,
        .input_width_tile = 1,
        .output_width_tile = 1,
        .output_height_tile = 1,
      };
      xnn_params.f32.spchw_gavgpool = (struct spchw_gavgpool_parameters) {
        .ukernel = (xnn_gavgpool_spchw_ukernel_function) xnn_f32_gavgpool_spchw_ukernel__scalar_x1,
        .channel_tile = 1,
      };
    #endif  // XNN_NO_NCHW_OPERATORS
  #endif  // XNN_NO_F32_OPERATORS

  /**************************** X32 micro-kernels ****************************/
  #ifndef XNN_NO_X32_OPERATORS
    xnn_params.x32.pad = (struct pad_parameters) {
      .ukernel = xnn_x32_pad_x2__scalar,
      .mr = 2,
    };
    xnn_params.x32.unpool = (xnn_unpool_ukernel_function) xnn_x32_unpool_ukernel__scalar;
    xnn_params.x32.zip = (struct zip_parameters) {
      .x2 = (xnn_zipc_ukernel_function) xnn_x32_zip_x2_ukernel__scalar,
      .x3 = (xnn_zipc_ukernel_function) xnn_x32_zip_x3_ukernel__scalar,
      .x4 = (xnn_zipc_ukernel_function) xnn_x32_zip_x4_ukernel__scalar,
      .xm = (xnn_zipv_ukernel_function) xnn_x32_zip_xm_ukernel__scalar,
    };
  #endif  // XNN_NO_X32_OPERATORS

#else
  #error "Unsupported architecture"
#endif
  xnn_params.initialized = true;
}

enum xnn_status xnn_initialize(const struct xnn_allocator* allocator) {
  #ifndef __EMSCRIPTEN__
    if (!cpuinfo_initialize()) {
      return xnn_status_out_of_memory;
    }
  #endif
  pthread_once(&init_guard, &init);
  if (xnn_params.initialized) {
    if (allocator != NULL) {
      memcpy(&xnn_params.allocator, allocator, sizeof(struct xnn_allocator));
    } else {
      xnn_params.allocator.allocate = &xnn_allocate;
      xnn_params.allocator.reallocate = &xnn_reallocate;
      xnn_params.allocator.deallocate = &xnn_deallocate;
      xnn_params.allocator.aligned_allocate = &xnn_aligned_allocate;
      xnn_params.allocator.aligned_deallocate = &xnn_aligned_deallocate;
    }
    return xnn_status_success;
  } else {
    return xnn_status_unsupported_hardware;
  }
}
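
// Usage sketch (illustration only, not part of this file): a caller either passes NULL to install
// the default allocator above, or supplies its own hooks.  The wrapper functions and their names
// below are hypothetical; the hook signatures follow our reading of the xnn_allocator declaration
// in xnnpack.h, so check the header before relying on them.
//
//   #include <stdlib.h>
//   #include <xnnpack.h>
//
//   static void* my_allocate(void* context, size_t size) { return malloc(size); }
//   static void* my_reallocate(void* context, void* pointer, size_t size) { return realloc(pointer, size); }
//   static void my_deallocate(void* context, void* pointer) { free(pointer); }
//
//   const struct xnn_allocator my_allocator = {
//     .allocate = my_allocate,
//     .reallocate = my_reallocate,
//     .deallocate = my_deallocate,
//     // A real allocator must also provide .aligned_allocate and .aligned_deallocate, because the
//     // whole struct is copied verbatim above and every hook may be called.
//   };
//
//   enum xnn_status status = xnn_initialize(&my_allocator);  // or xnn_initialize(NULL) for defaults
//   if (status != xnn_status_success) { /* handle xnn_status_unsupported_hardware, etc. */ }
//   /* ... create and run operators ... */
//   xnn_deinitialize();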

enum xnn_status xnn_deinitialize(void) {
  #ifndef __EMSCRIPTEN__
    cpuinfo_deinitialize();
  #endif
  return xnn_status_success;
}