blob: d1d6ab4e052a4188e45d33c0eef3416f8346a3cd [file] [log] [blame]
Anthony Barbierf45d5a92018-01-24 16:23:15 +00001/*
2 * Copyright (c) 2017-2018 ARM Limited.
3 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Jenkinsb3a371b2018-05-23 11:36:53 +010024#include "arm_compute/graph.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000025#include "support/ToolchainSupport.h"
26#include "utils/GraphUtils.h"
27#include "utils/Utils.h"
28
29#include <cstdlib>
30#include <tuple>
31
32using namespace arm_compute::utils;
Jenkinsb3a371b2018-05-23 11:36:53 +010033using namespace arm_compute::graph::frontend;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000034using namespace arm_compute::graph_utils;
35
/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels, [optional] Fast math for convolution layer (0 = DISABLED, 1 = ENABLED) )
 */
Jenkinsb3a371b2018-05-23 11:36:53 +010041class InceptionV3Example : public Example
Anthony Barbierf45d5a92018-01-24 16:23:15 +000042{
43public:
44 void do_setup(int argc, char **argv) override
45 {
46 std::string data_path; /* Path to the trainable data */
47 std::string image; /* Image data */
48 std::string label; /* Label data */
49
Anthony Barbier06ea0482018-02-22 15:45:35 +000050 // Create a preprocessor object
51 std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();
Anthony Barbierf45d5a92018-01-24 16:23:15 +000052
Anthony Barbier06ea0482018-02-22 15:45:35 +000053 // Set target. 0 (NEON), 1 (OpenCL), 2 (OpenCL with Tuner). By default it is NEON
Jenkinsb3a371b2018-05-23 11:36:53 +010054 const int target = argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0;
55 Target target_hint = set_target_hint(target);
56 FastMathHint fast_math_hint = FastMathHint::DISABLED;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000057
58 // Parse arguments
59 if(argc < 2)
60 {
61 // Print help
Jenkinsb3a371b2018-05-23 11:36:53 +010062 std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels] [fast_math_hint]\n\n";
Anthony Barbierf45d5a92018-01-24 16:23:15 +000063 std::cout << "No data folder provided: using random values\n\n";
64 }
65 else if(argc == 2)
66 {
Jenkinsb3a371b2018-05-23 11:36:53 +010067 std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels] [fast_math_hint]\n\n";
Anthony Barbierf45d5a92018-01-24 16:23:15 +000068 std::cout << "No data folder provided: using random values\n\n";
69 }
70 else if(argc == 3)
71 {
72 data_path = argv[2];
Jenkinsb3a371b2018-05-23 11:36:53 +010073 std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels] [fast_math_hint]\n\n";
Anthony Barbierf45d5a92018-01-24 16:23:15 +000074 std::cout << "No image provided: using random values\n\n";
75 }
76 else if(argc == 4)
77 {
78 data_path = argv[2];
79 image = argv[3];
Jenkinsb3a371b2018-05-23 11:36:53 +010080 std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels] [fast_math_hint]\n\n";
Anthony Barbierf45d5a92018-01-24 16:23:15 +000081 std::cout << "No text file with labels provided: skipping output accessor\n\n";
82 }
Jenkinsb3a371b2018-05-23 11:36:53 +010083 else if(argc == 5)
Anthony Barbierf45d5a92018-01-24 16:23:15 +000084 {
85 data_path = argv[2];
86 image = argv[3];
87 label = argv[4];
Jenkinsb3a371b2018-05-23 11:36:53 +010088 std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " " << argv[4] << " [fast_math_hint]\n\n";
89 std::cout << "No fast math info provided: disabling fast math\n\n";
90 }
91 else
92 {
93 data_path = argv[2];
94 image = argv[3];
95 label = argv[4];
96 fast_math_hint = (std::strtol(argv[5], nullptr, 1) == 0) ? FastMathHint::DISABLED : FastMathHint::ENABLED;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000097 }
98
Jenkinsb3a371b2018-05-23 11:36:53 +010099 graph << target_hint
100 << fast_math_hint
101 << InputLayer(TensorDescriptor(TensorShape(299U, 299U, 3U, 1U), DataType::F32),
102 get_input_accessor(image, std::move(preprocessor), false))
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000103 << ConvolutionLayer(3U, 3U, 32U,
104 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
105 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100106 .set_name("Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000107 << BatchNormalizationLayer(get_weights_accessor(data_path,
108 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
109 get_weights_accessor(data_path,
110 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
111 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
112 "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100113 0.001f)
114 .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
115 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000116 << ConvolutionLayer(3U, 3U, 32U,
117 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
118 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100119 .set_name("Conv2d_2a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000120 << BatchNormalizationLayer(get_weights_accessor(data_path,
121 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
122 get_weights_accessor(data_path,
123 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
124 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
125 "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100126 0.001f)
127 .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
128 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000129
130 << ConvolutionLayer(3U, 3U, 64U,
131 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
132 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100133 .set_name("Conv2d_2b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000134 << BatchNormalizationLayer(get_weights_accessor(data_path,
135 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
136 get_weights_accessor(data_path,
137 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
138 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
139 "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100140 0.001f)
141 .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
142 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000143
Jenkinsb3a371b2018-05-23 11:36:53 +0100144 << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000145
146 << ConvolutionLayer(1U, 1U, 80U,
147 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
148 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100149 .set_name("Conv2d_3b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000150 << BatchNormalizationLayer(get_weights_accessor(data_path,
151 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
152 get_weights_accessor(data_path,
153 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
154 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
155 "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100156 0.001f)
157 .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
158 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000159
160 << ConvolutionLayer(3U, 3U, 192U,
161 get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
162 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100163 .set_name("Conv2d_4a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000164 << BatchNormalizationLayer(get_weights_accessor(data_path,
165 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
166 get_weights_accessor(data_path,
167 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
168 get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
169 "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100170 0.001f)
171 .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
172 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000173
Jenkinsb3a371b2018-05-23 11:36:53 +0100174 << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000175
Jenkinsb3a371b2018-05-23 11:36:53 +0100176 graph << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000177 32U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100178 .set_name("Mixed_5b/concat");
179 graph << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000180 64U, true)
Jenkinsb3a371b2018-05-23 11:36:53 +0100181 .set_name("Mixed_5c/concat");
182 graph << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000183 64U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100184 .set_name("Mixed_5d/concat");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000185
Jenkinsb3a371b2018-05-23 11:36:53 +0100186 graph << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000187
Jenkinsb3a371b2018-05-23 11:36:53 +0100188 graph << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000189 std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100190 .set_name("Mixed_6b/concat");
191 graph << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000192 std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100193 .set_name("Mixed_6c/concat");
194 graph << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000195 std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100196 .set_name("Mixed_6d/concat");
197 graph << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000198 std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100199 .set_name("Mixed_6e/concat");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000200
Jenkinsb3a371b2018-05-23 11:36:53 +0100201 graph << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000202 std::make_tuple(192U, 192U, 192U, 192U))
Jenkinsb3a371b2018-05-23 11:36:53 +0100203 .set_name("Mixed_7a/concat");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000204
Jenkinsb3a371b2018-05-23 11:36:53 +0100205 graph << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000206 std::make_tuple(448U, 384U, 384U, 384U), 192U)
Jenkinsb3a371b2018-05-23 11:36:53 +0100207 .set_name("Mixed_7b/concat");
208 graph << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000209 std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
Jenkinsb3a371b2018-05-23 11:36:53 +0100210 .set_name("Mixed_7c/concat");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000211
Jenkinsb3a371b2018-05-23 11:36:53 +0100212 graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000213 << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
214 "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
215 get_weights_accessor(data_path,
216 "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
217 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100218 .set_name("Logits/Conv2d_1c_1x1/convolution")
219 << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
220 << SoftmaxLayer().set_name("Predictions/Softmax")
221 << OutputLayer(get_output_accessor(label, 5));
Anthony Barbier06ea0482018-02-22 15:45:35 +0000222
Jenkinsb3a371b2018-05-23 11:36:53 +0100223 // Finalize graph
224 GraphConfig config;
225 config.use_tuner = (target == 2);
226 graph.finalize(target_hint, config);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000227 }
228
229 void do_run() override
230 {
231 graph.run();
232 }
233
234private:
Jenkinsb3a371b2018-05-23 11:36:53 +0100235 Stream graph{ 0, "InceptionV3" };
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000236
237private:
238 BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path,
239 unsigned int a_filt,
240 std::tuple<unsigned int, unsigned int> b_filters,
241 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
242 unsigned int d_filt,
243 bool is_name_different = false)
244 {
245 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000246
247 // This is due to a naming issue in the tf model
248 std::string conv_id0 = "_0a_";
249 std::string conv_id1 = "2d_0b_";
250 if(is_name_different)
251 {
252 conv_id0 = "_0b_";
253 conv_id1 = "_1_0c_";
254 }
255
Jenkinsb3a371b2018-05-23 11:36:53 +0100256 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000257 i_a << ConvolutionLayer(
258 1U, 1U, a_filt,
259 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
260 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
261 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100262 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000263 << BatchNormalizationLayer(
264 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
265 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
266 get_random_accessor(1.f, 1.f),
267 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100268 0.001f)
269 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
270 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000271
Jenkinsb3a371b2018-05-23 11:36:53 +0100272 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000273 i_b << ConvolutionLayer(
274 1U, 1U, std::get<0>(b_filters),
275 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
276 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
277 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100278 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000279 << BatchNormalizationLayer(
280 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
281 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
282 get_random_accessor(1.f, 1.f),
283 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100284 0.001f)
285 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
286 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000287 << ConvolutionLayer(
288 5U, 5U, std::get<1>(b_filters),
289 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
290 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
291 PadStrideInfo(1, 1, 2, 2))
Jenkinsb3a371b2018-05-23 11:36:53 +0100292 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000293 << BatchNormalizationLayer(
294 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
295 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
296 get_random_accessor(1.f, 1.f),
297 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100298 0.001f)
299 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
300 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000301
Jenkinsb3a371b2018-05-23 11:36:53 +0100302 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000303 i_c << ConvolutionLayer(
304 1U, 1U, std::get<0>(c_filters),
305 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
306 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
307 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100308 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000309 << BatchNormalizationLayer(
310 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
311 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
312 get_random_accessor(1.f, 1.f),
313 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100314 0.001f)
315 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
316 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000317 << ConvolutionLayer(
318 3U, 3U, std::get<1>(c_filters),
319 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
320 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
321 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100322 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000323 << BatchNormalizationLayer(
324 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
325 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
326 get_random_accessor(1.f, 1.f),
327 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100328 0.001f)
329 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
330 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000331 << ConvolutionLayer(
332 3U, 3U, std::get<2>(c_filters),
333 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
334 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
335 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100336 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000337 << BatchNormalizationLayer(
338 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
339 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
340 get_random_accessor(1.f, 1.f),
341 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100342 0.001f)
343 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
344 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000345
Jenkinsb3a371b2018-05-23 11:36:53 +0100346 SubStream i_d(graph);
347 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000348 << ConvolutionLayer(
349 1U, 1U, d_filt,
350 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
351 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
352 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100353 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000354 << BatchNormalizationLayer(
355 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
356 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
357 get_random_accessor(1.f, 1.f),
358 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100359 0.001f)
360 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
361 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000362
363 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
364 }
365
366 BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path,
367 unsigned int a_filt,
368 std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
369 {
370 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100371 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000372 i_a << ConvolutionLayer(
373 3U, 3U, a_filt,
374 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
375 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
376 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100377 .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000378 << BatchNormalizationLayer(
379 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
380 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
381 get_random_accessor(1.f, 1.f),
382 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100383 0.001f)
384 .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
385 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000386
Jenkinsb3a371b2018-05-23 11:36:53 +0100387 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000388 i_b << ConvolutionLayer(
389 1U, 1U, std::get<0>(b_filters),
390 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
391 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
392 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100393 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000394 << BatchNormalizationLayer(
395 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
396 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
397 get_random_accessor(1.f, 1.f),
398 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100399 0.001f)
400 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
401 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000402 << ConvolutionLayer(
403 3U, 3U, std::get<1>(b_filters),
404 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
405 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
406 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100407 .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000408 << BatchNormalizationLayer(
409 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
410 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
411 get_random_accessor(1.f, 1.f),
412 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100413 0.001f)
414 .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
415 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000416 << ConvolutionLayer(
417 3U, 3U, std::get<2>(b_filters),
418 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
419 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
420 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100421 .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000422 << BatchNormalizationLayer(
423 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
424 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
425 get_random_accessor(1.f, 1.f),
426 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100427 0.001f)
428 .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
429 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000430
Jenkinsb3a371b2018-05-23 11:36:53 +0100431 SubStream i_c(graph);
432 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000433
434 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
435 }
436
437 BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path,
438 unsigned int a_filt,
439 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
440 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
441 unsigned int d_filt)
442 {
443 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100444 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000445 i_a << ConvolutionLayer(
446 1U, 1U, a_filt,
447 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
448 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
449 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100450 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000451 << BatchNormalizationLayer(
452 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
453 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
454 get_random_accessor(1.f, 1.f),
455 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100456 0.001f)
457 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
458 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000459
Jenkinsb3a371b2018-05-23 11:36:53 +0100460 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000461 i_b << ConvolutionLayer(
462 1U, 1U, std::get<0>(b_filters),
463 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
464 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
465 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100466 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000467 << BatchNormalizationLayer(
468 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
469 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
470 get_random_accessor(1.f, 1.f),
471 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100472 0.001f)
473 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
474 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000475 << ConvolutionLayer(
476 7U, 1U, std::get<1>(b_filters),
477 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
478 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
479 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100480 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000481 << BatchNormalizationLayer(
482 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
483 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
484 get_random_accessor(1.f, 1.f),
485 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100486 0.001f)
487 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
488 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000489 << ConvolutionLayer(
490 1U, 7U, std::get<2>(b_filters),
491 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
492 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
493 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100494 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000495 << BatchNormalizationLayer(
496 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
497 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
498 get_random_accessor(1.f, 1.f),
499 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100500 0.001f)
501 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
502 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000503
Jenkinsb3a371b2018-05-23 11:36:53 +0100504 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000505 i_c << ConvolutionLayer(
506 1U, 1U, std::get<0>(c_filters),
507 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
508 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
509 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100510 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000511 << BatchNormalizationLayer(
512 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
513 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
514 get_random_accessor(1.f, 1.f),
515 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100516 0.001f)
517 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
518 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000519 << ConvolutionLayer(
520 1U, 7U, std::get<1>(c_filters),
521 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
522 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
523 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100524 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000525 << BatchNormalizationLayer(
526 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
527 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
528 get_random_accessor(1.f, 1.f),
529 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100530 0.001f)
531 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
532 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000533 << ConvolutionLayer(
534 7U, 1U, std::get<2>(c_filters),
535 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
536 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
537 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100538 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000539 << BatchNormalizationLayer(
540 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
541 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
542 get_random_accessor(1.f, 1.f),
543 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100544 0.001f)
545 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
546 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000547 << ConvolutionLayer(
548 1U, 7U, std::get<3>(c_filters),
549 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
550 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
551 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100552 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000553 << BatchNormalizationLayer(
554 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
555 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
556 get_random_accessor(1.f, 1.f),
557 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100558 0.001f)
559 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
560 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000561 << ConvolutionLayer(
562 7U, 1U, std::get<4>(c_filters),
563 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
564 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
565 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100566 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000567 << BatchNormalizationLayer(
568 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
569 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
570 get_random_accessor(1.f, 1.f),
571 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100572 0.001f)
573 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
574 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000575
Jenkinsb3a371b2018-05-23 11:36:53 +0100576 SubStream i_d(graph);
577 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000578 << ConvolutionLayer(
579 1U, 1U, d_filt,
580 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
581 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
582 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100583 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000584 << BatchNormalizationLayer(
585 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
586 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
587 get_random_accessor(1.f, 1.f),
588 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100589 0.001f)
590 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
591 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000592
593 return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
594 }
595
    /** Build an InceptionV3 grid-reduction node of type D.
     *
     * Three parallel branches are built from the current end of the stream and
     * depth-concatenated; the stride-2 convolutions and the stride-2 max pool
     * halve the spatial dimensions:
     *  - Branch_0: 1x1 convolution -> 3x3 convolution (stride 2)
     *  - Branch_1: 1x1 -> 1x7 -> 7x1 -> 3x3 (stride 2) convolutions
     *  - Branch_2: 3x3 max pooling (stride 2)
     * Every convolution is followed by batch normalization (gamma fixed to 1,
     * epsilon 0.001 as in the TF model) and a ReLU activation.
     *
     * @param[in] data_path  Path to the folder containing the cnn_data weights
     * @param[in] param_path Node name (e.g. "Mixed_7a"); used as prefix for weight file names and layer names
     * @param[in] a_filters  Numbers of filters of the two Branch_0 convolutions (1x1, 3x3)
     * @param[in] b_filters  Numbers of filters of the four Branch_1 convolutions (1x1, 1x7, 7x1, 3x3)
     *
     * @return Branch layer that depth-concatenates the three branches
     */
    BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path,
                                     std::tuple<unsigned int, unsigned int> a_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // Branch_0: 1x1 convolution then stride-2 3x3 convolution
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, std::get<0>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch_1: 1x1 then factorized 7x7 (1x7, 7x1) then stride-2 3x3
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                1U, 7U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<3>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch_2: stride-2 3x3 max pooling (no weights)
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
    }
694
    /** Build an InceptionV3 mixed node of type E (expanded-filter-bank node).
     *
     * Four parallel branches are built from the current end of the stream and
     * depth-concatenated. Branch_1 and Branch_2 each fork into two sub-streams
     * (1x3 and 3x1 convolutions applied to the same input) which are themselves
     * depth-concatenated before the final merge:
     *  - Branch_0: 1x1 convolution
     *  - Branch_1: 1x1 convolution -> {1x3, 3x1} fork
     *  - Branch_2: 1x1 -> 3x3 convolutions -> {1x3, 3x1} fork
     *  - Branch_3: 3x3 average pooling -> 1x1 convolution
     * Every convolution is followed by batch normalization (gamma fixed to 1,
     * epsilon 0.001 as in the TF model) and a ReLU activation.
     *
     * @param[in] data_path         Path to the folder containing the cnn_data weights
     * @param[in] param_path        Node name (e.g. "Mixed_7b"); used as prefix for weight file names and layer names
     * @param[in] a_filt            Number of filters of the Branch_0 1x1 convolution
     * @param[in] b_filters         Numbers of filters of the Branch_1 convolutions (1x1, 1x3, 3x1)
     * @param[in] c_filters         Numbers of filters of the Branch_2 convolutions (1x1, 3x3, 1x3, 3x1)
     * @param[in] d_filt            Number of filters of the Branch_3 1x1 convolution
     * @param[in] is_name_different True to use "_0c_" instead of "_0b_" for the Branch_1 3x1 convolution
     *                              (works around an inconsistency in the TF model's layer naming)
     *
     * @return Branch layer that depth-concatenates the four branches
     */
    BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt,
                                     bool is_name_different = false)
    {
        // This is due to a naming issue in the tf model: one instance of this
        // node labels the Branch_1 3x1 convolution "_0c_" rather than "_0b_".
        std::string conv_id = "_0b_";
        if(is_name_different)
        {
            conv_id = "_0c_";
        }

        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // Branch_0: single 1x1 convolution
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        // Branch_1: 1x1 convolution, then forked into 1x3 and 3x1 sub-streams
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");

        // First fork of Branch_1: 1x3 convolution on the 1x1 output
        SubStream i_b1(static_cast<IStream &>(i_b));
        i_b1 << ConvolutionLayer(
                 3U, 1U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");

        // Second fork of Branch_1: 3x1 convolution on the same 1x1 output
        // (conv_id selects "_0b_"/"_0c_" to match the TF weight file names)
        SubStream i_b2(static_cast<IStream &>(i_b));
        i_b2 << ConvolutionLayer(
                 1U, 3U, std::get<2>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");

        // Merge b1 and b2
        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");

        // Branch_2: 1x1 then 3x3 convolutions, then forked into 1x3 and 3x1
        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");

        // First fork of Branch_2: 1x3 convolution
        SubStream i_c1(static_cast<IStream &>(i_c));
        i_c1 << ConvolutionLayer(
                 3U, 1U, std::get<2>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");

        // Second fork of Branch_2: 3x1 convolution
        SubStream i_c2(static_cast<IStream &>(i_c));
        i_c2 << ConvolutionLayer(
                 1U, 3U, std::get<3>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");

        // Merge i_c1 and i_c2
        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");

        // Branch_3: average pooling followed by a 1x1 convolution
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }
861};
862
863/** Main program for Inception V3
864 *
865 * @param[in] argc Number of arguments
Jenkinsb3a371b2018-05-23 11:36:53 +0100866 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels, [optional] Fast math for convolution layer (0 = DISABLED, 1 = ENABLED) )
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000867 */
868int main(int argc, char **argv)
869{
870 return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
871}