blob: 517e4920cbf5c20da03b6aabb1511135cece85a8 [file] [log] [blame]
Anthony Barbierf45d5a92018-01-24 16:23:15 +00001/*
2 * Copyright (c) 2017-2018 ARM Limited.
3 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Jenkinsb3a371b2018-05-23 11:36:53 +010024#include "arm_compute/graph.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000025#include "support/ToolchainSupport.h"
Jenkins52ba29e2018-08-29 15:32:11 +000026#include "utils/CommonGraphOptions.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000027#include "utils/GraphUtils.h"
28#include "utils/Utils.h"
29
Anthony Barbierf45d5a92018-01-24 16:23:15 +000030using namespace arm_compute::utils;
Jenkinsb3a371b2018-05-23 11:36:53 +010031using namespace arm_compute::graph::frontend;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000032using namespace arm_compute::graph_utils;
33
Jenkinsb9abeae2018-11-22 11:58:08 +000034/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
Jenkinsb3a371b2018-05-23 11:36:53 +010035class InceptionV3Example : public Example
Anthony Barbierf45d5a92018-01-24 16:23:15 +000036{
37public:
Jenkins52ba29e2018-08-29 15:32:11 +000038 InceptionV3Example()
39 : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
Anthony Barbierf45d5a92018-01-24 16:23:15 +000040 {
Jenkins52ba29e2018-08-29 15:32:11 +000041 }
    /** Parse command line options, build the InceptionV3 graph and finalize it.
     *
     * @param[in] argc Number of command line arguments
     * @param[in] argv Command line arguments
     *
     * @return False when only the help message was requested (nothing to run), true otherwise.
     */
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Checks: this example has no quantized weights, so reject QASYMM8 up front
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object (TF-style preprocessing, matching how the model was trained)
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();

        // Create input descriptor: 299x299x3x1 in NCHW, permuted to the user-requested data layout
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor input_descriptor  = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout (the .npy weight files are stored in NCHW)
        const DataLayout weights_layout = DataLayout::NCHW;

        // Stem: Conv 3x3/2 -> Conv 3x3/1 -> Conv 3x3/1 (pad 1) -> MaxPool 3x3/2
        //       -> Conv 1x1 -> Conv 3x3 -> MaxPool 3x3/2
        // Each convolution is bias-free and followed by BatchNorm + ReLU.
        // NOTE(review): BatchNorm gamma is taken from get_random_accessor(1.f, 1.f),
        // i.e. a constant 1.f, while beta/mean/variance are loaded from files —
        // presumably gamma is folded elsewhere in the trained model; confirm against the checkpoint.
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");

        // Inception-A blocks (Mixed_5b..5d). Mixed_5c uses the alternative tf naming (is_name_different = true).
        graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              .set_name("Mixed_5b/concat");
        graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              .set_name("Mixed_5c/concat");
        graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)
              .set_name("Mixed_5d/concat");

        // Grid-size-reduction block (Mixed_6a)
        graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");

        // Inception-C blocks (Mixed_6b..6e)
        graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              .set_name("Mixed_6b/concat");
        graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6c/concat");
        graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6d/concat");
        graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
              .set_name("Mixed_6e/concat");

        // Second grid-size-reduction block (Mixed_7a)
        graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))
              .set_name("Mixed_7a/concat");

        // Inception-E blocks (Mixed_7b, Mixed_7c). Mixed_7c uses the alternative tf naming.
        graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              .set_name("Mixed_7b/concat");
        graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
              .set_name("Mixed_7c/concat");

        // Classifier head: global 8x8 average pool -> 1x1 conv (with biases, 1001 classes)
        // -> reshape to a flat vector -> softmax -> Top-5 output
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Logits/Conv2d_1c_1x1/convolution")
              << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
              << SoftmaxLayer().set_name("Predictions/Softmax")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph for the selected target backend
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }
209
    /** Execute the finalized graph once: pulls input through the input accessor and
     *  emits the Top-5 predictions via the output accessor set up in do_setup(). */
    void do_run() override
    {
        graph.run();
    }
214
215private:
    CommandLineParser  cmd_parser;    /**< Command line parser */
    CommonGraphOptions common_opts;   /**< Common example options, registered on @ref cmd_parser */
    CommonGraphParams  common_params; /**< Parameter values consumed from @ref common_opts */
    Stream             graph;         /**< Graph stream (id 0, named "InceptionV3") */
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000220
221private:
Jenkinsb9abeae2018-11-22 11:58:08 +0000222 ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000223 unsigned int a_filt,
224 std::tuple<unsigned int, unsigned int> b_filters,
225 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
226 unsigned int d_filt,
227 bool is_name_different = false)
228 {
229 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000230
231 // This is due to a naming issue in the tf model
232 std::string conv_id0 = "_0a_";
233 std::string conv_id1 = "2d_0b_";
234 if(is_name_different)
235 {
236 conv_id0 = "_0b_";
237 conv_id1 = "_1_0c_";
238 }
239
Jenkinsb3a371b2018-05-23 11:36:53 +0100240 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000241 i_a << ConvolutionLayer(
242 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000243 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000244 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
245 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100246 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000247 << BatchNormalizationLayer(
248 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
249 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
250 get_random_accessor(1.f, 1.f),
251 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100252 0.001f)
253 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
254 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000255
Jenkinsb3a371b2018-05-23 11:36:53 +0100256 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000257 i_b << ConvolutionLayer(
258 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000259 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000260 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
261 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100262 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000263 << BatchNormalizationLayer(
264 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
265 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
266 get_random_accessor(1.f, 1.f),
267 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100268 0.001f)
269 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
270 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000271 << ConvolutionLayer(
272 5U, 5U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000273 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000274 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
275 PadStrideInfo(1, 1, 2, 2))
Jenkinsb3a371b2018-05-23 11:36:53 +0100276 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000277 << BatchNormalizationLayer(
278 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
279 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
280 get_random_accessor(1.f, 1.f),
281 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100282 0.001f)
283 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
284 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000285
Jenkinsb3a371b2018-05-23 11:36:53 +0100286 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000287 i_c << ConvolutionLayer(
288 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000289 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000290 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
291 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100292 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000293 << BatchNormalizationLayer(
294 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
295 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
296 get_random_accessor(1.f, 1.f),
297 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100298 0.001f)
299 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
300 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000301 << ConvolutionLayer(
302 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000303 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000304 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
305 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100306 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000307 << BatchNormalizationLayer(
308 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
309 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
310 get_random_accessor(1.f, 1.f),
311 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100312 0.001f)
313 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
314 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000315 << ConvolutionLayer(
316 3U, 3U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000317 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000318 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
319 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100320 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000321 << BatchNormalizationLayer(
322 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
323 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
324 get_random_accessor(1.f, 1.f),
325 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100326 0.001f)
327 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
328 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000329
Jenkinsb3a371b2018-05-23 11:36:53 +0100330 SubStream i_d(graph);
331 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000332 << ConvolutionLayer(
333 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000334 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000335 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
336 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100337 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000338 << BatchNormalizationLayer(
339 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
340 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
341 get_random_accessor(1.f, 1.f),
342 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100343 0.001f)
344 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
345 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000346
Jenkinsb9abeae2018-11-22 11:58:08 +0000347 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000348 }
349
    /** Build a grid-size-reduction block (Mixed_6a): three parallel branches concatenated along channels.
     *
     * Branch_0: 3x3 conv, stride 2 (a_filt filters)
     * Branch_1: 1x1 conv -> 3x3 conv (pad 1) -> 3x3 conv, stride 2 (b_filters)
     * Branch_2: 3x3 max pool, stride 2
     * Every convolution is bias-free and followed by BatchNorm + ReLU.
     *
     * @param[in] data_path      Path to the trainable parameter files
     * @param[in] param_path     Block prefix inside the model data (e.g. "Mixed_6a")
     * @param[in] weights_layout Data layout the weights were trained in
     * @param[in] a_filt         Filters of Branch_0's 3x3 convolution
     * @param[in] b_filters      Filters of Branch_1's (1x1, 3x3, 3x3) convolutions
     *
     * @return Concatenation of the three branch sub-streams
     */
    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        // Branch_0: strided 3x3 convolution
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                3U, 3U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");

        // Branch_1: 1x1 -> 3x3 (pad 1) -> strided 3x3
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");

        // Branch_2: strided max pool (no trainable parameters)
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }
420
Jenkinsb9abeae2018-11-22 11:58:08 +0000421 ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000422 unsigned int a_filt,
423 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
424 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
425 unsigned int d_filt)
426 {
427 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100428 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000429 i_a << ConvolutionLayer(
430 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000431 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000432 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
433 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100434 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000435 << BatchNormalizationLayer(
436 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
437 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
438 get_random_accessor(1.f, 1.f),
439 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100440 0.001f)
441 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
442 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000443
Jenkinsb3a371b2018-05-23 11:36:53 +0100444 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000445 i_b << ConvolutionLayer(
446 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000447 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000448 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
449 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100450 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000451 << BatchNormalizationLayer(
452 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
453 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
454 get_random_accessor(1.f, 1.f),
455 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100456 0.001f)
457 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
458 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000459 << ConvolutionLayer(
460 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000461 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000462 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
463 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100464 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000465 << BatchNormalizationLayer(
466 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
467 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
468 get_random_accessor(1.f, 1.f),
469 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100470 0.001f)
471 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
472 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000473 << ConvolutionLayer(
474 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000475 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000476 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
477 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100478 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000479 << BatchNormalizationLayer(
480 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
481 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
482 get_random_accessor(1.f, 1.f),
483 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100484 0.001f)
485 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
486 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000487
Jenkinsb3a371b2018-05-23 11:36:53 +0100488 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000489 i_c << ConvolutionLayer(
490 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000491 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000492 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
493 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100494 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000495 << BatchNormalizationLayer(
496 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
497 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
498 get_random_accessor(1.f, 1.f),
499 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100500 0.001f)
501 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
502 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000503 << ConvolutionLayer(
504 1U, 7U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000505 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000506 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
507 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100508 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000509 << BatchNormalizationLayer(
510 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
511 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
512 get_random_accessor(1.f, 1.f),
513 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100514 0.001f)
515 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
516 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000517 << ConvolutionLayer(
518 7U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000519 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000520 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
521 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100522 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000523 << BatchNormalizationLayer(
524 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
525 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
526 get_random_accessor(1.f, 1.f),
527 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100528 0.001f)
529 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
530 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000531 << ConvolutionLayer(
532 1U, 7U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000533 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000534 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
535 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100536 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000537 << BatchNormalizationLayer(
538 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
539 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
540 get_random_accessor(1.f, 1.f),
541 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100542 0.001f)
543 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
544 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000545 << ConvolutionLayer(
546 7U, 1U, std::get<4>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000547 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000548 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
549 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100550 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000551 << BatchNormalizationLayer(
552 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
553 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
554 get_random_accessor(1.f, 1.f),
555 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100556 0.001f)
557 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
558 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000559
Jenkinsb3a371b2018-05-23 11:36:53 +0100560 SubStream i_d(graph);
561 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000562 << ConvolutionLayer(
563 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000564 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000565 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
566 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100567 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000568 << BatchNormalizationLayer(
569 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
570 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
571 get_random_accessor(1.f, 1.f),
572 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100573 0.001f)
574 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
575 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000576
Jenkinsb9abeae2018-11-22 11:58:08 +0000577 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000578 }
579
Jenkinsb9abeae2018-11-22 11:58:08 +0000580 ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Jenkins52ba29e2018-08-29 15:32:11 +0000581 std::tuple<unsigned int, unsigned int> a_filters,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000582 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
583 {
584 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100585 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000586 i_a << ConvolutionLayer(
587 1U, 1U, std::get<0>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000588 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000589 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
590 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100591 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000592 << BatchNormalizationLayer(
593 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
594 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
595 get_random_accessor(1.f, 1.f),
596 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100597 0.001f)
598 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
599 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000600 << ConvolutionLayer(
601 3U, 3U, std::get<1>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000602 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000603 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
604 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100605 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000606 << BatchNormalizationLayer(
607 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
608 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
609 get_random_accessor(1.f, 1.f),
610 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100611 0.001f)
612 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
613 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000614
Jenkinsb3a371b2018-05-23 11:36:53 +0100615 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000616 i_b << ConvolutionLayer(
617 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000618 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000619 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
620 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100621 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000622 << BatchNormalizationLayer(
623 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
624 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
625 get_random_accessor(1.f, 1.f),
626 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100627 0.001f)
628 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
629 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000630 << ConvolutionLayer(
631 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000632 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000633 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
634 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100635 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000636 << BatchNormalizationLayer(
637 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
638 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
639 get_random_accessor(1.f, 1.f),
640 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100641 0.001f)
642 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
643 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000644 << ConvolutionLayer(
645 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000646 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000647 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
648 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100649 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000650 << BatchNormalizationLayer(
651 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
652 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
653 get_random_accessor(1.f, 1.f),
654 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100655 0.001f)
656 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
657 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000658 << ConvolutionLayer(
659 3U, 3U, std::get<3>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000660 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000661 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
662 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100663 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000664 << BatchNormalizationLayer(
665 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
666 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
667 get_random_accessor(1.f, 1.f),
668 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100669 0.001f)
670 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
671 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000672
Jenkinsb3a371b2018-05-23 11:36:53 +0100673 SubStream i_c(graph);
674 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000675
Jenkinsb9abeae2018-11-22 11:58:08 +0000676 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000677 }
678
Jenkinsb9abeae2018-11-22 11:58:08 +0000679 ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000680 unsigned int a_filt,
681 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
682 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
683 unsigned int d_filt,
684 bool is_name_different = false)
685 {
686 // This is due to a naming issue in the tf model
687 std::string conv_id = "_0b_";
688 if(is_name_different)
689 {
690 conv_id = "_0c_";
691 }
692
693 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100694 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000695 i_a << ConvolutionLayer(
696 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000697 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000698 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
699 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100700 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000701 << BatchNormalizationLayer(
702 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
703 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
704 get_random_accessor(1.f, 1.f),
705 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100706 0.001f)
707 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
708 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000709
Jenkinsb3a371b2018-05-23 11:36:53 +0100710 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000711 i_b << ConvolutionLayer(
712 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000713 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000714 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
715 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100716 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000717 << BatchNormalizationLayer(
718 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
719 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
720 get_random_accessor(1.f, 1.f),
721 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100722 0.001f)
723 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
724 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000725
Jenkins52ba29e2018-08-29 15:32:11 +0000726 SubStream i_b1(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100727 i_b1 << ConvolutionLayer(
728 3U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000729 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000730 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
731 PadStrideInfo(1, 1, 1, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100732 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000733 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100734 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
735 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000736 get_random_accessor(1.f, 1.f),
Jenkinsb3a371b2018-05-23 11:36:53 +0100737 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
738 0.001f)
739 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
740 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000741
Jenkins52ba29e2018-08-29 15:32:11 +0000742 SubStream i_b2(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100743 i_b2 << ConvolutionLayer(
744 1U, 3U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000745 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000746 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
747 PadStrideInfo(1, 1, 0, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100748 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000749 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100750 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
751 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000752 get_random_accessor(1.f, 1.f),
Jenkinsb3a371b2018-05-23 11:36:53 +0100753 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
754 0.001f)
755 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
756 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000757
Jenkinsb3a371b2018-05-23 11:36:53 +0100758 // Merge b1 and b2
Jenkinsb9abeae2018-11-22 11:58:08 +0000759 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100760
761 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000762 i_c << ConvolutionLayer(
763 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000764 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000765 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
766 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100767 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000768 << BatchNormalizationLayer(
769 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
770 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
771 get_random_accessor(1.f, 1.f),
772 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100773 0.001f)
774 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
775 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000776 << ConvolutionLayer(
777 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000778 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000779 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
780 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100781 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000782 << BatchNormalizationLayer(
783 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
784 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
785 get_random_accessor(1.f, 1.f),
786 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100787 0.001f)
788 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
789 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000790
Jenkins52ba29e2018-08-29 15:32:11 +0000791 SubStream i_c1(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100792 i_c1 << ConvolutionLayer(
793 3U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000794 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100795 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
796 PadStrideInfo(1, 1, 1, 0))
797 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
798 << BatchNormalizationLayer(
799 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
800 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
801 get_random_accessor(1.f, 1.f),
802 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
803 0.001f)
804 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
805 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
806
Jenkins52ba29e2018-08-29 15:32:11 +0000807 SubStream i_c2(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100808 i_c2 << ConvolutionLayer(
809 1U, 3U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000810 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100811 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
812 PadStrideInfo(1, 1, 0, 1))
813 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
814 << BatchNormalizationLayer(
815 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
816 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
817 get_random_accessor(1.f, 1.f),
818 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
819 0.001f)
820 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
821 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
822
823 // Merge i_c1 and i_c2
Jenkinsb9abeae2018-11-22 11:58:08 +0000824 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100825
826 SubStream i_d(graph);
827 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000828 << ConvolutionLayer(
829 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000830 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000831 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
832 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100833 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000834 << BatchNormalizationLayer(
835 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
836 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
837 get_random_accessor(1.f, 1.f),
838 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100839 0.001f)
840 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
841 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000842
Jenkinsb9abeae2018-11-22 11:58:08 +0000843 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000844 }
845};
846
847/** Main program for Inception V3
848 *
Jenkinsb9abeae2018-11-22 11:58:08 +0000849 * Model is based on:
850 * https://arxiv.org/abs/1512.00567
851 * "Rethinking the Inception Architecture for Computer Vision"
852 * Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
853 *
Jenkins514be652019-02-28 12:25:18 +0000854 * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
855 *
Jenkins52ba29e2018-08-29 15:32:11 +0000856 * @note To list all the possible arguments execute the binary appended with the --help option
857 *
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000858 * @param[in] argc Number of arguments
Jenkins52ba29e2018-08-29 15:32:11 +0000859 * @param[in] argv Arguments
 *
 * @return Exit status of the example run (0 on success)
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000860 */
861int main(int argc, char **argv)
862{
863    // Delegate to the common example runner, which parses the command-line
864    // options, instantiates the example and executes the graph.
865    const int status = arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
866    return status;
867}