blob: 0a1e312c1f0b68e9834ff27226e6bbae4504c13a [file] [log] [blame]
Anthony Barbierf45d5a92018-01-24 16:23:15 +00001/*
Jenkins4ba87db2019-05-23 17:11:51 +01002 * Copyright (c) 2017-2019 ARM Limited.
Anthony Barbierf45d5a92018-01-24 16:23:15 +00003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Jenkinsb3a371b2018-05-23 11:36:53 +010024#include "arm_compute/graph.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000025#include "support/ToolchainSupport.h"
Jenkins52ba29e2018-08-29 15:32:11 +000026#include "utils/CommonGraphOptions.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000027#include "utils/GraphUtils.h"
28#include "utils/Utils.h"
29
Anthony Barbierf45d5a92018-01-24 16:23:15 +000030using namespace arm_compute::utils;
Jenkinsb3a371b2018-05-23 11:36:53 +010031using namespace arm_compute::graph::frontend;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000032using namespace arm_compute::graph_utils;
33
Jenkinsb9abeae2018-11-22 11:58:08 +000034/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
Jenkinsb3a371b2018-05-23 11:36:53 +010035class InceptionV3Example : public Example
Anthony Barbierf45d5a92018-01-24 16:23:15 +000036{
37public:
    /** Default constructor: registers the common graph options with the command
     *  line parser and names the graph stream "InceptionV3". */
    InceptionV3Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
    {
    }
    /** Parse the command line and build + finalize the InceptionV3 graph.
     *
     * Constructs the stem convolutions, the Mixed_5x/6x/7x inception blocks
     * (via the get_inception_node_* helpers) and the classification head,
     * then finalizes the stream for the selected target.
     *
     * @param[in] argc Number of command line arguments
     * @param[in] argv Command line arguments
     *
     * @return False when only the help menu was requested, true when the graph
     *         was built and finalized successfully.
     */
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();

        // Create input descriptor (299x299x3 image, permuted to the requested data layout)
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout (the .npy weight files are stored in NCHW)
        const DataLayout weights_layout = DataLayout::NCHW;

        // Stem: Conv2d_1a..Conv2d_4a with batch-norm + ReLU, interleaved with max pools
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f), get_weights_accessor(data_path,
                                                                                             "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");

        // Inception blocks 5b-5d (type A)
        graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              .set_name("Mixed_5b/concat");
        graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              .set_name("Mixed_5c/concat");
        graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)
              .set_name("Mixed_5d/concat");

        // Grid-size reduction block 6a (type B)
        graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");

        // Inception blocks 6b-6e (type C)
        graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              .set_name("Mixed_6b/concat");
        graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6c/concat");
        graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6d/concat");
        graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
              .set_name("Mixed_6e/concat");

        // Grid-size reduction block 7a (type D)
        graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))
              .set_name("Mixed_7a/concat");

        // Inception blocks 7b-7c (type E)
        graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              .set_name("Mixed_7b/concat");
        graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
              .set_name("Mixed_7c/concat");

        // Classification head: global average pool, 1x1 conv to 1001 classes, softmax
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Logits/Conv2d_1c_1x1/convolution")
              << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
              << SoftmaxLayer().set_name("Predictions/Softmax")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }
210
    /** Run the finalized graph workload (do_setup() must have returned true). */
    void do_run() override
    {
        graph.run();
    }
215
private:
    CommandLineParser  cmd_parser;    // Command-line parser for the example options
    CommonGraphOptions common_opts;   // Options shared by all graph examples (target, data path, tuner, ...)
    CommonGraphParams  common_params; // Parsed values of the common options
    Stream             graph;         // Graph stream being built and executed
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000221
222private:
Jenkinsb9abeae2018-11-22 11:58:08 +0000223 ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000224 unsigned int a_filt,
225 std::tuple<unsigned int, unsigned int> b_filters,
226 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
227 unsigned int d_filt,
228 bool is_name_different = false)
229 {
230 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000231
232 // This is due to a naming issue in the tf model
233 std::string conv_id0 = "_0a_";
234 std::string conv_id1 = "2d_0b_";
235 if(is_name_different)
236 {
237 conv_id0 = "_0b_";
238 conv_id1 = "_1_0c_";
239 }
240
Jenkinsb3a371b2018-05-23 11:36:53 +0100241 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000242 i_a << ConvolutionLayer(
243 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000244 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000245 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
246 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100247 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000248 << BatchNormalizationLayer(
249 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
250 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
251 get_random_accessor(1.f, 1.f),
252 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100253 0.001f)
254 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
255 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000256
Jenkinsb3a371b2018-05-23 11:36:53 +0100257 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000258 i_b << ConvolutionLayer(
259 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000260 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000261 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
262 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100263 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000264 << BatchNormalizationLayer(
265 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
266 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
267 get_random_accessor(1.f, 1.f),
268 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100269 0.001f)
270 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
271 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000272 << ConvolutionLayer(
273 5U, 5U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000274 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000275 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
276 PadStrideInfo(1, 1, 2, 2))
Jenkinsb3a371b2018-05-23 11:36:53 +0100277 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000278 << BatchNormalizationLayer(
279 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
280 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
281 get_random_accessor(1.f, 1.f),
282 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100283 0.001f)
284 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
285 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000286
Jenkinsb3a371b2018-05-23 11:36:53 +0100287 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000288 i_c << ConvolutionLayer(
289 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000290 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000291 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
292 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100293 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000294 << BatchNormalizationLayer(
295 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
296 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
297 get_random_accessor(1.f, 1.f),
298 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100299 0.001f)
300 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
301 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000302 << ConvolutionLayer(
303 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000304 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000305 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
306 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100307 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000308 << BatchNormalizationLayer(
309 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
310 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
311 get_random_accessor(1.f, 1.f),
312 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100313 0.001f)
314 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
315 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000316 << ConvolutionLayer(
317 3U, 3U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000318 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000319 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
320 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100321 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000322 << BatchNormalizationLayer(
323 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
324 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
325 get_random_accessor(1.f, 1.f),
326 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100327 0.001f)
328 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
329 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000330
Jenkinsb3a371b2018-05-23 11:36:53 +0100331 SubStream i_d(graph);
332 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000333 << ConvolutionLayer(
334 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000335 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000336 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
337 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100338 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000339 << BatchNormalizationLayer(
340 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
341 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
342 get_random_accessor(1.f, 1.f),
343 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100344 0.001f)
345 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
346 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000347
Jenkinsb9abeae2018-11-22 11:58:08 +0000348 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000349 }
350
    /** Build an Inception type-B grid-size-reduction block with three parallel
     *  branches concatenated along the channel axis:
     *  - Branch 0: strided 3x3 conv (@p a_filt filters)
     *  - Branch 1: 1x1 conv -> 3x3 conv -> strided 3x3 conv (@p b_filters)
     *  - Branch 2: strided 3x3 max pool
     *  Every convolution is followed by batch normalization and a ReLU.
     *
     * @param[in] data_path      Path to the trainable parameter files
     * @param[in] param_path     Scope name of the block (e.g. "Mixed_6a"), used for file and layer names
     * @param[in] weights_layout Data layout the weight files are stored in
     * @param[in] a_filt         Filter count of branch 0
     * @param[in] b_filters      Filter counts of branch 1 (1x1, 3x3, 3x3)
     *
     * @return Concatenation of the three branch sub-streams
     */
    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        // Branch 0: strided 3x3 convolution
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                3U, 3U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");

        // Branch 1: 1x1 conv -> 3x3 conv -> strided 3x3 conv
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");

        // Branch 2: strided 3x3 max pool
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }
421
Jenkinsb9abeae2018-11-22 11:58:08 +0000422 ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000423 unsigned int a_filt,
424 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
425 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
426 unsigned int d_filt)
427 {
428 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100429 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000430 i_a << ConvolutionLayer(
431 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000432 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000433 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
434 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100435 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000436 << BatchNormalizationLayer(
437 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
438 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
439 get_random_accessor(1.f, 1.f),
440 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100441 0.001f)
442 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
443 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000444
Jenkinsb3a371b2018-05-23 11:36:53 +0100445 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000446 i_b << ConvolutionLayer(
447 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000448 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000449 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
450 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100451 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000452 << BatchNormalizationLayer(
453 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
454 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
455 get_random_accessor(1.f, 1.f),
456 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100457 0.001f)
458 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
459 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000460 << ConvolutionLayer(
461 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000462 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000463 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
464 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100465 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000466 << BatchNormalizationLayer(
467 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
468 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
469 get_random_accessor(1.f, 1.f),
470 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100471 0.001f)
472 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
473 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000474 << ConvolutionLayer(
475 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000476 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000477 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
478 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100479 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000480 << BatchNormalizationLayer(
481 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
482 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
483 get_random_accessor(1.f, 1.f),
484 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100485 0.001f)
486 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
487 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000488
Jenkinsb3a371b2018-05-23 11:36:53 +0100489 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000490 i_c << ConvolutionLayer(
491 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000492 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000493 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
494 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100495 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000496 << BatchNormalizationLayer(
497 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
498 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
499 get_random_accessor(1.f, 1.f),
500 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100501 0.001f)
502 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
503 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000504 << ConvolutionLayer(
505 1U, 7U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000506 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000507 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
508 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100509 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000510 << BatchNormalizationLayer(
511 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
512 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
513 get_random_accessor(1.f, 1.f),
514 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100515 0.001f)
516 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
517 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000518 << ConvolutionLayer(
519 7U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000520 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000521 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
522 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100523 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000524 << BatchNormalizationLayer(
525 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
526 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
527 get_random_accessor(1.f, 1.f),
528 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100529 0.001f)
530 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
531 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000532 << ConvolutionLayer(
533 1U, 7U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000534 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000535 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
536 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100537 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000538 << BatchNormalizationLayer(
539 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
540 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
541 get_random_accessor(1.f, 1.f),
542 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100543 0.001f)
544 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
545 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000546 << ConvolutionLayer(
547 7U, 1U, std::get<4>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000548 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000549 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
550 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100551 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000552 << BatchNormalizationLayer(
553 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
554 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
555 get_random_accessor(1.f, 1.f),
556 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100557 0.001f)
558 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
559 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000560
Jenkinsb3a371b2018-05-23 11:36:53 +0100561 SubStream i_d(graph);
562 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000563 << ConvolutionLayer(
564 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000565 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000566 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
567 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100568 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000569 << BatchNormalizationLayer(
570 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
571 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
572 get_random_accessor(1.f, 1.f),
573 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100574 0.001f)
575 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
576 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000577
Jenkinsb9abeae2018-11-22 11:58:08 +0000578 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000579 }
580
Jenkinsb9abeae2018-11-22 11:58:08 +0000581 ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Jenkins52ba29e2018-08-29 15:32:11 +0000582 std::tuple<unsigned int, unsigned int> a_filters,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000583 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
584 {
585 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100586 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000587 i_a << ConvolutionLayer(
588 1U, 1U, std::get<0>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000589 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000590 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
591 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100592 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000593 << BatchNormalizationLayer(
594 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
595 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
596 get_random_accessor(1.f, 1.f),
597 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100598 0.001f)
599 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
600 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000601 << ConvolutionLayer(
602 3U, 3U, std::get<1>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000603 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000604 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
605 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100606 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000607 << BatchNormalizationLayer(
608 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
609 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
610 get_random_accessor(1.f, 1.f),
611 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100612 0.001f)
613 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
614 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000615
Jenkinsb3a371b2018-05-23 11:36:53 +0100616 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000617 i_b << ConvolutionLayer(
618 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000619 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000620 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
621 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100622 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000623 << BatchNormalizationLayer(
624 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
625 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
626 get_random_accessor(1.f, 1.f),
627 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100628 0.001f)
629 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
630 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000631 << ConvolutionLayer(
632 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000633 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000634 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
635 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100636 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000637 << BatchNormalizationLayer(
638 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
639 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
640 get_random_accessor(1.f, 1.f),
641 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100642 0.001f)
643 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
644 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000645 << ConvolutionLayer(
646 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000647 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000648 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
649 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100650 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000651 << BatchNormalizationLayer(
652 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
653 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
654 get_random_accessor(1.f, 1.f),
655 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100656 0.001f)
657 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
658 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000659 << ConvolutionLayer(
660 3U, 3U, std::get<3>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000661 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000662 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
663 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100664 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000665 << BatchNormalizationLayer(
666 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
667 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
668 get_random_accessor(1.f, 1.f),
669 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100670 0.001f)
671 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
672 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000673
Jenkinsb3a371b2018-05-23 11:36:53 +0100674 SubStream i_c(graph);
675 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000676
Jenkinsb9abeae2018-11-22 11:58:08 +0000677 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000678 }
679
Jenkinsb9abeae2018-11-22 11:58:08 +0000680 ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000681 unsigned int a_filt,
682 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
683 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
684 unsigned int d_filt,
685 bool is_name_different = false)
686 {
687 // This is due to a naming issue in the tf model
688 std::string conv_id = "_0b_";
689 if(is_name_different)
690 {
691 conv_id = "_0c_";
692 }
693
694 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100695 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000696 i_a << ConvolutionLayer(
697 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000698 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000699 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
700 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100701 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000702 << BatchNormalizationLayer(
703 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
704 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
705 get_random_accessor(1.f, 1.f),
706 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100707 0.001f)
708 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
709 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000710
Jenkinsb3a371b2018-05-23 11:36:53 +0100711 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000712 i_b << ConvolutionLayer(
713 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000714 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000715 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
716 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100717 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000718 << BatchNormalizationLayer(
719 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
720 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
721 get_random_accessor(1.f, 1.f),
722 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100723 0.001f)
724 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
725 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000726
Jenkins52ba29e2018-08-29 15:32:11 +0000727 SubStream i_b1(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100728 i_b1 << ConvolutionLayer(
729 3U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000730 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000731 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
732 PadStrideInfo(1, 1, 1, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100733 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000734 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100735 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
736 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000737 get_random_accessor(1.f, 1.f),
Jenkinsb3a371b2018-05-23 11:36:53 +0100738 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
739 0.001f)
740 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
741 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000742
Jenkins52ba29e2018-08-29 15:32:11 +0000743 SubStream i_b2(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100744 i_b2 << ConvolutionLayer(
745 1U, 3U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000746 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000747 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
748 PadStrideInfo(1, 1, 0, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100749 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000750 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100751 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
752 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000753 get_random_accessor(1.f, 1.f),
Jenkinsb3a371b2018-05-23 11:36:53 +0100754 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
755 0.001f)
756 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
757 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000758
Jenkinsb3a371b2018-05-23 11:36:53 +0100759 // Merge b1 and b2
Jenkinsb9abeae2018-11-22 11:58:08 +0000760 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100761
762 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000763 i_c << ConvolutionLayer(
764 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000765 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000766 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
767 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100768 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000769 << BatchNormalizationLayer(
770 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
771 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
772 get_random_accessor(1.f, 1.f),
773 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100774 0.001f)
775 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
776 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000777 << ConvolutionLayer(
778 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000779 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000780 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
781 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100782 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000783 << BatchNormalizationLayer(
784 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
785 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
786 get_random_accessor(1.f, 1.f),
787 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100788 0.001f)
789 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
790 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000791
Jenkins52ba29e2018-08-29 15:32:11 +0000792 SubStream i_c1(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100793 i_c1 << ConvolutionLayer(
794 3U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000795 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100796 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
797 PadStrideInfo(1, 1, 1, 0))
798 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
799 << BatchNormalizationLayer(
800 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
801 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
802 get_random_accessor(1.f, 1.f),
803 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
804 0.001f)
805 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
806 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
807
Jenkins52ba29e2018-08-29 15:32:11 +0000808 SubStream i_c2(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100809 i_c2 << ConvolutionLayer(
810 1U, 3U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000811 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100812 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
813 PadStrideInfo(1, 1, 0, 1))
814 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
815 << BatchNormalizationLayer(
816 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
817 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
818 get_random_accessor(1.f, 1.f),
819 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
820 0.001f)
821 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
822 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
823
824 // Merge i_c1 and i_c2
Jenkinsb9abeae2018-11-22 11:58:08 +0000825 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100826
827 SubStream i_d(graph);
828 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000829 << ConvolutionLayer(
830 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000831 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000832 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
833 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100834 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000835 << BatchNormalizationLayer(
836 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
837 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
838 get_random_accessor(1.f, 1.f),
839 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100840 0.001f)
841 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
842 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000843
Jenkinsb9abeae2018-11-22 11:58:08 +0000844 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000845 }
846};
847
848/** Main program for Inception V3
849 *
Jenkinsb9abeae2018-11-22 11:58:08 +0000850 * Model is based on:
851 * https://arxiv.org/abs/1512.00567
852 * "Rethinking the Inception Architecture for Computer Vision"
853 * Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
854 *
Jenkins514be652019-02-28 12:25:18 +0000855 * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
856 *
Jenkins52ba29e2018-08-29 15:32:11 +0000857 * @note To list all the possible arguments execute the binary appended with the --help option
858 *
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000859 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 *
 * @return Return code
 */
862int main(int argc, char **argv)
863{
864 return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
865}