blob: 1de6a5fad75b11ab67bc5812750bae7db0c4d91a [file] [log] [blame]
/*
 * Copyright (c) 2017-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
Jenkinsb3a371b2018-05-23 11:36:53 +010024#include "arm_compute/graph.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000025#include "support/ToolchainSupport.h"
Jenkins52ba29e2018-08-29 15:32:11 +000026#include "utils/CommonGraphOptions.h"
Anthony Barbierf45d5a92018-01-24 16:23:15 +000027#include "utils/GraphUtils.h"
28#include "utils/Utils.h"
29
Anthony Barbierf45d5a92018-01-24 16:23:15 +000030using namespace arm_compute::utils;
Jenkinsb3a371b2018-05-23 11:36:53 +010031using namespace arm_compute::graph::frontend;
Anthony Barbierf45d5a92018-01-24 16:23:15 +000032using namespace arm_compute::graph_utils;
33
/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
Jenkinsb3a371b2018-05-23 11:36:53 +010035class InceptionV3Example : public Example
Anthony Barbierf45d5a92018-01-24 16:23:15 +000036{
37public:
    /** Default constructor.
     *
     * Registers the common graph options on the command-line parser and
     * creates the frontend stream with target id 0, named "InceptionV3".
     */
    InceptionV3Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
    {
    }
    /** Parse the command line and build/finalize the InceptionV3 graph.
     *
     * @param[in] argc Argument count forwarded from main
     * @param[in] argv Argument values forwarded from main
     *
     * @return True when the graph was built and finalized; false when only the
     *         help menu was requested (nothing is built in that case).
     */
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Checks: this example aborts on quantized (QASYMM8) data types
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object (TensorFlow-style input preprocessing)
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();

        // Create input descriptor: a single 299x299x3 image, with the shape
        // permuted from NCHW to whatever layout was requested on the command line
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout (the .npy weight files were exported in NCHW)
        const DataLayout weights_layout = DataLayout::NCHW;

        // Network stem. Convolutions carry no bias (nullptr accessor); each is
        // followed by batch normalization (gamma accessor is nullptr —
        // NOTE(review): presumably treated as identity scale by the layer;
        // confirm against the BatchNormalizationLayer API) and a ReLU.
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");

        // Inception-A blocks (5b-5d). Mixed_5c uses the alternative checkpoint
        // naming, hence the trailing 'true'.
        graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              .set_name("Mixed_5b/concat");
        graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              .set_name("Mixed_5c/concat");
        graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)
              .set_name("Mixed_5d/concat");

        // Grid-size reduction block (6a)
        graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");

        // Inception-C blocks (6b-6e)
        graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              .set_name("Mixed_6b/concat");
        graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6c/concat");
        graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6d/concat");
        graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
              .set_name("Mixed_6e/concat");

        // Second reduction block (7a)
        graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))
              .set_name("Mixed_7a/concat");

        // Inception-E blocks (7b-7c); Mixed_7c uses the alternative naming
        graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              .set_name("Mixed_7b/concat");
        graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
              .set_name("Mixed_7c/concat");

        // Classifier head: global average pool, 1x1 conv to 1001 classes
        // (with bias), reshape, softmax, then emit the top-5 predictions.
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Logits/Conv2d_1c_1x1/convolution")
              << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
              << SoftmaxLayer().set_name("Predictions/Softmax")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }
211
    /** Run the finalized graph (a single inference pass). */
    void do_run() override
    {
        graph.run();
    }
216
private:
    CommandLineParser  cmd_parser;    // parses argc/argv
    CommonGraphOptions common_opts;   // common example options, registered on cmd_parser
    CommonGraphParams  common_params; // parameter values consumed from the parsed options
    Stream             graph;         // frontend stream the network is built into
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000222
223private:
Jenkinsb9abeae2018-11-22 11:58:08 +0000224 ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000225 unsigned int a_filt,
226 std::tuple<unsigned int, unsigned int> b_filters,
227 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
228 unsigned int d_filt,
229 bool is_name_different = false)
230 {
231 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000232
233 // This is due to a naming issue in the tf model
234 std::string conv_id0 = "_0a_";
235 std::string conv_id1 = "2d_0b_";
236 if(is_name_different)
237 {
238 conv_id0 = "_0b_";
239 conv_id1 = "_1_0c_";
240 }
241
Jenkinsb3a371b2018-05-23 11:36:53 +0100242 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000243 i_a << ConvolutionLayer(
244 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000245 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000246 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
247 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100248 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000249 << BatchNormalizationLayer(
250 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
251 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000252 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000253 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100254 0.001f)
255 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
256 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000257
Jenkinsb3a371b2018-05-23 11:36:53 +0100258 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000259 i_b << ConvolutionLayer(
260 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000261 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000262 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
263 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100264 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000265 << BatchNormalizationLayer(
266 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
267 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000268 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000269 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100270 0.001f)
271 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
272 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000273 << ConvolutionLayer(
274 5U, 5U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000275 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000276 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
277 PadStrideInfo(1, 1, 2, 2))
Jenkinsb3a371b2018-05-23 11:36:53 +0100278 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000279 << BatchNormalizationLayer(
280 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
281 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000282 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000283 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100284 0.001f)
285 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
286 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000287
Jenkinsb3a371b2018-05-23 11:36:53 +0100288 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000289 i_c << ConvolutionLayer(
290 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000291 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000292 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
293 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100294 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000295 << BatchNormalizationLayer(
296 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
297 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000298 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000299 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100300 0.001f)
301 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
302 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000303 << ConvolutionLayer(
304 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000305 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000306 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
307 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100308 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000309 << BatchNormalizationLayer(
310 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
311 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000312 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000313 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100314 0.001f)
315 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
316 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000317 << ConvolutionLayer(
318 3U, 3U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000319 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000320 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
321 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100322 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000323 << BatchNormalizationLayer(
324 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
325 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000326 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000327 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100328 0.001f)
329 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
330 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000331
Jenkinsb3a371b2018-05-23 11:36:53 +0100332 SubStream i_d(graph);
333 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000334 << ConvolutionLayer(
335 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000336 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000337 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
338 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100339 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000340 << BatchNormalizationLayer(
341 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
342 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000343 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000344 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100345 0.001f)
346 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
347 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000348
Jenkinsb9abeae2018-11-22 11:58:08 +0000349 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000350 }
351
    /** Build an Inception-B (grid-size reduction) block with three branches:
     *  strided 3x3 conv | 1x1->3x3->strided 3x3 | strided 3x3 max pool,
     *  concatenated on exit.
     *
     * @param[in] data_path      Path to the trainable-parameters directory
     * @param[in] param_path     Node name prefix (e.g. "Mixed_6a")
     * @param[in] weights_layout Layout the weight files were trained in
     * @param[in] a_filt         Filter count of the Branch_0 3x3 convolution
     * @param[in] b_filters      Filter counts of the Branch_1 (1x1, 3x3, 3x3) convolutions
     *
     * @return A ConcatLayer joining the three branches
     */
    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        // Branch_0: single stride-2 3x3 convolution (no bias) + batch norm + ReLU
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                3U, 3U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");

        // Branch_1: 1x1 reduction -> padded 3x3 -> stride-2 3x3
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");

        // Branch_2: stride-2 3x3 max pool (no trainable parameters)
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }
422
Jenkinsb9abeae2018-11-22 11:58:08 +0000423 ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000424 unsigned int a_filt,
425 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
426 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
427 unsigned int d_filt)
428 {
429 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100430 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000431 i_a << ConvolutionLayer(
432 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000433 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000434 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
435 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100436 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000437 << BatchNormalizationLayer(
438 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
439 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000440 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000441 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100442 0.001f)
443 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
444 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000445
Jenkinsb3a371b2018-05-23 11:36:53 +0100446 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000447 i_b << ConvolutionLayer(
448 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000449 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000450 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
451 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100452 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000453 << BatchNormalizationLayer(
454 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
455 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000456 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000457 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100458 0.001f)
459 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
460 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000461 << ConvolutionLayer(
462 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000463 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000464 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
465 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100466 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000467 << BatchNormalizationLayer(
468 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
469 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000470 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000471 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100472 0.001f)
473 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
474 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000475 << ConvolutionLayer(
476 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000477 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000478 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
479 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100480 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000481 << BatchNormalizationLayer(
482 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
483 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000484 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000485 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100486 0.001f)
487 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
488 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000489
Jenkinsb3a371b2018-05-23 11:36:53 +0100490 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000491 i_c << ConvolutionLayer(
492 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000493 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000494 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
495 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100496 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000497 << BatchNormalizationLayer(
498 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
499 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000500 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000501 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100502 0.001f)
503 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
504 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000505 << ConvolutionLayer(
506 1U, 7U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000507 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000508 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
509 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100510 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000511 << BatchNormalizationLayer(
512 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
513 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000514 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000515 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100516 0.001f)
517 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
518 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000519 << ConvolutionLayer(
520 7U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000521 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000522 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
523 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100524 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000525 << BatchNormalizationLayer(
526 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
527 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000528 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000529 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100530 0.001f)
531 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
532 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000533 << ConvolutionLayer(
534 1U, 7U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000535 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000536 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
537 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100538 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000539 << BatchNormalizationLayer(
540 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
541 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000542 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000543 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100544 0.001f)
545 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
546 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000547 << ConvolutionLayer(
548 7U, 1U, std::get<4>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000549 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000550 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
551 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100552 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000553 << BatchNormalizationLayer(
554 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
555 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000556 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000557 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100558 0.001f)
559 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
560 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000561
Jenkinsb3a371b2018-05-23 11:36:53 +0100562 SubStream i_d(graph);
563 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000564 << ConvolutionLayer(
565 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000566 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000567 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
568 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100569 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000570 << BatchNormalizationLayer(
571 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
572 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000573 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000574 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100575 0.001f)
576 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
577 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000578
Jenkinsb9abeae2018-11-22 11:58:08 +0000579 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000580 }
581
Jenkinsb9abeae2018-11-22 11:58:08 +0000582 ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Jenkins52ba29e2018-08-29 15:32:11 +0000583 std::tuple<unsigned int, unsigned int> a_filters,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000584 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
585 {
586 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100587 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000588 i_a << ConvolutionLayer(
589 1U, 1U, std::get<0>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000590 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000591 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
592 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100593 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000594 << BatchNormalizationLayer(
595 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
596 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000597 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000598 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100599 0.001f)
600 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
601 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000602 << ConvolutionLayer(
603 3U, 3U, std::get<1>(a_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000604 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000605 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
606 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100607 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000608 << BatchNormalizationLayer(
609 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
610 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000611 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000612 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100613 0.001f)
614 .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
615 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000616
Jenkinsb3a371b2018-05-23 11:36:53 +0100617 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000618 i_b << ConvolutionLayer(
619 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000620 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000621 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
622 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100623 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000624 << BatchNormalizationLayer(
625 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
626 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000627 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000628 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100629 0.001f)
630 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
631 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000632 << ConvolutionLayer(
633 7U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000634 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000635 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
636 PadStrideInfo(1, 1, 3, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100637 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000638 << BatchNormalizationLayer(
639 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
640 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000641 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000642 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100643 0.001f)
644 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
645 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000646 << ConvolutionLayer(
647 1U, 7U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000648 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000649 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
650 PadStrideInfo(1, 1, 0, 3))
Jenkinsb3a371b2018-05-23 11:36:53 +0100651 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000652 << BatchNormalizationLayer(
653 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
654 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000655 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000656 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100657 0.001f)
658 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
659 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000660 << ConvolutionLayer(
661 3U, 3U, std::get<3>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000662 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000663 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
664 PadStrideInfo(2, 2, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100665 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000666 << BatchNormalizationLayer(
667 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
668 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000669 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000670 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100671 0.001f)
672 .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
673 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000674
Jenkinsb3a371b2018-05-23 11:36:53 +0100675 SubStream i_c(graph);
676 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000677
Jenkinsb9abeae2018-11-22 11:58:08 +0000678 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000679 }
680
Jenkinsb9abeae2018-11-22 11:58:08 +0000681 ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000682 unsigned int a_filt,
683 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
684 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
685 unsigned int d_filt,
686 bool is_name_different = false)
687 {
688 // This is due to a naming issue in the tf model
689 std::string conv_id = "_0b_";
690 if(is_name_different)
691 {
692 conv_id = "_0c_";
693 }
694
695 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
Jenkinsb3a371b2018-05-23 11:36:53 +0100696 SubStream i_a(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000697 i_a << ConvolutionLayer(
698 1U, 1U, a_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000699 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000700 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
701 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100702 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000703 << BatchNormalizationLayer(
704 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
705 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000706 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000707 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100708 0.001f)
709 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
710 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000711
Jenkinsb3a371b2018-05-23 11:36:53 +0100712 SubStream i_b(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000713 i_b << ConvolutionLayer(
714 1U, 1U, std::get<0>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000715 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000716 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
717 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100718 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000719 << BatchNormalizationLayer(
720 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
721 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000722 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000723 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100724 0.001f)
725 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
726 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000727
Jenkins52ba29e2018-08-29 15:32:11 +0000728 SubStream i_b1(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100729 i_b1 << ConvolutionLayer(
730 3U, 1U, std::get<1>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000731 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000732 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
733 PadStrideInfo(1, 1, 1, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100734 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000735 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100736 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
737 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000738 nullptr,
Jenkinsb3a371b2018-05-23 11:36:53 +0100739 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
740 0.001f)
741 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
742 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000743
Jenkins52ba29e2018-08-29 15:32:11 +0000744 SubStream i_b2(i_b);
Jenkinsb3a371b2018-05-23 11:36:53 +0100745 i_b2 << ConvolutionLayer(
746 1U, 3U, std::get<2>(b_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000747 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000748 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
749 PadStrideInfo(1, 1, 0, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100750 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000751 << BatchNormalizationLayer(
Jenkinsb3a371b2018-05-23 11:36:53 +0100752 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
753 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000754 nullptr,
Jenkinsb3a371b2018-05-23 11:36:53 +0100755 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
756 0.001f)
757 .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
758 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000759
Jenkinsb3a371b2018-05-23 11:36:53 +0100760 // Merge b1 and b2
Jenkinsb9abeae2018-11-22 11:58:08 +0000761 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100762
763 SubStream i_c(graph);
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000764 i_c << ConvolutionLayer(
765 1U, 1U, std::get<0>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000766 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000767 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
768 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100769 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000770 << BatchNormalizationLayer(
771 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
772 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000773 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000774 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100775 0.001f)
776 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
777 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000778 << ConvolutionLayer(
779 3U, 3U, std::get<1>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000780 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000781 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
782 PadStrideInfo(1, 1, 1, 1))
Jenkinsb3a371b2018-05-23 11:36:53 +0100783 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000784 << BatchNormalizationLayer(
785 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
786 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000787 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000788 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100789 0.001f)
790 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
791 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000792
Jenkins52ba29e2018-08-29 15:32:11 +0000793 SubStream i_c1(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100794 i_c1 << ConvolutionLayer(
795 3U, 1U, std::get<2>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000796 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100797 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
798 PadStrideInfo(1, 1, 1, 0))
799 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
800 << BatchNormalizationLayer(
801 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
802 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000803 nullptr,
Jenkinsb3a371b2018-05-23 11:36:53 +0100804 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
805 0.001f)
806 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
807 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
808
Jenkins52ba29e2018-08-29 15:32:11 +0000809 SubStream i_c2(i_c);
Jenkinsb3a371b2018-05-23 11:36:53 +0100810 i_c2 << ConvolutionLayer(
811 1U, 3U, std::get<3>(c_filters),
Jenkins52ba29e2018-08-29 15:32:11 +0000812 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
Jenkinsb3a371b2018-05-23 11:36:53 +0100813 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
814 PadStrideInfo(1, 1, 0, 1))
815 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
816 << BatchNormalizationLayer(
817 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
818 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000819 nullptr,
Jenkinsb3a371b2018-05-23 11:36:53 +0100820 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
821 0.001f)
822 .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
823 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
824
825 // Merge i_c1 and i_c2
Jenkinsb9abeae2018-11-22 11:58:08 +0000826 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
Jenkinsb3a371b2018-05-23 11:36:53 +0100827
828 SubStream i_d(graph);
829 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000830 << ConvolutionLayer(
831 1U, 1U, d_filt,
Jenkins52ba29e2018-08-29 15:32:11 +0000832 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000833 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
834 PadStrideInfo(1, 1, 0, 0))
Jenkinsb3a371b2018-05-23 11:36:53 +0100835 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000836 << BatchNormalizationLayer(
837 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
838 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
Jenkins0e205f72019-11-28 16:53:35 +0000839 nullptr,
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000840 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
Jenkinsb3a371b2018-05-23 11:36:53 +0100841 0.001f)
842 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
843 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000844
Jenkinsb9abeae2018-11-22 11:58:08 +0000845 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000846 }
847};
848
849/** Main program for Inception V3
850 *
Jenkinsb9abeae2018-11-22 11:58:08 +0000851 * Model is based on:
852 * https://arxiv.org/abs/1512.00567
853 * "Rethinking the Inception Architecture for Computer Vision"
854 * Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
855 *
Jenkins514be652019-02-28 12:25:18 +0000856 * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
857 *
Jenkins52ba29e2018-08-29 15:32:11 +0000858 * @note To list all the possible arguments execute the binary appended with the --help option
859 *
Anthony Barbierf45d5a92018-01-24 16:23:15 +0000860 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 *
 * @return Return code
 */
863int main(int argc, char **argv)
864{
865 return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
866}