/*
 * Copyright (c) 2017-2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

#include <cstdlib>
#include <iostream>
#include <memory>

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement AlexNet's network using the Compute Library's graph API
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels, [optional] Fast math for convolution layer (0 = DISABLED, 1 = ENABLED) )
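 * @note Example invocation (binary name and paths are illustrative): graph_alexnet 1 /path/to/cnn_data /path/to/image.ppm /path/to/labels.txt 1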
 */
class GraphAlexnetExample : public Example
{
public:
    void do_setup(int argc, char **argv) override
    {
        std::string data_path; /* Path to the trainable data */
        std::string image;     /* Image data */
        std::string label;     /* Label data */

        // Create a preprocessor object
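        // (the Caffe-style preprocessor subtracts the given per-channel mean from the input image)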
        const std::array<float, 3> mean_rgb{ { 122.68f, 116.67f, 104.01f } };
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<CaffePreproccessor>(mean_rgb);

        // Set target. 0 (NEON), 1 (OpenCL), 2 (OpenCL with Tuner). By default it is NEON
        const int target      = argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0;
        Target    target_hint = set_target_hint(target);

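        // Method hints suggest which convolution algorithm the graph should prefer for the 5x5
        // and 3x3 convolutions; DEFAULT leaves the choice to the library.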
        const bool        is_neon              = (target_hint == Target::NEON);
        ConvolutionMethod convolution_5x5_hint = is_neon ? ConvolutionMethod::GEMM : ConvolutionMethod::DIRECT;
        ConvolutionMethod convolution_3x3_hint = ConvolutionMethod::DEFAULT;
        FastMathHint      fast_math_hint       = FastMathHint::DISABLED;

        // Parse arguments
        if(argc < 2)
        {
            // Print help
            std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels] [fast_math_hint]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 2)
        {
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels] [fast_math_hint]\n\n";
            std::cout << "No data folder provided: using random values\n\n";
        }
        else if(argc == 3)
        {
            data_path = argv[2];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels] [fast_math_hint]\n\n";
            std::cout << "No image provided: using random values\n\n";
        }
        else if(argc == 4)
        {
            data_path = argv[2];
            image     = argv[3];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels] [fast_math_hint]\n\n";
            std::cout << "No text file with labels provided: skipping output accessor\n\n";
        }
        else if(argc == 5)
        {
            data_path = argv[2];
            image     = argv[3];
            label     = argv[4];
            std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " " << argv[4] << " [fast_math_hint]\n\n";
            std::cout << "No fast math info provided: disabling fast math\n\n";
        }
        else
        {
            data_path = argv[2];
            image     = argv[3];
            label     = argv[4];
            fast_math_hint = (std::strtol(argv[5], nullptr, 10) == 0) ? FastMathHint::DISABLED : FastMathHint::ENABLED;
        }

        graph << target_hint
              << fast_math_hint
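              // 227x227 RGB input, batch size 1 (the input size AlexNet expects)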
              << InputLayer(TensorDescriptor(TensorShape(227U, 227U, 3U, 1U), DataType::F32),
                            get_input_accessor(image, std::move(preprocessor)))
              // Layer 1
              << ConvolutionLayer(
                  11U, 11U, 96U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
                  PadStrideInfo(4, 4, 0, 0))
              .set_name("conv1")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu1")
              << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm1")
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
              // Layer 2
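              // conv2, conv4 and conv5 use 2 groups (the trailing constructor argument),
              // mirroring the two-GPU split of the original AlexNet model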
              << convolution_5x5_hint
              << ConvolutionLayer(
                  5U, 5U, 256U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
                  PadStrideInfo(1, 1, 2, 2), 2)
              .set_name("conv2")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu2")
              << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm2")
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
              << convolution_3x3_hint
              // Layer 3
              << ConvolutionLayer(
                  3U, 3U, 384U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
                  PadStrideInfo(1, 1, 1, 1))
              .set_name("conv3")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu3")
              // Layer 4
              << ConvolutionLayer(
                  3U, 3U, 384U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
                  PadStrideInfo(1, 1, 1, 1), 2)
              .set_name("conv4")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu4")
              // Layer 5
              << ConvolutionLayer(
                  3U, 3U, 256U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
                  PadStrideInfo(1, 1, 1, 1), 2)
              .set_name("conv5")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu5")
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
              // Layer 6
              << FullyConnectedLayer(
                  4096U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
              .set_name("fc6")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu6")
              // Layer 7
              << FullyConnectedLayer(
                  4096U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
              .set_name("fc7")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu7")
              // Layer 8
              << FullyConnectedLayer(
                  1000U,
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
                  get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
              .set_name("fc8")
              // Softmax
              << SoftmaxLayer().set_name("prob")
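              // Print the top-5 predictions when a labels file was provided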
              << OutputLayer(get_output_accessor(label, 5));

        // Finalize graph
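        // (finalizing configures the selected backend, allocates the tensors and loads the trainable parameters)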
        GraphConfig config;
        config.use_tuner = (target == 2);
        graph.finalize(target_hint, config);
    }
    void do_run() override
    {
        // Run graph
        graph.run();
    }

private:
    Stream graph{ 0, "AlexNet" };
};

/** Main program for AlexNet
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL, 2 = OpenCL with Tuner), [optional] Path to the weights folder, [optional] image, [optional] labels, [optional] Fast math for convolution layer (0 = DISABLED, 1 = ENABLED) )
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<GraphAlexnetExample>(argc, argv);
}