/*
 * Copyright (c) 2017 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
24#include "arm_compute/runtime/NEON/functions/NEDirectConvolutionLayer.h"
25
26#include "arm_compute/core/PixelValue.h"
27#include "arm_compute/core/Utils.h"
28#include "arm_compute/core/Validate.h"
29#include "arm_compute/runtime/NEON/NEScheduler.h"
30
31#include <cmath>
32#include <tuple>
33
34using namespace arm_compute;
35
Kaizen8938bd32017-09-28 14:38:23 +010036NEDirectConvolutionLayer::NEDirectConvolutionLayer(std::shared_ptr<IMemoryManager> memory_manager)
Anthony Barbier8140e1e2017-12-14 23:48:46 +000037 : _memory_group(std::move(memory_manager)), _accumulate_bias_kernel(), _conv_kernel(), _input_border_handler(), _accumulator(), _has_bias(false)
Anthony Barbierdbdab852017-06-23 15:42:00 +010038{
39}
40
void NEDirectConvolutionLayer::configure(ITensor *input, const ITensor *weights, const ITensor *bias, ITensor *output, const PadStrideInfo &conv_info)
{
    // Free accumulator
    if(_accumulator.buffer() != nullptr)
    {
        _accumulator.allocator()->free();
    }

    // Check if bias should be added in the convolution result
    _has_bias = (bias != nullptr);

    // Allocate the intermediate accumulator tensor in case of fixed point input
    if(is_data_type_fixed_point(input->info()->data_type()))
    {
        const DataType promoted_dt = (input->info()->data_type() == DataType::QS8) ? DataType::QS16 : DataType::QS32;
        _accumulator.allocator()->init(TensorInfo(output->info()->tensor_shape(), 1, promoted_dt, output->info()->fixed_point_position()));
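        // Manage the intermediate accumulator through the memory group so its memory can be reused by the memory manager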
        _memory_group.manage(&_accumulator);
        _conv_kernel.configure(input, weights, &_accumulator, conv_info);
        if(_has_bias)
        {
            _accumulate_bias_kernel.configure(&_accumulator, bias, output);
        }
        _accumulator.allocator()->allocate();
    }
    else
    {
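        // No intermediate accumulator is needed: convolve directly into the output and accumulate the bias (if any) in place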
        _conv_kernel.configure(input, weights, output, conv_info);
        if(_has_bias)
        {
            _accumulate_bias_kernel.configure(output, bias);
        }
    }

    // Add zero padding XY
    _input_border_handler.configure(input, _conv_kernel.border_size(), BorderMode::CONSTANT, PixelValue(static_cast<float>(0.f)));
}

Status NEDirectConvolutionLayer::validate(const ITensorInfo *input, const ITensorInfo *weights, const ITensorInfo *bias, const ITensorInfo *output, const PadStrideInfo &conv_info)
{
    ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, weights, output);

    DataType data_type = output->data_type();
    if(is_data_type_fixed_point(data_type))
    {
        // Promote data type in case of fixed point
        data_type = ((data_type == DataType::QS8) ? DataType::QS16 : DataType::QS32);
    }
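    // The accumulator mirrors the intermediate tensor used at runtime for fixed point inputs; for other data types it matches the output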
    TensorInfo accumulator(output->clone()->set_is_resizable(true).reset_padding().set_data_type(data_type));

    // Validate Convolution kernel
    ARM_COMPUTE_RETURN_ON_ERROR(NEDirectConvolutionLayerKernel::validate(input, weights, &accumulator, conv_info));

    // Validate bias
    ARM_COMPUTE_RETURN_ERROR_ON_MSG((bias == nullptr) && is_data_type_fixed_point(data_type),
                                    "Biases should be provided for fixed point inputs");
    if(bias != nullptr)
    {
        ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(weights, bias);
        ARM_COMPUTE_RETURN_ERROR_ON_MSG(bias->dimension(0) != weights->dimension(3),
                                        "Biases size and number of input feature maps should match");
        ARM_COMPUTE_RETURN_ERROR_ON_MSG(bias->num_dimensions() > 1, "Biases should be one dimensional");

        // Validate bias kernel
        ARM_COMPUTE_RETURN_ON_ERROR(NEDirectConvolutionLayerBiasAccumulateKernel::validate(&accumulator, bias, output));
    }

    return Status{};
}

void NEDirectConvolutionLayer::run()
{
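    // Fill the input's borders with the constant value (zeros) before running the convolution kernel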
    NEScheduler::get().schedule(&_input_border_handler, Window::DimZ);

    _memory_group.acquire();

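    // Run the convolution kernel and, if a bias was provided, accumulate it into the output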
    NEScheduler::get().schedule(&_conv_kernel, Window::DimZ);
    if(_has_bias)
    {
        NEScheduler::get().schedule(&_accumulate_bias_kernel, Window::DimY);
    }

    _memory_group.release();
}