IVGCVSW-5245 Support Fused Activations for Dynamic Tensors

* Move the ProcessActivation call out of each Convert<LayerName> function
  and into SetupAndTrackLayerOutputSlot by passing an optional ActivationFn
* Connect the Activation layer only after IsTensorInfoSet() has been called

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: I472bcb65b12ae6e934bd1e9af8a6f6aceb311c0e
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index fa67f79..450b91f 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -1400,6 +1400,7 @@
                                   ConversionData& data,
                                   const armnn::TensorInfo* overrideOutputInfo = nullptr,
                                   const std::function <void (const armnn::TensorInfo&, bool&)>& validateFunc = nullptr,
+                                  const ActivationFn& activationFunction = ActivationFn::kActivationNone,
                                   bool inferOutputShapes = false)
 {
     using HalOperand = typename HalPolicy::Operand;
@@ -1447,7 +1448,25 @@
     }
 
     const uint32_t operandIndex = operation.outputs[operationOutputIndex];
-    data.m_OutputSlotForOperand[operandIndex] = &outputSlot;
+
+    if (activationFunction != ActivationFn::kActivationNone)
+    {
+        const armnn::TensorInfo& activationOutputInfo = outputSlot.GetTensorInfo();
+        armnn::IConnectableLayer* const endLayer = ProcessActivation(activationOutputInfo, activationFunction,
+                                                                     &layer, data);
+
+        if (!endLayer)
+        {
+            return Fail("%s: ProcessActivation failed", __func__);
+        }
+
+        armnn::IOutputSlot& activationOutputSlot = endLayer->GetOutputSlot(layerOutputIndex);
+        data.m_OutputSlotForOperand[operandIndex] = &activationOutputSlot;
+    }
+    else
+    {
+        data.m_OutputSlotForOperand[operandIndex] = &outputSlot;
+    }
 
     return true;
 }
@@ -1498,7 +1517,8 @@
                                   const HalModel& model,
                                   ConversionData& data,
                                   const armnn::TensorInfo* overrideOutputInfo = nullptr,
-                                  const std::function <void (const armnn::TensorInfo&, bool&)>& validateFunc = nullptr)
+                                  const std::function <void (const armnn::TensorInfo&, bool&)>& validateFunc = nullptr,
+                                  const ActivationFn& activationFunction = ActivationFn::kActivationNone)
 {
     return SetupAndTrackLayerOutputSlot<HalPolicy>(operation,
                                                    outputIndex,
@@ -1507,7 +1527,8 @@
                                                    model,
                                                    data,
                                                    overrideOutputInfo,
-                                                   validateFunc);
+                                                   validateFunc,
+                                                   activationFunction);
 }
 
 template<typename HalPolicy,
@@ -1782,12 +1803,6 @@
         return Fail("%s: AddPooling2dLayer failed", __func__);
     }
 
-    armnn::IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, pooling2dLayer, data);
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(pooling2dLayer->GetInputSlot(0));
 
     if (!isSupported)
@@ -1795,7 +1810,8 @@
         return false;
     }
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *pooling2dLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -1859,22 +1875,16 @@
     }
 
     armnn::IConnectableLayer* const startLayer = data.m_Network->AddAdditionLayer();
-    armnn::IConnectableLayer* const endLayer   = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
-    if (endLayer != nullptr)
+    bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+    if (!isReshapeSupported)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
-        if (!isReshapeSupported)
-        {
-            return false;
-        }
+        return false;
+    }
 
-        return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
-    }
-    else
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activationFunction);
+
 }
 
 template<typename HalPolicy,
@@ -2426,16 +2436,10 @@
         return Fail("%s: AddConvolution2dLayer failed", __func__);
     }
 
-    armnn::IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, startLayer, data);
-
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(startLayer->GetInputSlot(0));
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -2657,15 +2661,10 @@
         return Fail("%s: AddDepthwiseConvolution2dLayer failed", __func__);
     }
 
-    armnn::IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, startLayer, data);
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(startLayer->GetInputSlot(0));
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -2786,19 +2785,16 @@
     }
 
     armnn::IConnectableLayer* const startLayer = data.m_Network->AddDivisionLayer();
-    armnn::IConnectableLayer* const endLayer   = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
-    if (endLayer)
+    bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+    if (!isReshapeSupported)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
-        if (!isReshapeSupported)
-        {
-            return false;
-        }
-
-        return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+        return false;
     }
-    return Fail("%s: ProcessActivation failed", __func__);
+
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activationFunction);
+
 }
 
 template<typename HalPolicy,
@@ -3107,32 +3103,25 @@
 
     armnn::IConnectableLayer* startLayer =
             data.m_Network->AddFullyConnectedLayer(desc, weights, armnn::Optional<armnn::ConstTensor>(bias));
-    armnn::IConnectableLayer* endLayer = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
-    if (endLayer != nullptr)
+    if (inputInfo.GetNumDimensions() > 2U)
     {
-        if (inputInfo.GetNumDimensions() > 2U)
-        {
-            armnn::ReshapeDescriptor reshapeDescriptor;
-            reshapeDescriptor.m_TargetShape = reshapedInfo.GetShape();
+        armnn::ReshapeDescriptor reshapeDescriptor;
+        reshapeDescriptor.m_TargetShape = reshapedInfo.GetShape();
 
-            armnn::IConnectableLayer* reshapeLayer = data.m_Network->AddReshapeLayer(reshapeDescriptor);
-            assert(reshapeLayer != nullptr);
-            input.Connect(reshapeLayer->GetInputSlot(0));
-            reshapeLayer->GetOutputSlot(0).SetTensorInfo(reshapedInfo);
-            reshapeLayer->GetOutputSlot(0).Connect(startLayer->GetInputSlot(0));
-        }
-        else
-        {
-            input.Connect(startLayer->GetInputSlot(0));
-        }
-
-        return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+        armnn::IConnectableLayer* reshapeLayer = data.m_Network->AddReshapeLayer(reshapeDescriptor);
+        assert(reshapeLayer != nullptr);
+        input.Connect(reshapeLayer->GetInputSlot(0));
+        reshapeLayer->GetOutputSlot(0).SetTensorInfo(reshapedInfo);
+        reshapeLayer->GetOutputSlot(0).Connect(startLayer->GetInputSlot(0));
     }
     else
     {
-        return Fail("%s: ProcessActivation failed", __func__);
+        input.Connect(startLayer->GetInputSlot(0));
     }
+
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activationFunction);
 }
 
 template<typename HalPolicy,
@@ -3446,25 +3435,18 @@
     }
 
     armnn::IConnectableLayer* const startLayer = data.m_Network->AddMultiplicationLayer();
-    armnn::IConnectableLayer* const endLayer   = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
     const armnn::TensorInfo& inputTensorInfo0 = input0.GetTensorInfo();
     const armnn::TensorInfo& inputTensorInfo1 = input1.GetTensorInfo();
 
-    if (endLayer != nullptr)
+    bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+    if (!isReshapeSupported)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
-        if (!isReshapeSupported)
-        {
-            return false;
-        }
+        return false;
+    }
 
-        return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
-    }
-    else
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activationFunction);
 }
 
 template<typename HalPolicy,
@@ -3683,22 +3665,17 @@
     }
 
     armnn::IConnectableLayer* const startLayer = data.m_Network->AddSubtractionLayer();
-    armnn::IConnectableLayer* const endLayer = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
     const armnn::TensorInfo& inputTensorInfo0 = input0.GetTensorInfo();
     const armnn::TensorInfo& inputTensorInfo1 = input1.GetTensorInfo();
 
-    if (endLayer)
+    bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+    if (!isReshapeSupported)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
-        if (!isReshapeSupported)
-        {
-            return false;
-        }
-        return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+        return false;
     }
-
-    return Fail("%s: ProcessActivation failed", __func__);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activationFunction);
 }
 
 template<typename HalPolicy,
diff --git a/ConversionUtils_1_2.hpp b/ConversionUtils_1_2.hpp
index 760312e..2f4b91b 100644
--- a/ConversionUtils_1_2.hpp
+++ b/ConversionUtils_1_2.hpp
@@ -341,16 +341,10 @@
         return Fail("%s: AddConvolution2dLayer failed", __func__);
     }
 
-    IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, startLayer, data);
-
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(startLayer->GetInputSlot(0));
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -527,15 +521,10 @@
         return Fail("%s: AddDepthwiseConvolution2dLayer failed", __func__);
     }
 
-    IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, startLayer, data);
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(startLayer->GetInputSlot(0));
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -1124,16 +1113,8 @@
     }
     concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
 
-    //
-    // Set up Activation layer (if it is set)
-    //
-    IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, concatLayer, data);
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *concatLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 template<typename HalPolicy,
@@ -1924,7 +1905,7 @@
     {
         return (SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, 0, model, data) &&
                 SetupAndTrackLayerOutputSlot<HalPolicy>(
-                    operation, 1, *layer, 1, model, data, nullptr, validateFunc, true));
+                    operation, 1, *layer, 1, model, data, nullptr, validateFunc, ActivationFn::kActivationNone, true));
     }
 
 }
@@ -2656,7 +2637,7 @@
              SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 1, *layer, 1, model, data) &&
              SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 2, *layer, 2, model, data) &&
              SetupAndTrackLayerOutputSlot<HalPolicy>(
-                 operation, 3, *layer, 3, model, data, nullptr, validateFunc, true));
+                 operation, 3, *layer, 3, model, data, nullptr, validateFunc, ActivationFn::kActivationNone, true));
     }
 
 }
@@ -2851,15 +2832,10 @@
         return Fail("%s: AddTransposeConvolution2dLayer failed", __func__);
     }
 
-    IConnectableLayer* endLayer = ProcessActivation(outputInfo, activation, startLayer, data);
-    if (!endLayer)
-    {
-        return Fail("%s: ProcessActivation failed", __func__);
-    }
-
     input.Connect(startLayer->GetInputSlot(0));
 
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data, nullptr, validateFunc);
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *startLayer, model,
+                                                   data, nullptr, validateFunc, activation);
 }
 
 } // armnn_driver namespace
\ No newline at end of file
diff --git a/ConversionUtils_1_3.hpp b/ConversionUtils_1_3.hpp
index 445b9ea..a7f00fc 100644
--- a/ConversionUtils_1_3.hpp
+++ b/ConversionUtils_1_3.hpp
@@ -654,7 +654,8 @@
         return ( SetupAndTrackLayerOutputSlot<HalPolicy>(
                        operation, 0, *layer, 0, model, data, &constOutputStateOutInfo) &&
                  SetupAndTrackLayerOutputSlot<HalPolicy>(
-                       operation, 1, *layer, 1, model, data, nullptr, validateFunc, true) &&
+                       operation, 1, *layer, 1, model, data, nullptr, validateFunc,
+                       ActivationFn::kActivationNone, true) &&
                  SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 2, *layer, 2, model, data, &constOutputInfo));
     }
 }