Return the minimum supported HAL version from validateOperation
This change allows for more granular compatibility checks than the
existing validOperationType(V1_X::OperationType) function family, which
is removed in the next CL.
Bug: 115701354
Test: mm
Change-Id: I0ea6aed45f991b725f37cb76e695e88630d181eb
Merged-In: I0ea6aed45f991b725f37cb76e695e88630d181eb
(cherry picked from commit 7aa259b3344bc5d63f6aa5a75cf869b4482500a0)
diff --git a/nn/common/Utils.cpp b/nn/common/Utils.cpp
index d9a667c..e989894 100644
--- a/nn/common/Utils.cpp
+++ b/nn/common/Utils.cpp
@@ -427,10 +427,15 @@
return ANEURALNETWORKS_NO_ERROR;
}
-int validateOperation(ANeuralNetworksOperationType opType,
- uint32_t inputCount, const uint32_t* inputIndexes,
- uint32_t outputCount, const uint32_t* outputIndexes,
- const std::vector<Operand>& operands) {
+int validateOperation(ANeuralNetworksOperationType opType, uint32_t inputCount,
+ const uint32_t* inputIndexes, uint32_t outputCount,
+ const uint32_t* outputIndexes, const std::vector<Operand>& operands,
+ HalVersion* minSupportedHalVersion) {
+ if (minSupportedHalVersion == nullptr) {
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+ *minSupportedHalVersion = HalVersion::UNKNOWN;
+
int n = validateOperandList(inputCount, inputIndexes, static_cast<uint32_t>(operands.size()),
"ANeuralNetworksModel_addOperation inputs");
if (n != ANEURALNETWORKS_NO_ERROR) {
@@ -451,6 +456,7 @@
switch (opType) {
case ANEURALNETWORKS_OEM_OPERATION: {
+ *minSupportedHalVersion = HalVersion::V1_0;
return ANEURALNETWORKS_NO_ERROR;
}
case ANEURALNETWORKS_ADD: {
@@ -476,6 +482,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -505,6 +512,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -527,6 +535,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -549,6 +558,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -571,6 +581,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -620,6 +631,10 @@
}
if (inputCount == 12 || inputCount == 9) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -670,6 +685,10 @@
}
if (inputCount == 11 || inputCount == 8) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -720,6 +739,10 @@
}
if (inputCount == 11 || inputCount == 8) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -761,6 +784,10 @@
}
if (inputCount == 11 || inputCount == 8) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -811,6 +838,10 @@
}
if (inputCount == 11 || inputCount == 8) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -837,6 +868,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -862,6 +894,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -887,6 +920,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -909,6 +943,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -934,6 +969,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -961,6 +997,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -992,6 +1029,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1011,6 +1049,7 @@
std::vector<OperandType> outExpectedTypes = {inputType};
// The last one is the activation function.
inExpectedTypes.back() = OperandType::INT32;
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1033,6 +1072,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1059,6 +1099,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1086,6 +1127,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1114,6 +1156,10 @@
}
if (inputCount == 4) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -1146,6 +1192,10 @@
}
if (inputCount == 3) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -1178,6 +1228,10 @@
}
if (inputCount == 3) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_0;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -1194,6 +1248,7 @@
std::vector<OperandType> inExpectedTypes = {OperandType::TENSOR_INT32,
inputType};
std::vector<OperandType> outExpectedTypes = {inputType};
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1211,6 +1266,7 @@
inputType};
std::vector<OperandType> outExpectedTypes = {inputType,
OperandType::TENSOR_QUANT8_ASYMM};
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1228,6 +1284,8 @@
OperandType::TENSOR_FLOAT32,
OperandType::INT32};
std::vector<OperandType> outExpectedTypes = {OperandType::TENSOR_INT32};
+ // TODO(mks): Return V1_2 if inputType is sparse.
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1266,6 +1324,7 @@
OperandType::TENSOR_FLOAT32,
OperandType::TENSOR_FLOAT32,
OperandType::TENSOR_FLOAT32};
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1285,6 +1344,7 @@
OperandType::INT32};
std::vector<OperandType> outExpectedTypes = {OperandType::TENSOR_FLOAT32,
OperandType::TENSOR_FLOAT32};
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1305,6 +1365,7 @@
OperandType::INT32};
std::vector<OperandType> outExpectedTypes = {OperandType::TENSOR_FLOAT32,
OperandType::TENSOR_FLOAT32};
+ *minSupportedHalVersion = HalVersion::V1_0;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1336,6 +1397,10 @@
}
if (inputCount == 3) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_1;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -1370,6 +1435,10 @@
}
if (inputCount == 4) {
inExpectedTypes.push_back(OperandType::BOOL);
+ // TODO(xusongw): Mention this in NeuralNetworks.h.
+ *minSupportedHalVersion = HalVersion::V1_2;
+ } else {
+ *minSupportedHalVersion = HalVersion::V1_1;
}
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
@@ -1398,6 +1467,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1423,6 +1493,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1450,6 +1521,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1477,6 +1549,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1514,6 +1587,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1538,6 +1612,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1562,6 +1637,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1591,6 +1667,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_1;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1616,6 +1693,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1638,6 +1716,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands,
inputCount, inputIndexes,
inExpectedTypes,
@@ -1654,6 +1733,7 @@
std::vector<OperandType> inExpectedTypes = {inputType, OperandType::INT32,
OperandType::INT32};
std::vector<OperandType> outExpectedTypes(outputCount, inputType);
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1668,6 +1748,7 @@
inExpectedTypes = {OperandType::TENSOR_FLOAT32, OperandType::TENSOR_FLOAT32,
OperandType::TENSOR_INT32, OperandType::FLOAT32, OperandType::INT32};
outExpectedTypes = {OperandType::TENSOR_FLOAT32};
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1681,6 +1762,7 @@
std::vector<OperandType> outExpectedTypes;
inExpectedTypes = {OperandType::TENSOR_FLOAT32, OperandType::TENSOR_FLOAT32};
outExpectedTypes = {OperandType::TENSOR_FLOAT32};
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1722,6 +1804,7 @@
inExpectedTypes.insert(inExpectedTypes.end(), explicitScalarTypes.begin(),
explicitScalarTypes.end());
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1745,6 +1828,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1784,6 +1868,7 @@
}
inExpectedTypes.insert(inExpectedTypes.end(), argExpectedTypes.begin(),
argExpectedTypes.end());
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1808,6 +1893,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);
@@ -1830,6 +1916,7 @@
<< kOperationNames[opType];
return ANEURALNETWORKS_BAD_DATA;
}
+ *minSupportedHalVersion = HalVersion::V1_2;
return validateOperationOperandTypes(operands, inputCount, inputIndexes,
inExpectedTypes, outputCount, outputIndexes,
outExpectedTypes);