| // DO NOT EDIT; |
| // Generated by ml/nn/runtime/test/specs/generate_vts_test.sh |
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace add_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated add_relaxed test
#include "examples/add_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/add_relaxed.model.cpp"
} // namespace add_relaxed

// Runs the add_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, add_relaxed) {
generated_tests::Execute(device,
add_relaxed::createTestModel,
add_relaxed::is_ignored,
add_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace avg_pool_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated avg_pool_float_1_relaxed test
#include "examples/avg_pool_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/avg_pool_float_1_relaxed.model.cpp"
} // namespace avg_pool_float_1_relaxed

// Runs the avg_pool_float_1_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, avg_pool_float_1_relaxed) {
generated_tests::Execute(device,
avg_pool_float_1_relaxed::createTestModel,
avg_pool_float_1_relaxed::is_ignored,
avg_pool_float_1_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace avg_pool_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated avg_pool_float_2_relaxed test
#include "examples/avg_pool_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/avg_pool_float_2_relaxed.model.cpp"
} // namespace avg_pool_float_2_relaxed

// Runs the avg_pool_float_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, avg_pool_float_2_relaxed) {
generated_tests::Execute(device,
avg_pool_float_2_relaxed::createTestModel,
avg_pool_float_2_relaxed::is_ignored,
avg_pool_float_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace avg_pool_float_3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated avg_pool_float_3_relaxed test
#include "examples/avg_pool_float_3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/avg_pool_float_3_relaxed.model.cpp"
} // namespace avg_pool_float_3_relaxed

// Runs the avg_pool_float_3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, avg_pool_float_3_relaxed) {
generated_tests::Execute(device,
avg_pool_float_3_relaxed::createTestModel,
avg_pool_float_3_relaxed::is_ignored,
avg_pool_float_3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace avg_pool_float_4_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated avg_pool_float_4_relaxed test
#include "examples/avg_pool_float_4_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/avg_pool_float_4_relaxed.model.cpp"
} // namespace avg_pool_float_4_relaxed

// Runs the avg_pool_float_4_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, avg_pool_float_4_relaxed) {
generated_tests::Execute(device,
avg_pool_float_4_relaxed::createTestModel,
avg_pool_float_4_relaxed::is_ignored,
avg_pool_float_4_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace batch_to_space_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space_float_1 test
#include "examples/batch_to_space_float_1.example.cpp"
};
// Generated model constructor
#include "vts_models/batch_to_space_float_1.model.cpp"
} // namespace batch_to_space_float_1

// Runs the batch_to_space_float_1 model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, batch_to_space_float_1) {
generated_tests::Execute(device,
batch_to_space_float_1::createTestModel,
batch_to_space_float_1::is_ignored,
batch_to_space_float_1::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace batch_to_space {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space test
#include "examples/batch_to_space.example.cpp"
};
// Generated model constructor
#include "vts_models/batch_to_space.model.cpp"
} // namespace batch_to_space

// Runs the batch_to_space model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, batch_to_space) {
generated_tests::Execute(device,
batch_to_space::createTestModel,
batch_to_space::is_ignored,
batch_to_space::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace batch_to_space_quant8_1 {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space_quant8_1 test
#include "examples/batch_to_space_quant8_1.example.cpp"
};
// Generated model constructor
#include "vts_models/batch_to_space_quant8_1.model.cpp"
} // namespace batch_to_space_quant8_1

// Runs the batch_to_space_quant8_1 model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, batch_to_space_quant8_1) {
generated_tests::Execute(device,
batch_to_space_quant8_1::createTestModel,
batch_to_space_quant8_1::is_ignored,
batch_to_space_quant8_1::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace concat_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated concat_float_1_relaxed test
#include "examples/concat_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/concat_float_1_relaxed.model.cpp"
} // namespace concat_float_1_relaxed

// Runs the concat_float_1_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, concat_float_1_relaxed) {
generated_tests::Execute(device,
concat_float_1_relaxed::createTestModel,
concat_float_1_relaxed::is_ignored,
concat_float_1_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace concat_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated concat_float_2_relaxed test
#include "examples/concat_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/concat_float_2_relaxed.model.cpp"
} // namespace concat_float_2_relaxed

// Runs the concat_float_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, concat_float_2_relaxed) {
generated_tests::Execute(device,
concat_float_2_relaxed::createTestModel,
concat_float_2_relaxed::is_ignored,
concat_float_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace concat_float_3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated concat_float_3_relaxed test
#include "examples/concat_float_3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/concat_float_3_relaxed.model.cpp"
} // namespace concat_float_3_relaxed

// Runs the concat_float_3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, concat_float_3_relaxed) {
generated_tests::Execute(device,
concat_float_3_relaxed::createTestModel,
concat_float_3_relaxed::is_ignored,
concat_float_3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_1_h3_w2_SAME_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_1_h3_w2_SAME_relaxed test
#include "examples/conv_1_h3_w2_SAME_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_1_h3_w2_SAME_relaxed.model.cpp"
} // namespace conv_1_h3_w2_SAME_relaxed

// Runs the conv_1_h3_w2_SAME_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_1_h3_w2_SAME_relaxed) {
generated_tests::Execute(device,
conv_1_h3_w2_SAME_relaxed::createTestModel,
conv_1_h3_w2_SAME_relaxed::is_ignored,
conv_1_h3_w2_SAME_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_1_h3_w2_VALID_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_1_h3_w2_VALID_relaxed test
#include "examples/conv_1_h3_w2_VALID_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_1_h3_w2_VALID_relaxed.model.cpp"
} // namespace conv_1_h3_w2_VALID_relaxed

// Runs the conv_1_h3_w2_VALID_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_1_h3_w2_VALID_relaxed) {
generated_tests::Execute(device,
conv_1_h3_w2_VALID_relaxed::createTestModel,
conv_1_h3_w2_VALID_relaxed::is_ignored,
conv_1_h3_w2_VALID_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_3_h3_w2_SAME_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_3_h3_w2_SAME_relaxed test
#include "examples/conv_3_h3_w2_SAME_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_3_h3_w2_SAME_relaxed.model.cpp"
} // namespace conv_3_h3_w2_SAME_relaxed

// Runs the conv_3_h3_w2_SAME_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_3_h3_w2_SAME_relaxed) {
generated_tests::Execute(device,
conv_3_h3_w2_SAME_relaxed::createTestModel,
conv_3_h3_w2_SAME_relaxed::is_ignored,
conv_3_h3_w2_SAME_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_3_h3_w2_VALID_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_3_h3_w2_VALID_relaxed test
#include "examples/conv_3_h3_w2_VALID_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_3_h3_w2_VALID_relaxed.model.cpp"
} // namespace conv_3_h3_w2_VALID_relaxed

// Runs the conv_3_h3_w2_VALID_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_3_h3_w2_VALID_relaxed) {
generated_tests::Execute(device,
conv_3_h3_w2_VALID_relaxed::createTestModel,
conv_3_h3_w2_VALID_relaxed::is_ignored,
conv_3_h3_w2_VALID_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_channels_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_channels_relaxed test
#include "examples/conv_float_channels_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_channels_relaxed.model.cpp"
} // namespace conv_float_channels_relaxed

// Runs the conv_float_channels_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_channels_relaxed) {
generated_tests::Execute(device,
conv_float_channels_relaxed::createTestModel,
conv_float_channels_relaxed::is_ignored,
conv_float_channels_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_channels_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_channels_weights_as_inputs_relaxed test
#include "examples/conv_float_channels_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_channels_weights_as_inputs_relaxed.model.cpp"
} // namespace conv_float_channels_weights_as_inputs_relaxed

// Runs the conv_float_channels_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_channels_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
conv_float_channels_weights_as_inputs_relaxed::createTestModel,
conv_float_channels_weights_as_inputs_relaxed::is_ignored,
conv_float_channels_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_large_relaxed test
#include "examples/conv_float_large_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_large_relaxed.model.cpp"
} // namespace conv_float_large_relaxed

// Runs the conv_float_large_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_large_relaxed) {
generated_tests::Execute(device,
conv_float_large_relaxed::createTestModel,
conv_float_large_relaxed::is_ignored,
conv_float_large_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_large_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_large_weights_as_inputs_relaxed test
#include "examples/conv_float_large_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_large_weights_as_inputs_relaxed.model.cpp"
} // namespace conv_float_large_weights_as_inputs_relaxed

// Runs the conv_float_large_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_large_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
conv_float_large_weights_as_inputs_relaxed::createTestModel,
conv_float_large_weights_as_inputs_relaxed::is_ignored,
conv_float_large_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_relaxed test
#include "examples/conv_float_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_relaxed.model.cpp"
} // namespace conv_float_relaxed

// Runs the conv_float_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_relaxed) {
generated_tests::Execute(device,
conv_float_relaxed::createTestModel,
conv_float_relaxed::is_ignored,
conv_float_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace conv_float_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_weights_as_inputs_relaxed test
#include "examples/conv_float_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/conv_float_weights_as_inputs_relaxed.model.cpp"
} // namespace conv_float_weights_as_inputs_relaxed

// Runs the conv_float_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, conv_float_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
conv_float_weights_as_inputs_relaxed::createTestModel,
conv_float_weights_as_inputs_relaxed::is_ignored,
conv_float_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depth_to_space_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depth_to_space_float_1_relaxed test
#include "examples/depth_to_space_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depth_to_space_float_1_relaxed.model.cpp"
} // namespace depth_to_space_float_1_relaxed

// Runs the depth_to_space_float_1_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depth_to_space_float_1_relaxed) {
generated_tests::Execute(device,
depth_to_space_float_1_relaxed::createTestModel,
depth_to_space_float_1_relaxed::is_ignored,
depth_to_space_float_1_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depth_to_space_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depth_to_space_float_2_relaxed test
#include "examples/depth_to_space_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depth_to_space_float_2_relaxed.model.cpp"
} // namespace depth_to_space_float_2_relaxed

// Runs the depth_to_space_float_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depth_to_space_float_2_relaxed) {
generated_tests::Execute(device,
depth_to_space_float_2_relaxed::createTestModel,
depth_to_space_float_2_relaxed::is_ignored,
depth_to_space_float_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depth_to_space_float_3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depth_to_space_float_3_relaxed test
#include "examples/depth_to_space_float_3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depth_to_space_float_3_relaxed.model.cpp"
} // namespace depth_to_space_float_3_relaxed

// Runs the depth_to_space_float_3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depth_to_space_float_3_relaxed) {
generated_tests::Execute(device,
depth_to_space_float_3_relaxed::createTestModel,
depth_to_space_float_3_relaxed::is_ignored,
depth_to_space_float_3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depthwise_conv2d_float_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv2d_float_large_relaxed test
#include "examples/depthwise_conv2d_float_large_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depthwise_conv2d_float_large_relaxed.model.cpp"
} // namespace depthwise_conv2d_float_large_relaxed

// Runs the depthwise_conv2d_float_large_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_large_relaxed) {
generated_tests::Execute(device,
depthwise_conv2d_float_large_relaxed::createTestModel,
depthwise_conv2d_float_large_relaxed::is_ignored,
depthwise_conv2d_float_large_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depthwise_conv2d_float_large_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv2d_float_large_weights_as_inputs_relaxed test
#include "examples/depthwise_conv2d_float_large_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depthwise_conv2d_float_large_weights_as_inputs_relaxed.model.cpp"
} // namespace depthwise_conv2d_float_large_weights_as_inputs_relaxed

// Runs the depthwise_conv2d_float_large_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_large_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
depthwise_conv2d_float_large_weights_as_inputs_relaxed::createTestModel,
depthwise_conv2d_float_large_weights_as_inputs_relaxed::is_ignored,
depthwise_conv2d_float_large_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace depthwise_conv_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv_relaxed test
#include "examples/depthwise_conv_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/depthwise_conv_relaxed.model.cpp"
} // namespace depthwise_conv_relaxed

// Runs the depthwise_conv_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, depthwise_conv_relaxed) {
generated_tests::Execute(device,
depthwise_conv_relaxed::createTestModel,
depthwise_conv_relaxed::is_ignored,
depthwise_conv_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
// NOTE(review): namespace `div` shadows ::div from <cstdlib> if that header
// is in scope — fully-qualified uses below keep this unambiguous.
namespace div {
std::vector<MixedTypedExample> examples = {
// Generated div test
#include "examples/div.example.cpp"
};
// Generated model constructor
#include "vts_models/div.model.cpp"
} // namespace div

// Runs the div model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, div) {
generated_tests::Execute(device,
div::createTestModel,
div::is_ignored,
div::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace embedding_lookup_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated embedding_lookup_relaxed test
#include "examples/embedding_lookup_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/embedding_lookup_relaxed.model.cpp"
} // namespace embedding_lookup_relaxed

// Runs the embedding_lookup_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, embedding_lookup_relaxed) {
generated_tests::Execute(device,
embedding_lookup_relaxed::createTestModel,
embedding_lookup_relaxed::is_ignored,
embedding_lookup_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace floor_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated floor_relaxed test
#include "examples/floor_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/floor_relaxed.model.cpp"
} // namespace floor_relaxed

// Runs the floor_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, floor_relaxed) {
generated_tests::Execute(device,
floor_relaxed::createTestModel,
floor_relaxed::is_ignored,
floor_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace fully_connected_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_relaxed test
#include "examples/fully_connected_float_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/fully_connected_float_relaxed.model.cpp"
} // namespace fully_connected_float_relaxed

// Runs the fully_connected_float_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, fully_connected_float_relaxed) {
generated_tests::Execute(device,
fully_connected_float_relaxed::createTestModel,
fully_connected_float_relaxed::is_ignored,
fully_connected_float_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace fully_connected_float_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_weights_as_inputs_relaxed test
#include "examples/fully_connected_float_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/fully_connected_float_weights_as_inputs_relaxed.model.cpp"
} // namespace fully_connected_float_weights_as_inputs_relaxed

// Runs the fully_connected_float_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, fully_connected_float_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
fully_connected_float_weights_as_inputs_relaxed::createTestModel,
fully_connected_float_weights_as_inputs_relaxed::is_ignored,
fully_connected_float_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace hashtable_lookup_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated hashtable_lookup_float_relaxed test
#include "examples/hashtable_lookup_float_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/hashtable_lookup_float_relaxed.model.cpp"
} // namespace hashtable_lookup_float_relaxed

// Runs the hashtable_lookup_float_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, hashtable_lookup_float_relaxed) {
generated_tests::Execute(device,
hashtable_lookup_float_relaxed::createTestModel,
hashtable_lookup_float_relaxed::is_ignored,
hashtable_lookup_float_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace l2_normalization_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_normalization_large_relaxed test
#include "examples/l2_normalization_large_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/l2_normalization_large_relaxed.model.cpp"
} // namespace l2_normalization_large_relaxed

// Runs the l2_normalization_large_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, l2_normalization_large_relaxed) {
generated_tests::Execute(device,
l2_normalization_large_relaxed::createTestModel,
l2_normalization_large_relaxed::is_ignored,
l2_normalization_large_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace l2_normalization_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_normalization_relaxed test
#include "examples/l2_normalization_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/l2_normalization_relaxed.model.cpp"
} // namespace l2_normalization_relaxed

// Runs the l2_normalization_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, l2_normalization_relaxed) {
generated_tests::Execute(device,
l2_normalization_relaxed::createTestModel,
l2_normalization_relaxed::is_ignored,
l2_normalization_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace l2_pool_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_pool_float_relaxed test
#include "examples/l2_pool_float_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/l2_pool_float_relaxed.model.cpp"
} // namespace l2_pool_float_relaxed

// Runs the l2_pool_float_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, l2_pool_float_relaxed) {
generated_tests::Execute(device,
l2_pool_float_relaxed::createTestModel,
l2_pool_float_relaxed::is_ignored,
l2_pool_float_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace local_response_norm_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated local_response_norm_float_1_relaxed test
#include "examples/local_response_norm_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/local_response_norm_float_1_relaxed.model.cpp"
} // namespace local_response_norm_float_1_relaxed

// Runs the local_response_norm_float_1_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, local_response_norm_float_1_relaxed) {
generated_tests::Execute(device,
local_response_norm_float_1_relaxed::createTestModel,
local_response_norm_float_1_relaxed::is_ignored,
local_response_norm_float_1_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace local_response_norm_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated local_response_norm_float_2_relaxed test
#include "examples/local_response_norm_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/local_response_norm_float_2_relaxed.model.cpp"
} // namespace local_response_norm_float_2_relaxed

// Runs the local_response_norm_float_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, local_response_norm_float_2_relaxed) {
generated_tests::Execute(device,
local_response_norm_float_2_relaxed::createTestModel,
local_response_norm_float_2_relaxed::is_ignored,
local_response_norm_float_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace local_response_norm_float_3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated local_response_norm_float_3_relaxed test
#include "examples/local_response_norm_float_3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/local_response_norm_float_3_relaxed.model.cpp"
} // namespace local_response_norm_float_3_relaxed

// Runs the local_response_norm_float_3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, local_response_norm_float_3_relaxed) {
generated_tests::Execute(device,
local_response_norm_float_3_relaxed::createTestModel,
local_response_norm_float_3_relaxed::is_ignored,
local_response_norm_float_3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace local_response_norm_float_4_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated local_response_norm_float_4_relaxed test
#include "examples/local_response_norm_float_4_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/local_response_norm_float_4_relaxed.model.cpp"
} // namespace local_response_norm_float_4_relaxed

// Runs the local_response_norm_float_4_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, local_response_norm_float_4_relaxed) {
generated_tests::Execute(device,
local_response_norm_float_4_relaxed::createTestModel,
local_response_norm_float_4_relaxed::is_ignored,
local_response_norm_float_4_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace logistic_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated logistic_float_1_relaxed test
#include "examples/logistic_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/logistic_float_1_relaxed.model.cpp"
} // namespace logistic_float_1_relaxed

// Runs the logistic_float_1_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, logistic_float_1_relaxed) {
generated_tests::Execute(device,
logistic_float_1_relaxed::createTestModel,
logistic_float_1_relaxed::is_ignored,
logistic_float_1_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace logistic_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated logistic_float_2_relaxed test
#include "examples/logistic_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/logistic_float_2_relaxed.model.cpp"
} // namespace logistic_float_2_relaxed

// Runs the logistic_float_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, logistic_float_2_relaxed) {
generated_tests::Execute(device,
logistic_float_2_relaxed::createTestModel,
logistic_float_2_relaxed::is_ignored,
logistic_float_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lsh_projection_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lsh_projection_2_relaxed test
#include "examples/lsh_projection_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lsh_projection_2_relaxed.model.cpp"
} // namespace lsh_projection_2_relaxed

// Runs the lsh_projection_2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lsh_projection_2_relaxed) {
generated_tests::Execute(device,
lsh_projection_2_relaxed::createTestModel,
lsh_projection_2_relaxed::is_ignored,
lsh_projection_2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lsh_projection_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lsh_projection_relaxed test
#include "examples/lsh_projection_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lsh_projection_relaxed.model.cpp"
} // namespace lsh_projection_relaxed

// Runs the lsh_projection_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lsh_projection_relaxed) {
generated_tests::Execute(device,
lsh_projection_relaxed::createTestModel,
lsh_projection_relaxed::is_ignored,
lsh_projection_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lsh_projection_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lsh_projection_weights_as_inputs_relaxed test
#include "examples/lsh_projection_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lsh_projection_weights_as_inputs_relaxed.model.cpp"
} // namespace lsh_projection_weights_as_inputs_relaxed

// Runs the lsh_projection_weights_as_inputs_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lsh_projection_weights_as_inputs_relaxed) {
generated_tests::Execute(device,
lsh_projection_weights_as_inputs_relaxed::createTestModel,
lsh_projection_weights_as_inputs_relaxed::is_ignored,
lsh_projection_weights_as_inputs_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm2_relaxed test
#include "examples/lstm2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm2_relaxed.model.cpp"
} // namespace lstm2_relaxed

// Runs the lstm2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm2_relaxed) {
generated_tests::Execute(device,
lstm2_relaxed::createTestModel,
lstm2_relaxed::is_ignored,
lstm2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm2_state2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm2_state2_relaxed test
#include "examples/lstm2_state2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm2_state2_relaxed.model.cpp"
} // namespace lstm2_state2_relaxed

// Runs the lstm2_state2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm2_state2_relaxed) {
generated_tests::Execute(device,
lstm2_state2_relaxed::createTestModel,
lstm2_state2_relaxed::is_ignored,
lstm2_state2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm2_state_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm2_state_relaxed test
#include "examples/lstm2_state_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm2_state_relaxed.model.cpp"
} // namespace lstm2_state_relaxed

// Runs the lstm2_state_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm2_state_relaxed) {
generated_tests::Execute(device,
lstm2_state_relaxed::createTestModel,
lstm2_state_relaxed::is_ignored,
lstm2_state_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm3_relaxed test
#include "examples/lstm3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm3_relaxed.model.cpp"
} // namespace lstm3_relaxed

// Runs the lstm3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm3_relaxed) {
generated_tests::Execute(device,
lstm3_relaxed::createTestModel,
lstm3_relaxed::is_ignored,
lstm3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm3_state2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm3_state2_relaxed test
#include "examples/lstm3_state2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm3_state2_relaxed.model.cpp"
} // namespace lstm3_state2_relaxed

// Runs the lstm3_state2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm3_state2_relaxed) {
generated_tests::Execute(device,
lstm3_state2_relaxed::createTestModel,
lstm3_state2_relaxed::is_ignored,
lstm3_state2_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm3_state3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm3_state3_relaxed test
#include "examples/lstm3_state3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm3_state3_relaxed.model.cpp"
} // namespace lstm3_state3_relaxed

// Runs the lstm3_state3_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm3_state3_relaxed) {
generated_tests::Execute(device,
lstm3_state3_relaxed::createTestModel,
lstm3_state3_relaxed::is_ignored,
lstm3_state3_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm3_state_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm3_state_relaxed test
#include "examples/lstm3_state_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm3_state_relaxed.model.cpp"
} // namespace lstm3_state_relaxed

// Runs the lstm3_state_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm3_state_relaxed) {
generated_tests::Execute(device,
lstm3_state_relaxed::createTestModel,
lstm3_state_relaxed::is_ignored,
lstm3_state_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm_relaxed test
#include "examples/lstm_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm_relaxed.model.cpp"
} // namespace lstm_relaxed

// Runs the lstm_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm_relaxed) {
generated_tests::Execute(device,
lstm_relaxed::createTestModel,
lstm_relaxed::is_ignored,
lstm_relaxed::examples);
}
| |
// Test data (example inputs/expected outputs) and the generated
// createTestModel()/is_ignored() definitions, textually included.
namespace lstm_state2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated lstm_state2_relaxed test
#include "examples/lstm_state2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/lstm_state2_relaxed.model.cpp"
} // namespace lstm_state2_relaxed

// Runs the lstm_state2_relaxed model on the device over its examples.
TEST_F(NeuralnetworksHidlTest, lstm_state2_relaxed) {
generated_tests::Execute(device,
lstm_state2_relaxed::createTestModel,
lstm_state2_relaxed::is_ignored,
lstm_state2_relaxed::examples);
}
| |
| namespace lstm_state_relaxed { |
| std::vector<MixedTypedExample> examples = { |
| // Generated lstm_state_relaxed test |
| #include "examples/lstm_state_relaxed.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/lstm_state_relaxed.model.cpp" |
| } // namespace lstm_state_relaxed |
| TEST_F(NeuralnetworksHidlTest, lstm_state_relaxed) { |
| generated_tests::Execute(device, |
| lstm_state_relaxed::createTestModel, |
| lstm_state_relaxed::is_ignored, |
| lstm_state_relaxed::examples); |
| } |
| |
| namespace max_pool_float_1_relaxed { |
| std::vector<MixedTypedExample> examples = { |
| // Generated max_pool_float_1_relaxed test |
| #include "examples/max_pool_float_1_relaxed.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/max_pool_float_1_relaxed.model.cpp" |
| } // namespace max_pool_float_1_relaxed |
| TEST_F(NeuralnetworksHidlTest, max_pool_float_1_relaxed) { |
| generated_tests::Execute(device, |
| max_pool_float_1_relaxed::createTestModel, |
| max_pool_float_1_relaxed::is_ignored, |
| max_pool_float_1_relaxed::examples); |
| } |
| |
| namespace max_pool_float_2_relaxed { |
| std::vector<MixedTypedExample> examples = { |
| // Generated max_pool_float_2_relaxed test |
| #include "examples/max_pool_float_2_relaxed.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/max_pool_float_2_relaxed.model.cpp" |
| } // namespace max_pool_float_2_relaxed |
| TEST_F(NeuralnetworksHidlTest, max_pool_float_2_relaxed) { |
| generated_tests::Execute(device, |
| max_pool_float_2_relaxed::createTestModel, |
| max_pool_float_2_relaxed::is_ignored, |
| max_pool_float_2_relaxed::examples); |
| } |
| |
| namespace max_pool_float_3_relaxed { |
| std::vector<MixedTypedExample> examples = { |
| // Generated max_pool_float_3_relaxed test |
| #include "examples/max_pool_float_3_relaxed.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/max_pool_float_3_relaxed.model.cpp" |
| } // namespace max_pool_float_3_relaxed |
| TEST_F(NeuralnetworksHidlTest, max_pool_float_3_relaxed) { |
| generated_tests::Execute(device, |
| max_pool_float_3_relaxed::createTestModel, |
| max_pool_float_3_relaxed::is_ignored, |
| max_pool_float_3_relaxed::examples); |
| } |
| |
| namespace mean_float_1 { |
| std::vector<MixedTypedExample> examples = { |
| // Generated mean_float_1 test |
| #include "examples/mean_float_1.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/mean_float_1.model.cpp" |
| } // namespace mean_float_1 |
| TEST_F(NeuralnetworksHidlTest, mean_float_1) { |
| generated_tests::Execute(device, |
| mean_float_1::createTestModel, |
| mean_float_1::is_ignored, |
| mean_float_1::examples); |
| } |
| |
| namespace mean_float_2 { |
| std::vector<MixedTypedExample> examples = { |
| // Generated mean_float_2 test |
| #include "examples/mean_float_2.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/mean_float_2.model.cpp" |
| } // namespace mean_float_2 |
| TEST_F(NeuralnetworksHidlTest, mean_float_2) { |
| generated_tests::Execute(device, |
| mean_float_2::createTestModel, |
| mean_float_2::is_ignored, |
| mean_float_2::examples); |
| } |
| |
| namespace mean { |
| std::vector<MixedTypedExample> examples = { |
| // Generated mean test |
| #include "examples/mean.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/mean.model.cpp" |
| } // namespace mean |
| TEST_F(NeuralnetworksHidlTest, mean) { |
| generated_tests::Execute(device, |
| mean::createTestModel, |
| mean::is_ignored, |
| mean::examples); |
| } |
| |
// NOTE(review): Generated stanzas (mobilenet .. softmax). One namespace per
// test case keeps the per-test createTestModel/is_ignored/examples symbols
// from colliding; fix issues in the spec files and regenerate, not here.
namespace mobilenet_224_gender_basic_fixed_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated mobilenet_224_gender_basic_fixed_relaxed test
#include "examples/mobilenet_224_gender_basic_fixed_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/mobilenet_224_gender_basic_fixed_relaxed.model.cpp"
} // namespace mobilenet_224_gender_basic_fixed_relaxed
TEST_F(NeuralnetworksHidlTest, mobilenet_224_gender_basic_fixed_relaxed) {
    generated_tests::Execute(device,
                             mobilenet_224_gender_basic_fixed_relaxed::createTestModel,
                             mobilenet_224_gender_basic_fixed_relaxed::is_ignored,
                             mobilenet_224_gender_basic_fixed_relaxed::examples);
}

namespace mul_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated mul_relaxed test
#include "examples/mul_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/mul_relaxed.model.cpp"
} // namespace mul_relaxed
TEST_F(NeuralnetworksHidlTest, mul_relaxed) {
    generated_tests::Execute(device,
                             mul_relaxed::createTestModel,
                             mul_relaxed::is_ignored,
                             mul_relaxed::examples);
}

namespace mul_relu_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated mul_relu_relaxed test
#include "examples/mul_relu_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/mul_relu_relaxed.model.cpp"
} // namespace mul_relu_relaxed
TEST_F(NeuralnetworksHidlTest, mul_relu_relaxed) {
    generated_tests::Execute(device,
                             mul_relu_relaxed::createTestModel,
                             mul_relu_relaxed::is_ignored,
                             mul_relu_relaxed::examples);
}

namespace pad_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated pad_float_1 test
#include "examples/pad_float_1.example.cpp"
};
// Generated model constructor
#include "vts_models/pad_float_1.model.cpp"
} // namespace pad_float_1
TEST_F(NeuralnetworksHidlTest, pad_float_1) {
    generated_tests::Execute(device,
                             pad_float_1::createTestModel,
                             pad_float_1::is_ignored,
                             pad_float_1::examples);
}

namespace pad {
std::vector<MixedTypedExample> examples = {
// Generated pad test
#include "examples/pad.example.cpp"
};
// Generated model constructor
#include "vts_models/pad.model.cpp"
} // namespace pad
TEST_F(NeuralnetworksHidlTest, pad) {
    generated_tests::Execute(device,
                             pad::createTestModel,
                             pad::is_ignored,
                             pad::examples);
}

namespace relu1_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu1_float_1_relaxed test
#include "examples/relu1_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/relu1_float_1_relaxed.model.cpp"
} // namespace relu1_float_1_relaxed
TEST_F(NeuralnetworksHidlTest, relu1_float_1_relaxed) {
    generated_tests::Execute(device,
                             relu1_float_1_relaxed::createTestModel,
                             relu1_float_1_relaxed::is_ignored,
                             relu1_float_1_relaxed::examples);
}

namespace relu1_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu1_float_2_relaxed test
#include "examples/relu1_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/relu1_float_2_relaxed.model.cpp"
} // namespace relu1_float_2_relaxed
TEST_F(NeuralnetworksHidlTest, relu1_float_2_relaxed) {
    generated_tests::Execute(device,
                             relu1_float_2_relaxed::createTestModel,
                             relu1_float_2_relaxed::is_ignored,
                             relu1_float_2_relaxed::examples);
}

namespace relu6_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu6_float_1_relaxed test
#include "examples/relu6_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/relu6_float_1_relaxed.model.cpp"
} // namespace relu6_float_1_relaxed
TEST_F(NeuralnetworksHidlTest, relu6_float_1_relaxed) {
    generated_tests::Execute(device,
                             relu6_float_1_relaxed::createTestModel,
                             relu6_float_1_relaxed::is_ignored,
                             relu6_float_1_relaxed::examples);
}

namespace relu6_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu6_float_2_relaxed test
#include "examples/relu6_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/relu6_float_2_relaxed.model.cpp"
} // namespace relu6_float_2_relaxed
TEST_F(NeuralnetworksHidlTest, relu6_float_2_relaxed) {
    generated_tests::Execute(device,
                             relu6_float_2_relaxed::createTestModel,
                             relu6_float_2_relaxed::is_ignored,
                             relu6_float_2_relaxed::examples);
}

namespace relu_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu_float_1_relaxed test
#include "examples/relu_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/relu_float_1_relaxed.model.cpp"
} // namespace relu_float_1_relaxed
TEST_F(NeuralnetworksHidlTest, relu_float_1_relaxed) {
    generated_tests::Execute(device,
                             relu_float_1_relaxed::createTestModel,
                             relu_float_1_relaxed::is_ignored,
                             relu_float_1_relaxed::examples);
}

namespace reshape_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated reshape_relaxed test
#include "examples/reshape_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/reshape_relaxed.model.cpp"
} // namespace reshape_relaxed
TEST_F(NeuralnetworksHidlTest, reshape_relaxed) {
    generated_tests::Execute(device,
                             reshape_relaxed::createTestModel,
                             reshape_relaxed::is_ignored,
                             reshape_relaxed::examples);
}

namespace reshape_weights_as_inputs_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated reshape_weights_as_inputs_relaxed test
#include "examples/reshape_weights_as_inputs_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/reshape_weights_as_inputs_relaxed.model.cpp"
} // namespace reshape_weights_as_inputs_relaxed
TEST_F(NeuralnetworksHidlTest, reshape_weights_as_inputs_relaxed) {
    generated_tests::Execute(device,
                             reshape_weights_as_inputs_relaxed::createTestModel,
                             reshape_weights_as_inputs_relaxed::is_ignored,
                             reshape_weights_as_inputs_relaxed::examples);
}

namespace resize_bilinear_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated resize_bilinear_relaxed test
#include "examples/resize_bilinear_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/resize_bilinear_relaxed.model.cpp"
} // namespace resize_bilinear_relaxed
TEST_F(NeuralnetworksHidlTest, resize_bilinear_relaxed) {
    generated_tests::Execute(device,
                             resize_bilinear_relaxed::createTestModel,
                             resize_bilinear_relaxed::is_ignored,
                             resize_bilinear_relaxed::examples);
}

namespace rnn_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated rnn_relaxed test
#include "examples/rnn_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/rnn_relaxed.model.cpp"
} // namespace rnn_relaxed
TEST_F(NeuralnetworksHidlTest, rnn_relaxed) {
    generated_tests::Execute(device,
                             rnn_relaxed::createTestModel,
                             rnn_relaxed::is_ignored,
                             rnn_relaxed::examples);
}

namespace rnn_state_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated rnn_state_relaxed test
#include "examples/rnn_state_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/rnn_state_relaxed.model.cpp"
} // namespace rnn_state_relaxed
TEST_F(NeuralnetworksHidlTest, rnn_state_relaxed) {
    generated_tests::Execute(device,
                             rnn_state_relaxed::createTestModel,
                             rnn_state_relaxed::is_ignored,
                             rnn_state_relaxed::examples);
}

namespace softmax_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated softmax_float_1_relaxed test
#include "examples/softmax_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/softmax_float_1_relaxed.model.cpp"
} // namespace softmax_float_1_relaxed
TEST_F(NeuralnetworksHidlTest, softmax_float_1_relaxed) {
    generated_tests::Execute(device,
                             softmax_float_1_relaxed::createTestModel,
                             softmax_float_1_relaxed::is_ignored,
                             softmax_float_1_relaxed::examples);
}

namespace softmax_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated softmax_float_2_relaxed test
#include "examples/softmax_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/softmax_float_2_relaxed.model.cpp"
} // namespace softmax_float_2_relaxed
TEST_F(NeuralnetworksHidlTest, softmax_float_2_relaxed) {
    generated_tests::Execute(device,
                             softmax_float_2_relaxed::createTestModel,
                             softmax_float_2_relaxed::is_ignored,
                             softmax_float_2_relaxed::examples);
}
| |
// NOTE(review): Generated stanzas (space_to_batch .. strided_slice). The
// #include of the .example.cpp fragment sits inside the vector initializer
// on purpose — the fragment expands to initializer elements — so its
// position must not be moved. Regenerate instead of hand-editing.
namespace space_to_batch_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_1 test
#include "examples/space_to_batch_float_1.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_float_1.model.cpp"
} // namespace space_to_batch_float_1
TEST_F(NeuralnetworksHidlTest, space_to_batch_float_1) {
    generated_tests::Execute(device,
                             space_to_batch_float_1::createTestModel,
                             space_to_batch_float_1::is_ignored,
                             space_to_batch_float_1::examples);
}

namespace space_to_batch_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_2 test
#include "examples/space_to_batch_float_2.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_float_2.model.cpp"
} // namespace space_to_batch_float_2
TEST_F(NeuralnetworksHidlTest, space_to_batch_float_2) {
    generated_tests::Execute(device,
                             space_to_batch_float_2::createTestModel,
                             space_to_batch_float_2::is_ignored,
                             space_to_batch_float_2::examples);
}

namespace space_to_batch_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_3 test
#include "examples/space_to_batch_float_3.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_float_3.model.cpp"
} // namespace space_to_batch_float_3
TEST_F(NeuralnetworksHidlTest, space_to_batch_float_3) {
    generated_tests::Execute(device,
                             space_to_batch_float_3::createTestModel,
                             space_to_batch_float_3::is_ignored,
                             space_to_batch_float_3::examples);
}

namespace space_to_batch {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch test
#include "examples/space_to_batch.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch.model.cpp"
} // namespace space_to_batch
TEST_F(NeuralnetworksHidlTest, space_to_batch) {
    generated_tests::Execute(device,
                             space_to_batch::createTestModel,
                             space_to_batch::is_ignored,
                             space_to_batch::examples);
}

namespace space_to_batch_quant8_1 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_quant8_1 test
#include "examples/space_to_batch_quant8_1.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_quant8_1.model.cpp"
} // namespace space_to_batch_quant8_1
TEST_F(NeuralnetworksHidlTest, space_to_batch_quant8_1) {
    generated_tests::Execute(device,
                             space_to_batch_quant8_1::createTestModel,
                             space_to_batch_quant8_1::is_ignored,
                             space_to_batch_quant8_1::examples);
}

namespace space_to_batch_quant8_2 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_quant8_2 test
#include "examples/space_to_batch_quant8_2.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_quant8_2.model.cpp"
} // namespace space_to_batch_quant8_2
TEST_F(NeuralnetworksHidlTest, space_to_batch_quant8_2) {
    generated_tests::Execute(device,
                             space_to_batch_quant8_2::createTestModel,
                             space_to_batch_quant8_2::is_ignored,
                             space_to_batch_quant8_2::examples);
}

namespace space_to_batch_quant8_3 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_quant8_3 test
#include "examples/space_to_batch_quant8_3.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_batch_quant8_3.model.cpp"
} // namespace space_to_batch_quant8_3
TEST_F(NeuralnetworksHidlTest, space_to_batch_quant8_3) {
    generated_tests::Execute(device,
                             space_to_batch_quant8_3::createTestModel,
                             space_to_batch_quant8_3::is_ignored,
                             space_to_batch_quant8_3::examples);
}

namespace space_to_depth_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated space_to_depth_float_1_relaxed test
#include "examples/space_to_depth_float_1_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_depth_float_1_relaxed.model.cpp"
} // namespace space_to_depth_float_1_relaxed
TEST_F(NeuralnetworksHidlTest, space_to_depth_float_1_relaxed) {
    generated_tests::Execute(device,
                             space_to_depth_float_1_relaxed::createTestModel,
                             space_to_depth_float_1_relaxed::is_ignored,
                             space_to_depth_float_1_relaxed::examples);
}

namespace space_to_depth_float_2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated space_to_depth_float_2_relaxed test
#include "examples/space_to_depth_float_2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_depth_float_2_relaxed.model.cpp"
} // namespace space_to_depth_float_2_relaxed
TEST_F(NeuralnetworksHidlTest, space_to_depth_float_2_relaxed) {
    generated_tests::Execute(device,
                             space_to_depth_float_2_relaxed::createTestModel,
                             space_to_depth_float_2_relaxed::is_ignored,
                             space_to_depth_float_2_relaxed::examples);
}

namespace space_to_depth_float_3_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated space_to_depth_float_3_relaxed test
#include "examples/space_to_depth_float_3_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/space_to_depth_float_3_relaxed.model.cpp"
} // namespace space_to_depth_float_3_relaxed
TEST_F(NeuralnetworksHidlTest, space_to_depth_float_3_relaxed) {
    generated_tests::Execute(device,
                             space_to_depth_float_3_relaxed::createTestModel,
                             space_to_depth_float_3_relaxed::is_ignored,
                             space_to_depth_float_3_relaxed::examples);
}

namespace squeeze_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated squeeze_float_1 test
#include "examples/squeeze_float_1.example.cpp"
};
// Generated model constructor
#include "vts_models/squeeze_float_1.model.cpp"
} // namespace squeeze_float_1
TEST_F(NeuralnetworksHidlTest, squeeze_float_1) {
    generated_tests::Execute(device,
                             squeeze_float_1::createTestModel,
                             squeeze_float_1::is_ignored,
                             squeeze_float_1::examples);
}

namespace squeeze {
std::vector<MixedTypedExample> examples = {
// Generated squeeze test
#include "examples/squeeze.example.cpp"
};
// Generated model constructor
#include "vts_models/squeeze.model.cpp"
} // namespace squeeze
TEST_F(NeuralnetworksHidlTest, squeeze) {
    generated_tests::Execute(device,
                             squeeze::createTestModel,
                             squeeze::is_ignored,
                             squeeze::examples);
}

namespace squeeze_quant8_1 {
std::vector<MixedTypedExample> examples = {
// Generated squeeze_quant8_1 test
#include "examples/squeeze_quant8_1.example.cpp"
};
// Generated model constructor
#include "vts_models/squeeze_quant8_1.model.cpp"
} // namespace squeeze_quant8_1
TEST_F(NeuralnetworksHidlTest, squeeze_quant8_1) {
    generated_tests::Execute(device,
                             squeeze_quant8_1::createTestModel,
                             squeeze_quant8_1::is_ignored,
                             squeeze_quant8_1::examples);
}

namespace strided_slice_float_10 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_10 test
#include "examples/strided_slice_float_10.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_10.model.cpp"
} // namespace strided_slice_float_10
TEST_F(NeuralnetworksHidlTest, strided_slice_float_10) {
    generated_tests::Execute(device,
                             strided_slice_float_10::createTestModel,
                             strided_slice_float_10::is_ignored,
                             strided_slice_float_10::examples);
}

namespace strided_slice_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_1 test
#include "examples/strided_slice_float_1.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_1.model.cpp"
} // namespace strided_slice_float_1
TEST_F(NeuralnetworksHidlTest, strided_slice_float_1) {
    generated_tests::Execute(device,
                             strided_slice_float_1::createTestModel,
                             strided_slice_float_1::is_ignored,
                             strided_slice_float_1::examples);
}

namespace strided_slice_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_2 test
#include "examples/strided_slice_float_2.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_2.model.cpp"
} // namespace strided_slice_float_2
TEST_F(NeuralnetworksHidlTest, strided_slice_float_2) {
    generated_tests::Execute(device,
                             strided_slice_float_2::createTestModel,
                             strided_slice_float_2::is_ignored,
                             strided_slice_float_2::examples);
}

namespace strided_slice_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_3 test
#include "examples/strided_slice_float_3.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_3.model.cpp"
} // namespace strided_slice_float_3
TEST_F(NeuralnetworksHidlTest, strided_slice_float_3) {
    generated_tests::Execute(device,
                             strided_slice_float_3::createTestModel,
                             strided_slice_float_3::is_ignored,
                             strided_slice_float_3::examples);
}

namespace strided_slice_float_4 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_4 test
#include "examples/strided_slice_float_4.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_4.model.cpp"
} // namespace strided_slice_float_4
TEST_F(NeuralnetworksHidlTest, strided_slice_float_4) {
    generated_tests::Execute(device,
                             strided_slice_float_4::createTestModel,
                             strided_slice_float_4::is_ignored,
                             strided_slice_float_4::examples);
}

namespace strided_slice_float_5 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_5 test
#include "examples/strided_slice_float_5.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_5.model.cpp"
} // namespace strided_slice_float_5
TEST_F(NeuralnetworksHidlTest, strided_slice_float_5) {
    generated_tests::Execute(device,
                             strided_slice_float_5::createTestModel,
                             strided_slice_float_5::is_ignored,
                             strided_slice_float_5::examples);
}

namespace strided_slice_float_6 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_6 test
#include "examples/strided_slice_float_6.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_6.model.cpp"
} // namespace strided_slice_float_6
TEST_F(NeuralnetworksHidlTest, strided_slice_float_6) {
    generated_tests::Execute(device,
                             strided_slice_float_6::createTestModel,
                             strided_slice_float_6::is_ignored,
                             strided_slice_float_6::examples);
}

namespace strided_slice_float_7 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_7 test
#include "examples/strided_slice_float_7.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_7.model.cpp"
} // namespace strided_slice_float_7
TEST_F(NeuralnetworksHidlTest, strided_slice_float_7) {
    generated_tests::Execute(device,
                             strided_slice_float_7::createTestModel,
                             strided_slice_float_7::is_ignored,
                             strided_slice_float_7::examples);
}

namespace strided_slice_float_8 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_8 test
#include "examples/strided_slice_float_8.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_8.model.cpp"
} // namespace strided_slice_float_8
TEST_F(NeuralnetworksHidlTest, strided_slice_float_8) {
    generated_tests::Execute(device,
                             strided_slice_float_8::createTestModel,
                             strided_slice_float_8::is_ignored,
                             strided_slice_float_8::examples);
}

namespace strided_slice_float_9 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_9 test
#include "examples/strided_slice_float_9.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_float_9.model.cpp"
} // namespace strided_slice_float_9
TEST_F(NeuralnetworksHidlTest, strided_slice_float_9) {
    generated_tests::Execute(device,
                             strided_slice_float_9::createTestModel,
                             strided_slice_float_9::is_ignored,
                             strided_slice_float_9::examples);
}

namespace strided_slice {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice test
#include "examples/strided_slice.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice.model.cpp"
} // namespace strided_slice
TEST_F(NeuralnetworksHidlTest, strided_slice) {
    generated_tests::Execute(device,
                             strided_slice::createTestModel,
                             strided_slice::is_ignored,
                             strided_slice::examples);
}
| |
// NOTE(review): "qaunt8" below is a typo for "quant8", inherited from the
// generator's input file names. The identifiers must keep matching the
// on-disk examples/ and vts_models/ fragment file names, so the rename must
// happen in the spec files (then regenerate) — do not correct it here alone.
namespace strided_slice_qaunt8_10 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_qaunt8_10 test
#include "examples/strided_slice_qaunt8_10.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_qaunt8_10.model.cpp"
} // namespace strided_slice_qaunt8_10
TEST_F(NeuralnetworksHidlTest, strided_slice_qaunt8_10) {
    generated_tests::Execute(device,
                             strided_slice_qaunt8_10::createTestModel,
                             strided_slice_qaunt8_10::is_ignored,
                             strided_slice_qaunt8_10::examples);
}
| |
// NOTE(review): Generated stanzas (strided_slice_quant8 .. svdf). Fix issues
// in the spec files and rerun the generator rather than editing here.
namespace strided_slice_quant8_1 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_1 test
#include "examples/strided_slice_quant8_1.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_1.model.cpp"
} // namespace strided_slice_quant8_1
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_1) {
    generated_tests::Execute(device,
                             strided_slice_quant8_1::createTestModel,
                             strided_slice_quant8_1::is_ignored,
                             strided_slice_quant8_1::examples);
}

namespace strided_slice_quant8_2 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_2 test
#include "examples/strided_slice_quant8_2.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_2.model.cpp"
} // namespace strided_slice_quant8_2
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_2) {
    generated_tests::Execute(device,
                             strided_slice_quant8_2::createTestModel,
                             strided_slice_quant8_2::is_ignored,
                             strided_slice_quant8_2::examples);
}

namespace strided_slice_quant8_3 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_3 test
#include "examples/strided_slice_quant8_3.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_3.model.cpp"
} // namespace strided_slice_quant8_3
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_3) {
    generated_tests::Execute(device,
                             strided_slice_quant8_3::createTestModel,
                             strided_slice_quant8_3::is_ignored,
                             strided_slice_quant8_3::examples);
}

namespace strided_slice_quant8_4 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_4 test
#include "examples/strided_slice_quant8_4.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_4.model.cpp"
} // namespace strided_slice_quant8_4
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_4) {
    generated_tests::Execute(device,
                             strided_slice_quant8_4::createTestModel,
                             strided_slice_quant8_4::is_ignored,
                             strided_slice_quant8_4::examples);
}

namespace strided_slice_quant8_5 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_5 test
#include "examples/strided_slice_quant8_5.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_5.model.cpp"
} // namespace strided_slice_quant8_5
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_5) {
    generated_tests::Execute(device,
                             strided_slice_quant8_5::createTestModel,
                             strided_slice_quant8_5::is_ignored,
                             strided_slice_quant8_5::examples);
}

namespace strided_slice_quant8_6 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_6 test
#include "examples/strided_slice_quant8_6.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_6.model.cpp"
} // namespace strided_slice_quant8_6
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_6) {
    generated_tests::Execute(device,
                             strided_slice_quant8_6::createTestModel,
                             strided_slice_quant8_6::is_ignored,
                             strided_slice_quant8_6::examples);
}

namespace strided_slice_quant8_7 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_7 test
#include "examples/strided_slice_quant8_7.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_7.model.cpp"
} // namespace strided_slice_quant8_7
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_7) {
    generated_tests::Execute(device,
                             strided_slice_quant8_7::createTestModel,
                             strided_slice_quant8_7::is_ignored,
                             strided_slice_quant8_7::examples);
}

namespace strided_slice_quant8_8 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_8 test
#include "examples/strided_slice_quant8_8.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_8.model.cpp"
} // namespace strided_slice_quant8_8
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_8) {
    generated_tests::Execute(device,
                             strided_slice_quant8_8::createTestModel,
                             strided_slice_quant8_8::is_ignored,
                             strided_slice_quant8_8::examples);
}

namespace strided_slice_quant8_9 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_quant8_9 test
#include "examples/strided_slice_quant8_9.example.cpp"
};
// Generated model constructor
#include "vts_models/strided_slice_quant8_9.model.cpp"
} // namespace strided_slice_quant8_9
TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_9) {
    generated_tests::Execute(device,
                             strided_slice_quant8_9::createTestModel,
                             strided_slice_quant8_9::is_ignored,
                             strided_slice_quant8_9::examples);
}

namespace sub {
std::vector<MixedTypedExample> examples = {
// Generated sub test
#include "examples/sub.example.cpp"
};
// Generated model constructor
#include "vts_models/sub.model.cpp"
} // namespace sub
TEST_F(NeuralnetworksHidlTest, sub) {
    generated_tests::Execute(device,
                             sub::createTestModel,
                             sub::is_ignored,
                             sub::examples);
}

namespace svdf2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated svdf2_relaxed test
#include "examples/svdf2_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/svdf2_relaxed.model.cpp"
} // namespace svdf2_relaxed
TEST_F(NeuralnetworksHidlTest, svdf2_relaxed) {
    generated_tests::Execute(device,
                             svdf2_relaxed::createTestModel,
                             svdf2_relaxed::is_ignored,
                             svdf2_relaxed::examples);
}

namespace svdf_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated svdf_relaxed test
#include "examples/svdf_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/svdf_relaxed.model.cpp"
} // namespace svdf_relaxed
TEST_F(NeuralnetworksHidlTest, svdf_relaxed) {
    generated_tests::Execute(device,
                             svdf_relaxed::createTestModel,
                             svdf_relaxed::is_ignored,
                             svdf_relaxed::examples);
}

namespace svdf_state_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated svdf_state_relaxed test
#include "examples/svdf_state_relaxed.example.cpp"
};
// Generated model constructor
#include "vts_models/svdf_state_relaxed.model.cpp"
} // namespace svdf_state_relaxed
| TEST_F(NeuralnetworksHidlTest, svdf_state_relaxed) { |
| generated_tests::Execute(device, |
| svdf_state_relaxed::createTestModel, |
| svdf_state_relaxed::is_ignored, |
| svdf_state_relaxed::examples); |
| } |
| |
| namespace tanh_relaxed { |
| std::vector<MixedTypedExample> examples = { |
| // Generated tanh_relaxed test |
| #include "examples/tanh_relaxed.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/tanh_relaxed.model.cpp" |
| } // namespace tanh_relaxed |
| TEST_F(NeuralnetworksHidlTest, tanh_relaxed) { |
| generated_tests::Execute(device, |
| tanh_relaxed::createTestModel, |
| tanh_relaxed::is_ignored, |
| tanh_relaxed::examples); |
| } |
| |
| namespace transpose_float_1 { |
| std::vector<MixedTypedExample> examples = { |
| // Generated transpose_float_1 test |
| #include "examples/transpose_float_1.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/transpose_float_1.model.cpp" |
| } // namespace transpose_float_1 |
| TEST_F(NeuralnetworksHidlTest, transpose_float_1) { |
| generated_tests::Execute(device, |
| transpose_float_1::createTestModel, |
| transpose_float_1::is_ignored, |
| transpose_float_1::examples); |
| } |
| |
| namespace transpose { |
| std::vector<MixedTypedExample> examples = { |
| // Generated transpose test |
| #include "examples/transpose.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/transpose.model.cpp" |
| } // namespace transpose |
| TEST_F(NeuralnetworksHidlTest, transpose) { |
| generated_tests::Execute(device, |
| transpose::createTestModel, |
| transpose::is_ignored, |
| transpose::examples); |
| } |
| |
| namespace transpose_quant8_1 { |
| std::vector<MixedTypedExample> examples = { |
| // Generated transpose_quant8_1 test |
| #include "examples/transpose_quant8_1.example.cpp" |
| }; |
| // Generated model constructor |
| #include "vts_models/transpose_quant8_1.model.cpp" |
| } // namespace transpose_quant8_1 |
| TEST_F(NeuralnetworksHidlTest, transpose_quant8_1) { |
| generated_tests::Execute(device, |
| transpose_quant8_1::createTestModel, |
| transpose_quant8_1::is_ignored, |
| transpose_quant8_1::examples); |
| } |