Merge "Change NNAPI VTS to use TEST_P to iterate across all service instances"
am: b287873b73
Change-Id: I1890ae9f3b34e09926b624aec22c6ceef496b243
commit a89b02a5cd
24 changed files with 379 additions and 350 deletions
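Before the diff itself, a minimal self-contained sketch of the pattern this change adopts: every IDevice service instance registered with hwservicemanager becomes one gtest parameter, so each TEST_P case runs once per instance instead of relying on a global VtsHalHidlTargetTestEnvBase environment. This block is an illustration built from the helpers introduced below (getNamedDevices, NamedDevice, the name sanitizer), not a line-for-line excerpt of the patch; it assumes gtest 1.10+ and libhidl's ServiceManagement API.

#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <gtest/gtest.h>
#include <hidl/ServiceManagement.h>

#include <algorithm>
#include <cctype>
#include <string>
#include <utility>
#include <vector>

using android::sp;
using android::hardware::neuralnetworks::V1_0::IDevice;

// One test parameter per registered IDevice instance: its name plus a handle.
using NamedDevice = std::pair<std::string, sp<IDevice>>;

static std::vector<NamedDevice> getNamedDevices() {
    std::vector<NamedDevice> devices;
    // Enumerates every service instance that implements IDevice, including lazy HALs.
    for (const auto& name : android::hardware::getAllHalInstanceNames(IDevice::descriptor)) {
        devices.push_back({name, IDevice::getService(name)});
    }
    return devices;
}

class NeuralnetworksHidlTest : public testing::TestWithParam<NamedDevice> {
  protected:
    const sp<IDevice> kDevice = GetParam().second;  // device under test for this parameter
};

// Runs once per service instance instead of once per binary invocation.
TEST_P(NeuralnetworksHidlTest, CreateDevice) {
    ASSERT_NE(kDevice, nullptr);
}

INSTANTIATE_TEST_SUITE_P(PerInstance, NeuralnetworksHidlTest, testing::ValuesIn(getNamedDevices()),
                         [](const testing::TestParamInfo<NamedDevice>& info) {
                             // gtest parameter names must be alphanumeric, so sanitize the
                             // instance name (e.g. "sample-all" -> "sample_all").
                             std::string name = info.param.first;
                             std::replace_if(
                                     name.begin(), name.end(),
                                     [](char c) { return !std::isalnum(c); }, '_');
                             return name;
                         });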
@@ -40,10 +40,11 @@ cc_library_static {
],
}

cc_defaults {
name: "VtsHalNeuralNetworksV1_0TargetTestDefaults",
cc_test {
name: "VtsHalNeuralnetworksV1_0TargetTest",
defaults: ["VtsHalTargetTestDefaults"],
srcs: [
"BasicTests.cpp",
"TestAssertions.cpp",
"ValidateModel.cpp",
"ValidateRequest.cpp",
@@ -64,33 +65,11 @@ cc_defaults {
"libneuralnetworks_utils",
"VtsHalNeuralNetworksV1_0_utils",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
],
header_libs: [
"libneuralnetworks_headers",
],
test_suites: ["general-tests"],
}

cc_test {
name: "VtsHalNeuralnetworksV1_0TargetTest",
defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
],
}

cc_test {
name: "PresubmitHalNeuralnetworksV1_0TargetTest",
defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
],
cflags: [
"-DPRESUBMIT_NOT_VTS",
],
}

@@ -21,17 +21,17 @@
namespace android::hardware::neuralnetworks::V1_0::vts::functional {

// create device test
TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
TEST_P(NeuralnetworksHidlTest, CreateDevice) {}

// status test
TEST_F(NeuralnetworksHidlTest, StatusTest) {
TEST_P(NeuralnetworksHidlTest, StatusTest) {
Return<DeviceStatus> status = kDevice->getStatus();
ASSERT_TRUE(status.isOk());
EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
}

// initialization
TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
Return<void> ret =
kDevice->getCapabilities([](ErrorStatus status, const Capabilities& capabilities) {
EXPECT_EQ(ErrorStatus::NONE, status);

@@ -148,6 +148,20 @@ void Execute(const sp<IDevice>& device, const TestModel& testModel) {
checkResults(testModel, outputs);
}

void GeneratedTestBase::SetUp() {
testing::TestWithParam<GeneratedTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
return TestModelManager::get().getTestModels(filter);
}

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
const auto& [namedDevice, namedModel] = info.param;
return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {};

@@ -18,29 +18,38 @@
#define ANDROID_HARDWARE_NEURALNETWORKS_V1_0_GENERATED_TEST_HARNESS_H

#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <functional>
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"

namespace android::hardware::neuralnetworks::V1_0::vts::functional {

class GeneratedTestBase
: public NeuralnetworksHidlTest,
public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
using NamedModel = Named<const test_helper::TestModel*>;
using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;

class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
protected:
const test_helper::TestModel& kTestModel = *GetParam().second;
void SetUp() override;
const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
};

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
using FilterFn = std::function<bool(const test_helper::TestModel&)>;
std::vector<NamedModel> getNamedModels(const FilterFn& filter);

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \
testing::Combine(testing::ValuesIn(getNamedDevices()), \
testing::ValuesIn(getNamedModels(filter))), \
printGeneratedTest)

// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {};

Model createModel(const ::test_helper::TestModel& testModel);
Model createModel(const test_helper::TestModel& testModel);

} // namespace android::hardware::neuralnetworks::V1_0::vts::functional

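The rewritten header above replaces the environment-based instantiation with a device-by-model cross product. A hedged illustration of how a generated suite is expected to use the new macro after this change: the TEST_P body is my own sketch built from the Execute() helper visible in the .cpp hunk above, and the always-true filter is the one the patch itself passes for ValidationTest; the block assumes the declarations from this header and is not a line from the patch.

// Assumes the declarations from the rewritten GeneratedTestHarness.h above.
class GeneratedTest : public GeneratedTestBase {};

TEST_P(GeneratedTest, Test) {
    // kDevice and kTestModel are unpacked from the (NamedDevice, NamedModel) parameter tuple.
    Execute(kDevice, kTestModel);
}

// Instantiates the suite as the cross product of every IDevice instance and every
// generated model accepted by the filter; test names come from printGeneratedTest.
INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const test_helper::TestModel&) { return true; });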
@@ -117,6 +117,13 @@ std::vector<TestBuffer> getOutputBuffers(const Request& request) {
return outputBuffers;
}

std::string gtestCompliantName(std::string name) {
// gtest test names must only contain alphanumeric characters
std::replace_if(
name.begin(), name.end(), [](char c) { return !std::isalnum(c); }, '_');
return name;
}

} // namespace android::hardware::neuralnetworks

namespace android::hardware::neuralnetworks::V1_0 {

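To make the naming behavior concrete, here is a stand-alone snippet that copies the one-line sanitizer added above and checks it on illustrative inputs (the inputs are my own examples, not values from the patch):

#include <algorithm>
#include <cassert>
#include <cctype>
#include <string>

// Same body as the gtestCompliantName() added above: every character that is
// not alphanumeric is replaced with '_', keeping gtest parameter names legal.
std::string gtestCompliantName(std::string name) {
    std::replace_if(
            name.begin(), name.end(), [](char c) { return !std::isalnum(c); }, '_');
    return name;
}

int main() {
    assert(gtestCompliantName("sample-all") == "sample_all");
    assert(gtestCompliantName("android.hardware.neuralnetworks@1.0::IDevice/default") ==
           "android_hardware_neuralnetworks_1_0__IDevice_default");
    return 0;
}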
@@ -18,11 +18,13 @@

#include "VtsHalNeuralnetworks.h"
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"

#include <android-base/logging.h>
#include <hidl/ServiceManagement.h>
#include <string>
#include <utility>

namespace android::hardware::neuralnetworks::V1_0::vts::functional {
@@ -76,34 +78,39 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
ASSERT_NE(nullptr, preparedModel->get());
}

// A class for test environment setup
NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
// This has to return a "new" object because it is freed inside
// testing::AddGlobalTestEnvironment when the gtest is being torn down
static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
return instance;
}

void NeuralnetworksHidlEnvironment::registerTestServices() {
registerTestService<IDevice>();
}

// The main test class for NEURALNETWORK HIDL HAL.
void NeuralnetworksHidlTest::SetUp() {
testing::VtsHalHidlTargetTestBase::SetUp();

#ifdef PRESUBMIT_NOT_VTS
const std::string name =
NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
const std::string sampleDriver = "sample-";
if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
GTEST_SKIP();
}
#endif // PRESUBMIT_NOT_VTS

ASSERT_NE(nullptr, kDevice.get());
testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

static NamedDevice makeNamedDevice(const std::string& name) {
return {name, IDevice::getService(name)};
}

static std::vector<NamedDevice> getNamedDevicesImpl() {
// Retrieves the name of all service instances that implement IDevice,
// including any Lazy HAL instances.
const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);

// Get a handle to each device and pair it with its name.
std::vector<NamedDevice> namedDevices;
namedDevices.reserve(names.size());
std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
return namedDevices;
}

const std::vector<NamedDevice>& getNamedDevices() {
const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
return devices;
}

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
return gtestCompliantName(getName(info.param));
}

INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);

// Forward declaration from ValidateModel.cpp
void validateModel(const sp<IDevice>& device, const Model& model);
// Forward declaration from ValidateRequest.cpp
@@ -130,14 +137,3 @@ TEST_P(ValidationTest, Test) {
INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });

} // namespace android::hardware::neuralnetworks::V1_0::vts::functional

using android::hardware::neuralnetworks::V1_0::vts::functional::NeuralnetworksHidlEnvironment;

int main(int argc, char** argv) {
testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
testing::InitGoogleTest(&argc, argv);
NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);

int status = RUN_ALL_TESTS();
return status;
}

@@ -17,40 +17,34 @@
#ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H
#define ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H

#include "1.0/Utils.h"

#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/types.h>

#include <VtsHalHidlTargetTestBase.h>
#include <VtsHalHidlTargetTestEnvBase.h>

#include <android-base/macros.h>
#include <gtest/gtest.h>

#include <vector>

namespace android::hardware::neuralnetworks::V1_0::vts::functional {

// A class for test environment setup
class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
NeuralnetworksHidlEnvironment() = default;

public:
static NeuralnetworksHidlEnvironment* getInstance();
void registerTestServices() override;
};

// The main test class for NEURALNETWORKS HIDL HAL.
class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);

public:
NeuralnetworksHidlTest() = default;
void SetUp() override;
using NamedDevice = Named<sp<IDevice>>;
using NeuralnetworksHidlTestParam = NamedDevice;

class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
protected:
const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
NeuralnetworksHidlEnvironment::getInstance());
void SetUp() override;
const sp<IDevice> kDevice = getData(GetParam());
};

const std::vector<NamedDevice>& getNamedDevices();

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);

#define INSTANTIATE_DEVICE_TEST(TestSuite) \
INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
printNeuralnetworksHidlTest)

// Create an IPreparedModel object. If the model cannot be prepared,
// "preparedModel" will be nullptr instead.
void createPreparedModel(const sp<IDevice>& device, const Model& model,

@@ -21,13 +21,15 @@
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <algorithm>
#include <iosfwd>
#include <string>
#include <utility>
#include <vector>
#include "TestHarness.h"

namespace android::hardware::neuralnetworks {

// Create HIDL Request from the TestModel struct.
V1_0::Request createRequest(const ::test_helper::TestModel& testModel);
V1_0::Request createRequest(const test_helper::TestModel& testModel);

// After execution, copy out output results from the output memory pool.
std::vector<::test_helper::TestBuffer> getOutputBuffers(const V1_0::Request& request);
@@ -51,6 +53,21 @@ inline uint32_t hidl_vec_push_back(hidl_vec<Type>* vec, const Type& value) {
return index;
}

template <typename Type>
using Named = std::pair<std::string, Type>;

template <typename Type>
const std::string& getName(const Named<Type>& namedData) {
return namedData.first;
}

template <typename Type>
const Type& getData(const Named<Type>& namedData) {
return namedData.second;
}

std::string gtestCompliantName(std::string name);

} // namespace android::hardware::neuralnetworks

namespace android::hardware::neuralnetworks::V1_0 {

@@ -14,10 +14,11 @@
// limitations under the License.
//

cc_defaults {
name: "VtsHalNeuralNetworksV1_1TargetTestDefaults",
cc_test {
name: "VtsHalNeuralnetworksV1_1TargetTest",
defaults: ["VtsHalTargetTestDefaults"],
srcs: [
"BasicTests.cpp",
"TestAssertions.cpp",
"ValidateModel.cpp",
"ValidateRequest.cpp",
@@ -39,35 +40,12 @@ cc_defaults {
"libneuralnetworks_utils",
"VtsHalNeuralNetworksV1_0_utils",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
],
header_libs: [
"libneuralnetworks_headers",
],
test_suites: ["general-tests"],
}

cc_test {
name: "VtsHalNeuralnetworksV1_1TargetTest",
defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
],
}

cc_test {
name: "PresubmitHalNeuralnetworksV1_1TargetTest",
defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
],
cflags: [
"-DPRESUBMIT_NOT_VTS",
],
}

@@ -24,17 +24,17 @@ using V1_0::DeviceStatus;
using V1_0::ErrorStatus;

// create device test
TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
TEST_P(NeuralnetworksHidlTest, CreateDevice) {}

// status test
TEST_F(NeuralnetworksHidlTest, StatusTest) {
TEST_P(NeuralnetworksHidlTest, StatusTest) {
Return<DeviceStatus> status = kDevice->getStatus();
ASSERT_TRUE(status.isOk());
EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
}

// initialization
TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
Return<void> ret =
kDevice->getCapabilities_1_1([](ErrorStatus status, const Capabilities& capabilities) {
EXPECT_EQ(ErrorStatus::NONE, status);

@@ -156,6 +156,20 @@ void Execute(const sp<IDevice>& device, const TestModel& testModel) {
checkResults(testModel, outputs);
}

void GeneratedTestBase::SetUp() {
testing::TestWithParam<GeneratedTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
return TestModelManager::get().getTestModels(filter);
}

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
const auto& [namedDevice, namedModel] = info.param;
return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {};

@@ -18,29 +18,38 @@
#define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_GENERATED_TEST_HARNESS_H

#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include "1.0/Utils.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"

namespace android::hardware::neuralnetworks::V1_1::vts::functional {

class GeneratedTestBase
: public NeuralnetworksHidlTest,
public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
using NamedModel = Named<const test_helper::TestModel*>;
using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;

class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
protected:
const test_helper::TestModel& kTestModel = *GetParam().second;
void SetUp() override;
const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
};

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
using FilterFn = std::function<bool(const test_helper::TestModel&)>;
std::vector<NamedModel> getNamedModels(const FilterFn& filter);

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \
testing::Combine(testing::ValuesIn(getNamedDevices()), \
testing::ValuesIn(getNamedModels(filter))), \
printGeneratedTest)

// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {};

Model createModel(const ::test_helper::TestModel& testModel);
Model createModel(const test_helper::TestModel& testModel);

} // namespace android::hardware::neuralnetworks::V1_1::vts::functional

@@ -17,13 +17,15 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include "VtsHalNeuralnetworks.h"
#include <android-base/logging.h>
#include <hidl/ServiceManagement.h>
#include <string>
#include <utility>
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"

#include <android-base/logging.h>

namespace android::hardware::neuralnetworks::V1_1::vts::functional {

using V1_0::ErrorStatus;
@@ -79,34 +81,39 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
ASSERT_NE(nullptr, preparedModel->get());
}

// A class for test environment setup
NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
// This has to return a "new" object because it is freed inside
// testing::AddGlobalTestEnvironment when the gtest is being torn down
static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
return instance;
}

void NeuralnetworksHidlEnvironment::registerTestServices() {
registerTestService<IDevice>();
}

// The main test class for NEURALNETWORK HIDL HAL.
void NeuralnetworksHidlTest::SetUp() {
testing::VtsHalHidlTargetTestBase::SetUp();

#ifdef PRESUBMIT_NOT_VTS
const std::string name =
NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
const std::string sampleDriver = "sample-";
if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
GTEST_SKIP();
}
#endif // PRESUBMIT_NOT_VTS

ASSERT_NE(nullptr, kDevice.get());
testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

static NamedDevice makeNamedDevice(const std::string& name) {
return {name, IDevice::getService(name)};
}

static std::vector<NamedDevice> getNamedDevicesImpl() {
// Retrieves the name of all service instances that implement IDevice,
// including any Lazy HAL instances.
const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);

// Get a handle to each device and pair it with its name.
std::vector<NamedDevice> namedDevices;
namedDevices.reserve(names.size());
std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
return namedDevices;
}

const std::vector<NamedDevice>& getNamedDevices() {
const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
return devices;
}

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
return gtestCompliantName(getName(info.param));
}

INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);

// Forward declaration from ValidateModel.cpp
void validateModel(const sp<IDevice>& device, const Model& model);
// Forward declaration from ValidateRequest.cpp
@@ -133,14 +140,3 @@ TEST_P(ValidationTest, Test) {
INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });

} // namespace android::hardware::neuralnetworks::V1_1::vts::functional

using android::hardware::neuralnetworks::V1_1::vts::functional::NeuralnetworksHidlEnvironment;

int main(int argc, char** argv) {
testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
testing::InitGoogleTest(&argc, argv);
NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);

int status = RUN_ALL_TESTS();
return status;
}

@@ -17,41 +17,33 @@
#ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H
#define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H

#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.1/types.h>

#include <VtsHalHidlTargetTestBase.h>
#include <VtsHalHidlTargetTestEnvBase.h>

#include <android-base/macros.h>
#include <gtest/gtest.h>
#include <vector>
#include "1.0/Utils.h"

namespace android::hardware::neuralnetworks::V1_1::vts::functional {

// A class for test environment setup
class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
NeuralnetworksHidlEnvironment() = default;

public:
static NeuralnetworksHidlEnvironment* getInstance();
void registerTestServices() override;
};

// The main test class for NEURALNETWORKS HIDL HAL.
class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);

public:
NeuralnetworksHidlTest() = default;
void SetUp() override;
using NamedDevice = Named<sp<IDevice>>;
using NeuralnetworksHidlTestParam = NamedDevice;

class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
protected:
const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
NeuralnetworksHidlEnvironment::getInstance());
void SetUp() override;
const sp<IDevice> kDevice = getData(GetParam());
};

const std::vector<NamedDevice>& getNamedDevices();

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);

#define INSTANTIATE_DEVICE_TEST(TestSuite) \
INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
printNeuralnetworksHidlTest)

// Create an IPreparedModel object. If the model cannot be prepared,
// "preparedModel" will be nullptr instead.
void createPreparedModel(const sp<IDevice>& device, const Model& model,

@@ -14,16 +14,19 @@
// limitations under the License.
//

cc_defaults {
name: "VtsHalNeuralNetworksV1_2TargetTestDefaults",
cc_test {
name: "VtsHalNeuralnetworksV1_2TargetTest",
defaults: ["VtsHalTargetTestDefaults"],
srcs: [
"BasicTests.cpp",
"Callbacks.cpp",
"CompilationCachingTests.cpp",
"GeneratedTestHarness.cpp",
"TestAssertions.cpp",
"ValidateModel.cpp",
"ValidateRequest.cpp",
"ValidateBurst.cpp",
"VtsHalNeuralnetworks.cpp",
"Callbacks.cpp",
"GeneratedTestHarness.cpp",
],
local_include_dirs: ["include"],
shared_libs: [
@@ -42,41 +45,13 @@ cc_defaults {
"libneuralnetworks_utils",
"VtsHalNeuralNetworksV1_0_utils",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
"neuralnetworks_generated_V1_2_example",
],
header_libs: [
"libneuralnetworks_headers",
],
test_suites: ["general-tests"],
}

cc_test {
name: "VtsHalNeuralnetworksV1_2TargetTest",
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
"CompilationCachingTests.cpp",
"ValidateBurst.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
"neuralnetworks_generated_V1_2_example",
],
}

cc_test {
name: "PresubmitHalNeuralnetworksV1_2TargetTest",
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
"CompilationCachingTests.cpp",
"ValidateBurst.cpp",
],
whole_static_libs: [
"neuralnetworks_generated_V1_0_example",
"neuralnetworks_generated_V1_1_example",
"neuralnetworks_generated_V1_2_example",
],
cflags: [
"-DPRESUBMIT_NOT_VTS",
],
}

@@ -25,17 +25,17 @@ using V1_0::ErrorStatus;
using V1_0::PerformanceInfo;

// create device test
TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
TEST_P(NeuralnetworksHidlTest, CreateDevice) {}

// status test
TEST_F(NeuralnetworksHidlTest, StatusTest) {
TEST_P(NeuralnetworksHidlTest, StatusTest) {
Return<DeviceStatus> status = kDevice->getStatus();
ASSERT_TRUE(status.isOk());
EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
}

// initialization
TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
using OperandPerformance = Capabilities::OperandPerformance;
Return<void> ret = kDevice->getCapabilities_1_2([](ErrorStatus status,
const Capabilities& capabilities) {
@@ -60,7 +60,7 @@ TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
}

// device version test
TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
TEST_P(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
Return<void> ret =
kDevice->getVersionString([](ErrorStatus status, const hidl_string& version) {
EXPECT_EQ(ErrorStatus::NONE, status);
@@ -70,7 +70,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
}

// device type test
TEST_F(NeuralnetworksHidlTest, GetDeviceTypeTest) {
TEST_P(NeuralnetworksHidlTest, GetDeviceTypeTest) {
Return<void> ret = kDevice->getType([](ErrorStatus status, DeviceType type) {
EXPECT_EQ(ErrorStatus::NONE, status);
EXPECT_TRUE(type == DeviceType::OTHER || type == DeviceType::CPU ||
@@ -80,7 +80,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceTypeTest) {
}

// device supported extensions test
TEST_F(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) {
TEST_P(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) {
Return<void> ret = kDevice->getSupportedExtensions(
[](ErrorStatus status, const hidl_vec<Extension>& extensions) {
EXPECT_EQ(ErrorStatus::NONE, status);
@@ -101,7 +101,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) {
}

// getNumberOfCacheFilesNeeded test
TEST_F(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) {
TEST_P(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) {
Return<void> ret = kDevice->getNumberOfCacheFilesNeeded(
[](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
EXPECT_EQ(ErrorStatus::NONE, status);

@@ -17,6 +17,7 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include <android-base/logging.h>
#include <fcntl.h>
#include <ftw.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>
@@ -37,11 +38,11 @@
// Forward declaration of the mobilenet generated test models in
// frameworks/ml/nn/runtime/test/generated/.
namespace generated_tests::mobilenet_224_gender_basic_fixed {
const ::test_helper::TestModel& get_test_model();
const test_helper::TestModel& get_test_model();
} // namespace generated_tests::mobilenet_224_gender_basic_fixed

namespace generated_tests::mobilenet_quantized {
const ::test_helper::TestModel& get_test_model();
const test_helper::TestModel& get_test_model();
} // namespace generated_tests::mobilenet_quantized

namespace android::hardware::neuralnetworks::V1_2::vts::functional {
@@ -53,13 +54,13 @@ using V1_1::ExecutionPreference;

namespace float32_model {

constexpr auto get_test_model = ::generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;
constexpr auto get_test_model = generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;

} // namespace float32_model

namespace quant8_model {

constexpr auto get_test_model = ::generated_tests::mobilenet_quantized::get_test_model;
constexpr auto get_test_model = generated_tests::mobilenet_quantized::get_test_model;

} // namespace quant8_model
@@ -217,12 +218,13 @@ TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) {
} // namespace

// Tag for the compilation caching tests.
class CompilationCachingTestBase : public NeuralnetworksHidlTest {
class CompilationCachingTestBase : public testing::Test {
protected:
CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
CompilationCachingTestBase(sp<IDevice> device, OperandType type)
: kDevice(std::move(device)), kOperandType(type) {}

void SetUp() override {
NeuralnetworksHidlTest::SetUp();
testing::Test::SetUp();
ASSERT_NE(kDevice.get(), nullptr);

// Create cache directory. The cache directory and a temporary cache file is always created
@@ -274,7 +276,7 @@ class CompilationCachingTestBase : public NeuralnetworksHidlTest {
};
nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
}
NeuralnetworksHidlTest::TearDown();
testing::Test::TearDown();
}

// Model and examples creators. According to kOperandType, the following methods will return
@@ -398,16 +400,21 @@ class CompilationCachingTestBase : public NeuralnetworksHidlTest {
uint32_t mNumDataCache;
uint32_t mIsCachingSupported;

const sp<IDevice> kDevice;
// The primary data type of the testModel.
const OperandType kOperandType;
};

using CompilationCachingTestParam = std::tuple<NamedDevice, OperandType>;

// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
public testing::WithParamInterface<OperandType> {
public testing::WithParamInterface<CompilationCachingTestParam> {
protected:
CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
CompilationCachingTest()
: CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
std::get<OperandType>(GetParam())) {}
};

TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
@@ -1192,16 +1199,30 @@ TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
}
}

static const auto kNamedDeviceChoices = testing::ValuesIn(getNamedDevices());
static const auto kOperandTypeChoices =
testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
std::string printCompilationCachingTest(
const testing::TestParamInfo<CompilationCachingTestParam>& info) {
const auto& [namedDevice, operandType] = info.param;
const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
return gtestCompliantName(getName(namedDevice) + "_" + type);
}

INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest,
testing::Combine(kNamedDeviceChoices, kOperandTypeChoices),
printCompilationCachingTest);

using CompilationCachingSecurityTestParam = std::tuple<NamedDevice, OperandType, uint32_t>;

class CompilationCachingSecurityTest
: public CompilationCachingTestBase,
public testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
public testing::WithParamInterface<CompilationCachingSecurityTestParam> {
protected:
CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
CompilationCachingSecurityTest()
: CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
std::get<OperandType>(GetParam())) {}

void SetUp() {
CompilationCachingTestBase::SetUp();
@@ -1291,7 +1312,7 @@ class CompilationCachingSecurityTest
}
}

const uint32_t kSeed = std::get<1>(GetParam());
const uint32_t kSeed = std::get<uint32_t>(GetParam());
std::mt19937 generator;
};

@@ -1338,7 +1359,16 @@ TEST_P(CompilationCachingSecurityTest, WrongToken) {
});
}

std::string printCompilationCachingSecurityTest(
const testing::TestParamInfo<CompilationCachingSecurityTestParam>& info) {
const auto& [namedDevice, operandType, seed] = info.param;
const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
return gtestCompliantName(getName(namedDevice) + "_" + type + "_" + std::to_string(seed));
}

INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
testing::Combine(kOperandTypeChoices, testing::Range(0U, 10U)));
testing::Combine(kNamedDeviceChoices, kOperandTypeChoices,
testing::Range(0U, 10U)),
printCompilationCachingSecurityTest);

} // namespace android::hardware::neuralnetworks::V1_2::vts::functional

@@ -190,7 +190,7 @@ static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& prepar
}
static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
const sp<IPreparedModel>& preparedModel) {
return ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
return android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
}
enum class Executor { ASYNC, SYNC, BURST };
@@ -371,6 +371,20 @@ void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDyn
EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
}

void GeneratedTestBase::SetUp() {
testing::TestWithParam<GeneratedTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
return TestModelManager::get().getTestModels(filter);
}

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
const auto& [namedDevice, namedModel] = info.param;
return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {};

@@ -22,34 +22,43 @@
#include <android/hardware/neuralnetworks/1.2/types.h>
#include <functional>
#include <vector>
#include "1.0/Utils.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"

namespace android::hardware::neuralnetworks::V1_2::vts::functional {

class GeneratedTestBase
: public NeuralnetworksHidlTest,
public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
using NamedModel = Named<const test_helper::TestModel*>;
using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;

class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
protected:
const test_helper::TestModel& kTestModel = *GetParam().second;
void SetUp() override;
const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
};

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
using FilterFn = std::function<bool(const test_helper::TestModel&)>;
std::vector<NamedModel> getNamedModels(const FilterFn& filter);

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);

#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \
testing::Combine(testing::ValuesIn(getNamedDevices()), \
testing::ValuesIn(getNamedModels(filter))), \
printGeneratedTest)

// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {};

Model createModel(const ::test_helper::TestModel& testModel);
Model createModel(const test_helper::TestModel& testModel);

void PrepareModel(const sp<IDevice>& device, const Model& model, sp<IPreparedModel>* preparedModel);

void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel,
const ::test_helper::TestModel& testModel, bool testDynamicOutputShape);
const test_helper::TestModel& testModel, bool testDynamicOutputShape);

} // namespace android::hardware::neuralnetworks::V1_2::vts::functional

@@ -262,7 +262,7 @@ static void validateBurstSerialization(const sp<IPreparedModel>& preparedModel,
}));

// serialize the request
const auto serialized = ::android::nn::serialize(request, MeasureTiming::YES, slots);
const auto serialized = android::nn::serialize(request, MeasureTiming::YES, slots);

// validations
removeDatumTest(sender.get(), receiver.get(), serialized);
@@ -299,7 +299,7 @@ static void validateBurstFmqLength(const sp<IPreparedModel>& preparedModel,
// skip test if regular burst output isn't useful for testing a failure
// caused by having too small of a length for the result FMQ
const std::vector<FmqResultDatum> serialized =
::android::nn::serialize(statusRegular, outputShapesRegular, timingRegular);
android::nn::serialize(statusRegular, outputShapesRegular, timingRegular);
if (statusRegular != ErrorStatus::NONE ||
serialized.size() <= kExecutionBurstChannelSmallLength) {
return;

@@ -94,7 +94,7 @@ static void validate(const sp<IPreparedModel>& preparedModel, const std::string&

// create burst
std::shared_ptr<::android::nn::ExecutionBurstController> burst =
::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
ASSERT_NE(nullptr, burst.get());

// create memory keys

@@ -17,13 +17,15 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include "VtsHalNeuralnetworks.h"
#include <android-base/logging.h>
#include <hidl/ServiceManagement.h>
#include <string>
#include <utility>
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"

#include <android-base/logging.h>

namespace android::hardware::neuralnetworks::V1_2::vts::functional {

using implementation::PreparedModelCallback;
@@ -82,34 +84,39 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
ASSERT_NE(nullptr, preparedModel->get());
}

// A class for test environment setup
NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
// This has to return a "new" object because it is freed inside
// testing::AddGlobalTestEnvironment when the gtest is being torn down
static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
return instance;
}

void NeuralnetworksHidlEnvironment::registerTestServices() {
registerTestService<IDevice>();
}

// The main test class for NEURALNETWORK HIDL HAL.
void NeuralnetworksHidlTest::SetUp() {
testing::VtsHalHidlTargetTestBase::SetUp();

#ifdef PRESUBMIT_NOT_VTS
const std::string name =
NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
const std::string sampleDriver = "sample-";
if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
GTEST_SKIP();
}
#endif // PRESUBMIT_NOT_VTS

ASSERT_NE(nullptr, kDevice.get());
testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
}

static NamedDevice makeNamedDevice(const std::string& name) {
return {name, IDevice::getService(name)};
}

static std::vector<NamedDevice> getNamedDevicesImpl() {
// Retrieves the name of all service instances that implement IDevice,
// including any Lazy HAL instances.
const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);

// Get a handle to each device and pair it with its name.
std::vector<NamedDevice> namedDevices;
namedDevices.reserve(names.size());
std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
return namedDevices;
}

const std::vector<NamedDevice>& getNamedDevices() {
const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
return devices;
}

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
return gtestCompliantName(getName(info.param));
}

INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);

// Forward declaration from ValidateModel.cpp
void validateModel(const sp<IDevice>& device, const Model& model);
// Forward declaration from ValidateRequest.cpp
@@ -162,14 +169,3 @@ sp<IPreparedModel> getPreparedModel_1_2(const sp<implementation::PreparedModelCa
}

} // namespace android::hardware::neuralnetworks::V1_2::vts::functional

using android::hardware::neuralnetworks::V1_2::vts::functional::NeuralnetworksHidlEnvironment;

int main(int argc, char** argv) {
testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
testing::InitGoogleTest(&argc, argv);
NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);

int status = RUN_ALL_TESTS();
return status;
}

@@ -17,42 +17,33 @@
#ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H
#define ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H

#include <VtsHalHidlTargetTestBase.h>
#include <VtsHalHidlTargetTestEnvBase.h>
#include <android-base/macros.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/types.h>
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.2/types.h>
#include <gtest/gtest.h>

#include "1.0/Utils.h"
#include "1.2/Callbacks.h"

namespace android::hardware::neuralnetworks::V1_2::vts::functional {

// A class for test environment setup
class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
NeuralnetworksHidlEnvironment() = default;

public:
static NeuralnetworksHidlEnvironment* getInstance();
void registerTestServices() override;
};

// The main test class for NEURALNETWORKS HIDL HAL.
class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);

public:
NeuralnetworksHidlTest() = default;
void SetUp() override;
using NamedDevice = Named<sp<IDevice>>;
using NeuralnetworksHidlTestParam = NamedDevice;

class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
protected:
const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
NeuralnetworksHidlEnvironment::getInstance());
void SetUp() override;
const sp<IDevice> kDevice = getData(GetParam());
};

const std::vector<NamedDevice>& getNamedDevices();

std::string printNeuralnetworksHidlTest(
const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);

#define INSTANTIATE_DEVICE_TEST(TestSuite) \
INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
printNeuralnetworksHidlTest)

// Create an IPreparedModel object. If the model cannot be prepared,
// "preparedModel" will be nullptr instead.
void createPreparedModel(const sp<IDevice>& device, const Model& model,

@@ -1,26 +1,35 @@
{
"presubmit": [
{
"name": "PresubmitHalNeuralnetworksV1_0TargetTest",
"name": "VtsHalNeuralnetworksV1_0TargetTest",
"options": [
{
"native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.0::IDevice/sample-all"
// Just use sample-all driver for presubmit tests for faster results.
// The other sample drivers (fast-float, quant, etc.) are subsets of
// sample-all.
"native-test-flag": "--gtest_filter=*sample_all*"
}
]
},
{
"name": "PresubmitHalNeuralnetworksV1_1TargetTest",
"name": "VtsHalNeuralnetworksV1_1TargetTest",
"options": [
{
"native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.1::IDevice/sample-all"
// Just use sample-all driver for presubmit tests for faster results.
// The other sample drivers (fast-float, quant, etc.) are subsets of
// sample-all.
"native-test-flag": "--gtest_filter=*sample_all*"
}
]
},
{
"name": "PresubmitHalNeuralnetworksV1_2TargetTest",
"name": "VtsHalNeuralnetworksV1_2TargetTest",
"options": [
{
"native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.2::IDevice/sample-all"
// Just use sample-all driver for presubmit tests for faster results.
// The other sample drivers (fast-float, quant, etc.) are subsets of
// sample-all.
"native-test-flag": "--gtest_filter=*sample_all*"
}
]
}