Merge "Enable NN VTS and utility code to use lazy services" am: a546efe5ef

Original change: https://android-review.googlesource.com/c/platform/hardware/interfaces/+/1674540

Change-Id: I344d508f27ff68692b47419f69c7b06edccad1b3
This commit is contained in:
Michael Butler 2021-04-14 16:55:06 +00:00 committed by Automerger Merge Worker
commit 08454e9895
4 changed files with 266 additions and 6 deletions

View file

@@ -0,0 +1,70 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/neuralnetworks/BnBuffer.h>
#include <aidl/android/hardware/neuralnetworks/BnDevice.h>
#include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
#include <android/binder_auto_utils.h>
#include <memory>
#include <string>
#include <vector>
namespace aidl::android::hardware::neuralnetworks {
// InvalidDevice is a stub implementation of the NN AIDL IDevice interface.
// Query methods succeed and return fixed placeholder values (worst-case
// performance, no extensions, no cache files, version "invalid"); all
// prepare/allocate operations fail with GENERAL_FAILURE.
class InvalidDevice : public BnDevice {
public:
// Factory: builds an InvalidDevice with the fixed placeholder values
// described above (see InvalidDevice.cpp for the exact constants).
static std::shared_ptr<InvalidDevice> create();
// Stores the provided values verbatim in the k* members below.
InvalidDevice(Capabilities capabilities, const NumberOfCacheFiles& numberOfCacheFiles,
std::vector<Extension> extensions, DeviceType deviceType,
std::string versionString);
// Always fails with GENERAL_FAILURE: an invalid device cannot allocate
// driver-managed buffers.
ndk::ScopedAStatus allocate(const BufferDesc& desc,
const std::vector<IPreparedModelParcel>& preparedModels,
const std::vector<BufferRole>& inputRoles,
const std::vector<BufferRole>& outputRoles,
DeviceBuffer* deviceBuffer) override;
// The getters below simply return the corresponding k* member.
ndk::ScopedAStatus getCapabilities(Capabilities* capabilities) override;
ndk::ScopedAStatus getNumberOfCacheFilesNeeded(NumberOfCacheFiles* numberOfCacheFiles) override;
ndk::ScopedAStatus getSupportedExtensions(std::vector<Extension>* extensions) override;
// Validates the model, then reports every operation as unsupported.
ndk::ScopedAStatus getSupportedOperations(const Model& model,
std::vector<bool>* supportedOperations) override;
ndk::ScopedAStatus getType(DeviceType* deviceType) override;
ndk::ScopedAStatus getVersionString(std::string* versionString) override;
// Validates all arguments like a real driver, then reports
// GENERAL_FAILURE through the callback.
ndk::ScopedAStatus prepareModel(
const Model& model, ExecutionPreference preference, Priority priority, int64_t deadline,
const std::vector<ndk::ScopedFileDescriptor>& modelCache,
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback) override;
// Always fails: nothing can be prepared from cache on an invalid device.
ndk::ScopedAStatus prepareModelFromCache(
int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback) override;
private:
// Immutable placeholder values fixed at construction time.
const Capabilities kCapabilities;
const NumberOfCacheFiles kNumberOfCacheFiles;
const std::vector<Extension> kExtensions;
const DeviceType kDeviceType;
const std::string kVersionString;
};
} // namespace aidl::android::hardware::neuralnetworks

View file

@@ -0,0 +1,179 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "InvalidDevice"
#include "InvalidDevice.h"
#include <aidl/android/hardware/neuralnetworks/BnBuffer.h>
#include <aidl/android/hardware/neuralnetworks/BnDevice.h>
#include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
#include <android/binder_auto_utils.h>
#include "Conversions.h"
#include "Utils.h"
#include <memory>
#include <string>
#include <utility>
#include <vector>
namespace aidl::android::hardware::neuralnetworks {
namespace {

// Converts an ErrorStatus (plus a human-readable message) into the binder
// status returned to the client. ErrorStatus::NONE maps to an OK status;
// any other value becomes a service-specific error carrying the message.
ndk::ScopedAStatus toAStatus(ErrorStatus errorStatus, const std::string& errorMessage) {
    if (errorStatus != ErrorStatus::NONE) {
        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
                static_cast<int32_t>(errorStatus), errorMessage.c_str());
    }
    return ndk::ScopedAStatus::ok();
}

}  // namespace
// Builds the canonical "invalid" device: worst-possible performance numbers
// (so no selection heuristic ever prefers it), no operand performance
// entries, no extensions, no cache files, type OTHER, version "invalid".
std::shared_ptr<InvalidDevice> InvalidDevice::create() {
    // Worst-case performance for every category.
    constexpr auto kWorstPerf = PerformanceInfo{
            .execTime = std::numeric_limits<float>::max(),
            .powerUsage = std::numeric_limits<float>::max(),
    };
    Capabilities capabilities{
            .relaxedFloat32toFloat16PerformanceScalar = kWorstPerf,
            .relaxedFloat32toFloat16PerformanceTensor = kWorstPerf,
            .operandPerformance = {},
            .ifPerformance = kWorstPerf,
            .whilePerformance = kWorstPerf,
    };
    // An invalid device has no compilation caching support.
    constexpr auto kNoCacheFiles = NumberOfCacheFiles{
            .numModelCache = 0,
            .numDataCache = 0,
    };
    return ndk::SharedRefBase::make<InvalidDevice>(std::move(capabilities), kNoCacheFiles,
                                                   std::vector<Extension>{}, DeviceType::OTHER,
                                                   std::string("invalid"));
}
// Stores the supplied device description verbatim; movable arguments are
// taken by value and moved into the const members.
InvalidDevice::InvalidDevice(Capabilities capabilities,
const NumberOfCacheFiles& numberOfCacheFiles,
std::vector<Extension> extensions, DeviceType deviceType,
std::string versionString)
: kCapabilities(std::move(capabilities)),
kNumberOfCacheFiles(numberOfCacheFiles),
kExtensions(std::move(extensions)),
kDeviceType(deviceType),
kVersionString(std::move(versionString)) {}
// Driver-managed buffer allocation is never possible on an invalid device;
// all arguments are ignored and GENERAL_FAILURE is returned.
ndk::ScopedAStatus InvalidDevice::allocate(const BufferDesc& /*desc*/,
                                           const std::vector<IPreparedModelParcel>& /*models*/,
                                           const std::vector<BufferRole>& /*inputRoles*/,
                                           const std::vector<BufferRole>& /*outputRoles*/,
                                           DeviceBuffer* /*deviceBuffer*/) {
    return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
}
// Reports the fixed worst-case capabilities captured at construction.
ndk::ScopedAStatus InvalidDevice::getCapabilities(Capabilities* capabilities) {
    *capabilities = kCapabilities;
    return ndk::ScopedAStatus::ok();
}
// Reports the fixed cache-file counts (zero for a device created via
// InvalidDevice::create).
ndk::ScopedAStatus InvalidDevice::getNumberOfCacheFilesNeeded(
        NumberOfCacheFiles* numberOfCacheFiles) {
    *numberOfCacheFiles = kNumberOfCacheFiles;
    return ndk::ScopedAStatus::ok();
}
// Reports the fixed extension list captured at construction (empty for a
// device created via InvalidDevice::create).
ndk::ScopedAStatus InvalidDevice::getSupportedExtensions(std::vector<Extension>* extensions) {
    *extensions = kExtensions;
    return ndk::ScopedAStatus::ok();
}
// Validates the model like a real driver, then declares every operation in
// the main subgraph unsupported.
ndk::ScopedAStatus InvalidDevice::getSupportedOperations(const Model& model,
                                                         std::vector<bool>* supportedOperations) {
    const auto validationResult = utils::validate(model);
    if (!validationResult.ok()) {
        return toAStatus(ErrorStatus::INVALID_ARGUMENT, validationResult.error());
    }
    // One "false" entry per operation in the main subgraph.
    supportedOperations->assign(model.main.operations.size(), false);
    return ndk::ScopedAStatus::ok();
}
// Reports the fixed device type captured at construction.
ndk::ScopedAStatus InvalidDevice::getType(DeviceType* deviceType) {
    *deviceType = kDeviceType;
    return ndk::ScopedAStatus::ok();
}
// Reports the fixed version string captured at construction.
ndk::ScopedAStatus InvalidDevice::getVersionString(std::string* versionString) {
    *versionString = kVersionString;
    return ndk::ScopedAStatus::ok();
}
// Validates every argument exactly as a real driver must (so VTS argument
// validation behaves identically against an invalid device), then reports
// GENERAL_FAILURE through the callback: an invalid device can never prepare
// a model. Returns OK after a successful callback delivery; returns
// INVALID_ARGUMENT (also delivered via the callback when possible) for bad
// arguments.
ndk::ScopedAStatus InvalidDevice::prepareModel(
        const Model& model, ExecutionPreference preference, Priority priority, int64_t deadline,
        const std::vector<ndk::ScopedFileDescriptor>& modelCache,
        const std::vector<ndk::ScopedFileDescriptor>& dataCache, const std::vector<uint8_t>& token,
        const std::shared_ptr<IPreparedModelCallback>& callback) {
    // Without a callback there is no way to report the failure asynchronously.
    if (callback.get() == nullptr) {
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "invalid callback passed to InvalidDevice::prepareModel");
    }
    if (const auto result = utils::validate(model); !result.ok()) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
    }
    if (const auto result = utils::validate(preference); !result.ok()) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
    }
    if (const auto result = utils::validate(priority); !result.ok()) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
    }
    // -1 is the sentinel for "no deadline"; anything more negative is invalid.
    if (deadline < -1) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "Invalid deadline " + std::to_string(deadline));
    }
    if (modelCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numModelCache)) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "Invalid modelCache, size = " + std::to_string(modelCache.size()));
    }
    if (dataCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numDataCache)) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        // Fixed copy-paste error: this message previously said "modelCache".
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "Invalid dataCache, size = " + std::to_string(dataCache.size()));
    }
    if (token.size() != IDevice::BYTE_SIZE_OF_CACHE_TOKEN) {
        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
        // Report the actual (invalid) token size, not the expected constant.
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "Invalid cache token, size = " + std::to_string(token.size()));
    }
    // Arguments are well-formed, but preparation can never succeed here.
    callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);
    return ndk::ScopedAStatus::ok();
}
// Always fails: an invalid device cannot prepare a model from cache. The
// failure is reported both through the callback and as the returned status.
ndk::ScopedAStatus InvalidDevice::prepareModelFromCache(
        int64_t /*deadline*/, const std::vector<ndk::ScopedFileDescriptor>& /*modelCache*/,
        const std::vector<ndk::ScopedFileDescriptor>& /*dataCache*/,
        const std::vector<uint8_t>& /*token*/,
        const std::shared_ptr<IPreparedModelCallback>& callback) {
    // Guard against a null callback before dereferencing it (prepareModel
    // performs the same check; previously this would crash on nullptr).
    if (callback.get() == nullptr) {
        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
                         "invalid callback passed to InvalidDevice::prepareModelFromCache");
    }
    callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);
    return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
}
} // namespace aidl::android::hardware::neuralnetworks

View file

@@ -16,6 +16,7 @@
#include "Service.h"
#include <AndroidVersionUtil.h>
#include <android/binder_auto_utils.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
@@ -35,13 +36,23 @@ nn::GeneralResult<nn::SharedDevice> getDevice(const std::string& instanceName) {
hal::utils::ResilientDevice::Factory makeDevice =
[instanceName,
name = std::move(fullName)](bool blocking) -> nn::GeneralResult<nn::SharedDevice> {
const auto& getService =
blocking ? AServiceManager_getService : AServiceManager_checkService;
std::add_pointer_t<AIBinder*(const char*)> getService;
if (blocking) {
if (__builtin_available(android __NNAPI_AIDL_MIN_ANDROID_API__, *)) {
getService = AServiceManager_waitForService;
} else {
getService = AServiceManager_getService;
}
} else {
getService = AServiceManager_checkService;
}
auto service = IDevice::fromBinder(ndk::SpAIBinder(getService(name.c_str())));
if (service == nullptr) {
return NN_ERROR() << (blocking ? "AServiceManager_getService"
: "AServiceManager_checkService")
<< " returned nullptr";
return NN_ERROR()
<< (blocking ? "AServiceManager_waitForService (or AServiceManager_getService)"
: "AServiceManager_checkService")
<< " returned nullptr";
}
ABinderProcess_startThreadPool();
return Device::create(instanceName, std::move(service));

View file

@@ -94,7 +94,7 @@ void NeuralNetworksAidlTest::SetUp() {
}
static NamedDevice makeNamedDevice(const std::string& name) {
ndk::SpAIBinder binder(AServiceManager_getService(name.c_str()));
ndk::SpAIBinder binder(AServiceManager_waitForService(name.c_str()));
return {name, IDevice::fromBinder(binder)};
}