Cleanup how transport errors are handled in NN utils am: cca3e20e23 am: 9c51fc5008

Original change: https://android-review.googlesource.com/c/platform/hardware/interfaces/+/1520619
MUST ONLY BE SUBMITTED BY AUTOMERGER

Change-Id: If5d805de06eebdfae54b0e9e1205d24f118a006d
commit 4ffc5da286

10 changed files with 66 additions and 84 deletions
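The change below is mechanical at the call sites: code that previously unwrapped HIDL transport results with NN_TRY(hal::utils::handleTransportError(ret)), sometimes further wrapped in hal::utils::makeExecutionFailure(...), now uses the new HANDLE_TRANSPORT_FAILURE(ret) macro, Device::wait() gains an explicit return {}; after the macro, and the shared utils header folds its void specializations into the primary templates with if constexpr. Hedged standalone sketches of the new macro and of the if constexpr pattern follow the relevant hunks near the end of the diff.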
@@ -56,7 +56,7 @@ nn::GeneralResult<nn::Capabilities> initCapabilities(V1_0::IDevice* device) {
     };
 
     const auto ret = device->getCapabilities(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -119,7 +119,8 @@ std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
 
 nn::GeneralResult<void> Device::wait() const {
     const auto ret = kDevice->ping();
-    return hal::utils::handleTransportError(ret);
+    HANDLE_TRANSPORT_FAILURE(ret);
+    return {};
 }
 
 nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {

@@ -148,7 +149,7 @@ nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Mo
     };
 
     const auto ret = kDevice->getSupportedOperations(hidlModel, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -168,7 +169,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kDevice->prepareModel(hidlModel, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);

@@ -67,8 +67,7 @@ nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Prepare
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kPreparedModel->execute(hidlRequest, cb);
-    const auto status =
-            NN_TRY(hal::utils::makeExecutionFailure(hal::utils::handleTransportError(ret)));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "execute failed with " << toString(status);
@@ -57,7 +57,7 @@ nn::GeneralResult<nn::Capabilities> initCapabilities(V1_1::IDevice* device) {
     };
 
     const auto ret = device->getCapabilities_1_1(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -120,7 +120,8 @@ std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
 
 nn::GeneralResult<void> Device::wait() const {
     const auto ret = kDevice->ping();
-    return hal::utils::handleTransportError(ret);
+    HANDLE_TRANSPORT_FAILURE(ret);
+    return {};
 }
 
 nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {

@@ -150,7 +151,7 @@ nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Mo
     };
 
     const auto ret = kDevice->getSupportedOperations_1_1(hidlModel, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -171,7 +172,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kDevice->prepareModel_1_1(hidlModel, hidlPreference, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != V1_0::ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);
@@ -59,7 +59,7 @@ nn::GeneralResult<nn::Capabilities> initCapabilities(V1_2::IDevice* device) {
     };
 
     const auto ret = device->getCapabilities_1_2(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -81,7 +81,7 @@ nn::GeneralResult<std::string> initVersionString(V1_2::IDevice* device) {
     };
 
     const auto ret = device->getVersionString(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -101,7 +101,7 @@ nn::GeneralResult<nn::DeviceType> initDeviceType(V1_2::IDevice* device) {
     };
 
     const auto ret = device->getType(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -121,7 +121,7 @@ nn::GeneralResult<std::vector<nn::Extension>> initExtensions(V1_2::IDevice* devi
     };
 
     const auto ret = device->getSupportedExtensions(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -144,7 +144,7 @@ nn::GeneralResult<std::pair<uint32_t, uint32_t>> initNumberOfCacheFilesNeeded(
     };
 
     const auto ret = device->getNumberOfCacheFilesNeeded(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -217,7 +217,8 @@ std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
 
 nn::GeneralResult<void> Device::wait() const {
     const auto ret = kDevice->ping();
-    return hal::utils::handleTransportError(ret);
+    HANDLE_TRANSPORT_FAILURE(ret);
+    return {};
 }
 
 nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {

@@ -247,7 +248,7 @@ nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Mo
     };
 
     const auto ret = kDevice->getSupportedOperations_1_2(hidlModel, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -272,7 +273,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
 
     const auto ret = kDevice->prepareModel_1_2(hidlModel, hidlPreference, hidlModelCache,
                                                hidlDataCache, hidlToken, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != V1_0::ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModel_1_2 failed with " << toString(status);

@@ -292,7 +293,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kDevice->prepareModelFromCache(hidlModelCache, hidlDataCache, hidlToken, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != V1_0::ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModelFromCache failed with " << toString(status);

@@ -83,7 +83,7 @@ PreparedModel::executeSynchronously(const V1_0::Request& request, MeasureTiming
     };
 
     const auto ret = kPreparedModel->executeSynchronously(request, measure, cb);
-    NN_TRY(hal::utils::makeExecutionFailure(hal::utils::handleTransportError(ret)));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -94,8 +94,7 @@ PreparedModel::executeAsynchronously(const V1_0::Request& request, MeasureTiming
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kPreparedModel->execute_1_2(request, measure, cb);
-    const auto status =
-            NN_TRY(hal::utils::makeExecutionFailure(hal::utils::handleTransportError(ret)));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != V1_0::ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "execute failed with " << toString(status);
@@ -64,7 +64,7 @@ nn::GeneralResult<void> Buffer::copyTo(const nn::Memory& dst) const {
     const auto hidlDst = NN_TRY(convert(dst));
 
     const auto ret = kBuffer->copyTo(hidlDst);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "IBuffer::copyTo failed with " << toString(status);

@@ -79,7 +79,7 @@ nn::GeneralResult<void> Buffer::copyFrom(const nn::Memory& src,
     const auto hidlDimensions = hidl_vec<uint32_t>(dimensions);
 
     const auto ret = kBuffer->copyFrom(hidlSrc, hidlDimensions);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "IBuffer::copyFrom failed with " << toString(status);
@@ -86,7 +86,7 @@ nn::GeneralResult<nn::Capabilities> initCapabilities(V1_3::IDevice* device) {
     };
 
     const auto ret = device->getCapabilities_1_3(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -162,7 +162,8 @@ std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
 
 nn::GeneralResult<void> Device::wait() const {
     const auto ret = kDevice->ping();
-    return hal::utils::handleTransportError(ret);
+    HANDLE_TRANSPORT_FAILURE(ret);
+    return {};
 }
 
 nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {

@@ -191,7 +192,7 @@ nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Mo
     };
 
     const auto ret = kDevice->getSupportedOperations_1_3(hidlModel, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -219,7 +220,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
     const auto ret =
            kDevice->prepareModel_1_3(hidlModel, hidlPreference, hidlPriority, hidlDeadline,
                                      hidlModelCache, hidlDataCache, hidlToken, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModel_1_3 failed with " << toString(status);

@@ -241,7 +242,7 @@ nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
 
     const auto ret = kDevice->prepareModelFromCache_1_3(hidlDeadline, hidlModelCache, hidlDataCache,
                                                         hidlToken, cb);
-    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
     if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "prepareModelFromCache_1_3 failed with " << toString(status);

@@ -277,7 +278,7 @@ nn::GeneralResult<nn::SharedBuffer> Device::allocate(
 
     const auto ret =
            kDevice->allocate(hidlDesc, hidlPreparedModels, hidlInputRoles, hidlOutputRoles, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -89,7 +89,7 @@ convertExecuteFencedResults(const hidl_handle& syncFence,
     };
 
     const auto ret = callback->getExecutionInfo(cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 };

@@ -133,7 +133,7 @@ PreparedModel::executeSynchronously(const Request& request, V1_2::MeasureTiming
 
     const auto ret = kPreparedModel->executeSynchronously_1_3(request, measure, deadline,
                                                               loopTimeoutDuration, cb);
-    NN_TRY(hal::utils::makeExecutionFailure(hal::utils::handleTransportError(ret)));
+    HANDLE_TRANSPORT_FAILURE(ret);
 
     return result;
 }

@@ -147,8 +147,7 @@ PreparedModel::executeAsynchronously(const Request& request, V1_2::MeasureTiming
 
     const auto ret =
            kPreparedModel->execute_1_3(request, measure, deadline, loopTimeoutDuration, cb);
-    const auto status =
-            NN_TRY(hal::utils::makeExecutionFailure(hal::utils::handleTransportError(ret)));
+    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
    if (status != ErrorStatus::NONE) {
         const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
         return NN_ERROR(canonical) << "executeAsynchronously failed with " << toString(status);

@@ -230,7 +229,7 @@ PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::S
     const auto ret = kPreparedModel->executeFenced(hidlRequest, hidlWaitFor, hidlMeasure,
                                                    hidlDeadline, hidlLoopTimeoutDuration,
                                                    hidlTimeoutDurationAfterFence, cb);
-    NN_TRY(hal::utils::handleTransportError(ret));
+    HANDLE_TRANSPORT_FAILURE(ret);
     auto [syncFence, callback] = NN_TRY(std::move(result));
 
     // If executeFenced required the request memory to be moved into shared memory, block here until
@@ -19,65 +19,46 @@
 #include <nnapi/Result.h>
 #include <nnapi/Types.h>
 
+#include <type_traits>
+
 namespace android::hardware::neuralnetworks::utils {
 
 template <typename Type>
 nn::GeneralResult<Type> handleTransportError(const hardware::Return<Type>& ret) {
     if (ret.isDeadObject()) {
-        return NN_ERROR(nn::ErrorStatus::DEAD_OBJECT)
+        return nn::error(nn::ErrorStatus::DEAD_OBJECT)
                << "Return<>::isDeadObject returned true: " << ret.description();
     }
     if (!ret.isOk()) {
-        return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
+        return nn::error(nn::ErrorStatus::GENERAL_FAILURE)
                << "Return<>::isOk returned false: " << ret.description();
     }
-    return ret;
-}
-
-template <>
-inline nn::GeneralResult<void> handleTransportError(const hardware::Return<void>& ret) {
-    if (ret.isDeadObject()) {
-        return NN_ERROR(nn::ErrorStatus::DEAD_OBJECT)
-               << "Return<>::isDeadObject returned true: " << ret.description();
-    }
-    if (!ret.isOk()) {
-        return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
-               << "Return<>::isOk returned false: " << ret.description();
-    }
-    return {};
+    if constexpr (!std::is_same_v<Type, void>) {
+        return static_cast<Type>(ret);
+    } else {
+        return {};
+    }
 }
 
+#define HANDLE_TRANSPORT_FAILURE(ret)                                                         \
+    ({                                                                                        \
+        auto result = ::android::hardware::neuralnetworks::utils::handleTransportError(ret);  \
+        if (!result.has_value()) {                                                            \
+            return NN_ERROR(result.error().code) << result.error().message;                   \
+        }                                                                                     \
+        std::move(result).value();                                                            \
+    })
+
 template <typename Type>
 nn::GeneralResult<Type> makeGeneralFailure(nn::Result<Type> result, nn::ErrorStatus status) {
     if (!result.has_value()) {
         return nn::error(status) << std::move(result).error();
     }
-    return std::move(result).value();
-}
-
-template <>
-inline nn::GeneralResult<void> makeGeneralFailure(nn::Result<void> result, nn::ErrorStatus status) {
-    if (!result.has_value()) {
-        return nn::error(status) << std::move(result).error();
-    }
-    return {};
+    if constexpr (!std::is_same_v<Type, void>) {
+        return std::move(result).value();
+    } else {
+        return {};
+    }
 }
 
-template <typename Type>
-nn::ExecutionResult<Type> makeExecutionFailure(nn::Result<Type> result, nn::ErrorStatus status) {
-    if (!result.has_value()) {
-        return nn::error(status) << std::move(result).error();
-    }
-    return std::move(result).value();
-}
-
-template <>
-inline nn::ExecutionResult<void> makeExecutionFailure(nn::Result<void> result,
-                                                      nn::ErrorStatus status) {
-    if (!result.has_value()) {
-        return nn::error(status) << std::move(result).error();
-    }
-    return {};
-}
-
 template <typename Type>

@@ -86,16 +67,16 @@ nn::ExecutionResult<Type> makeExecutionFailure(nn::GeneralResult<Type> result) {
         const auto [message, status] = std::move(result).error();
         return nn::error(status) << message;
     }
-    return std::move(result).value();
-}
-
-template <>
-inline nn::ExecutionResult<void> makeExecutionFailure(nn::GeneralResult<void> result) {
-    if (!result.has_value()) {
-        const auto [message, status] = std::move(result).error();
-        return nn::error(status) << message;
-    }
-    return {};
+    if constexpr (!std::is_same_v<Type, void>) {
+        return std::move(result).value();
+    } else {
+        return {};
+    }
+}
+
+template <typename Type>
+nn::ExecutionResult<Type> makeExecutionFailure(nn::Result<Type> result, nn::ErrorStatus status) {
+    return makeExecutionFailure(makeGeneralFailure(result, status));
 }
 
 } // namespace android::hardware::neuralnetworks::utils
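The HANDLE_TRANSPORT_FAILURE macro above is built on a GCC/Clang statement expression, ({ ... }), which lets a macro either evaluate to the unwrapped value or return the error from the enclosing function. Below is a hypothetical, self-contained sketch of that control flow; ToyResult, toyHandleTransport, TOY_HANDLE_TRANSPORT_FAILURE, and caller are invented stand-ins for nn::GeneralResult, handleTransportError, the real macro, and a call site, and are not part of this change. It builds with the GNU dialect, for example g++ -std=gnu++17.

#include <iostream>
#include <optional>
#include <string>
#include <utility>

// Toy stand-in for nn::GeneralResult<T>: either a value or an error message.
template <typename T>
struct ToyResult {
    std::optional<T> value;
    std::string error;
    bool has_value() const { return value.has_value(); }
};

// Toy stand-in for handleTransportError(): success wraps the payload, failure
// carries a description, mirroring the isDeadObject()/isOk() checks above.
ToyResult<int> toyHandleTransport(int payload, bool transportOk) {
    if (!transportOk) {
        return {std::nullopt, "Return<>::isOk returned false"};
    }
    return {payload, {}};
}

// Same control flow as HANDLE_TRANSPORT_FAILURE: the statement expression
// yields the unwrapped value on success and early-returns the error from the
// enclosing function on failure.
#define TOY_HANDLE_TRANSPORT_FAILURE(payload, ok)              \
    ({                                                         \
        auto result = toyHandleTransport((payload), (ok));     \
        if (!result.has_value()) {                             \
            return ToyResult<int>{std::nullopt, result.error}; \
        }                                                      \
        std::move(result).value.value();                       \
    })

ToyResult<int> caller(bool transportOk) {
    const int status = TOY_HANDLE_TRANSPORT_FAILURE(42, transportOk);
    return {status + 1, {}};  // only reached when the transport call succeeded
}

int main() {
    std::cout << caller(true).value.value() << '\n';  // prints 43
    std::cout << caller(false).error << '\n';         // prints the error text
}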
@@ -58,7 +58,7 @@ nn::GeneralResult<DeathHandler> DeathHandler::create(sp<hidl::base::V1_0::IBase>
     auto deathRecipient = sp<DeathRecipient>::make();
 
     const auto ret = object->linkToDeath(deathRecipient, /*cookie=*/0);
-    const bool success = NN_TRY(handleTransportError(ret));
+    const bool success = HANDLE_TRANSPORT_FAILURE(ret);
     if (!success) {
         return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "IBase::linkToDeath returned false";
     }
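The header hunks also fold each template<> ... <void> specialization into its primary template using C++17 if constexpr: the branch not taken for a given Type is discarded at compile time, so one body can return the unwrapped value for non-void instantiations and an empty result for void. The following hypothetical, self-contained sketch illustrates the idea; Result, Return, and unwrap are invented stand-ins for nn::GeneralResult, hardware::Return, and handleTransportError, and are not part of this change. It builds as standard C++17.

#include <iostream>
#include <optional>
#include <type_traits>

// Toy value-or-nothing result; the void case simply has no payload.
template <typename T>
struct Result { std::optional<T> value; };
template <>
struct Result<void> {};

// Toy transport wrapper, standing in for hardware::Return<T>.
template <typename T>
struct Return { T payload; };
template <>
struct Return<void> {};

// One template replaces a primary template plus an explicit <void>
// specialization: the discarded if-constexpr branch is never instantiated,
// so ret.payload is never required to exist when Type is void.
template <typename Type>
Result<Type> unwrap(const Return<Type>& ret) {
    if constexpr (!std::is_same_v<Type, void>) {
        return Result<Type>{ret.payload};
    } else {
        return {};
    }
}

int main() {
    std::cout << unwrap(Return<int>{7}).value.value() << '\n';  // prints 7
    unwrap(Return<void>{});  // compiles and returns an empty Result<void>
}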