Add additional parameters for NN Burst -- HAL am: 8414a6ebf9

Original change: https://android-review.googlesource.com/c/platform/hardware/interfaces/+/1663920

Change-Id: If42779032ff2e6aa617c14a1f3e21379958eee05
Michael Butler 2021-04-05 21:36:05 +00:00 committed by Automerger Merge Worker
commit b19c1cbef3
34 changed files with 357 additions and 76 deletions
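Every nn::IBurst::execute implementation and fallback path in the hunks below gains the same two parameters, an optional deadline and an optional loopTimeoutDuration, matching the four-argument nn::IPreparedModel::execute. A minimal caller-side sketch of the new call, assuming the canonical android::nn types; the helper name runOnce and the 2 second timeout are illustrative only, and, as the hunks below show, passing {} for either argument keeps the previous behavior:

#include <chrono>
#include <utility>
#include <vector>

#include <nnapi/IBurst.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>

namespace nn = ::android::nn;

// Illustrative helper (not part of this change): run one burst execution with
// no deadline and a 2 second loop timeout via the new four-argument execute().
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> runOnce(
        const nn::SharedBurst& burst, const nn::Request& request) {
    const nn::OptionalTimePoint deadline = {};  // empty optional: no deadline
    const nn::OptionalDuration loopTimeoutDuration = std::chrono::seconds(2);
    return burst->execute(request, nn::MeasureTiming::NO, deadline, loopTimeoutDuration);
}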

View file

@@ -44,7 +44,9 @@ class Burst final : public nn::IBurst {
OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override;
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
-const nn::Request& request, nn::MeasureTiming measure) const override;
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const override;
private:
const nn::SharedPreparedModel kPreparedModel;

View file

@@ -20,6 +20,7 @@
#include <nnapi/IBurst.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
+#include <nnapi/TypeUtils.h>
#include <nnapi/Types.h>
#include <memory>
@@ -48,8 +49,10 @@ Burst::OptionalCacheHold Burst::cacheMemory(const nn::SharedMemory& /*memory*/)
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
-const nn::Request& request, nn::MeasureTiming measure) const {
-return kPreparedModel->execute(request, measure, {}, {});
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const {
+return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
}
} // namespace android::hardware::neuralnetworks::V1_0::utils

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE_H
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <gmock/gmock.h>
@@ -83,4 +83,4 @@ inline void MockDevice::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_0::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL_H
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <gmock/gmock.h>
@@ -82,4 +82,4 @@ inline void MockPreparedModel::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_0::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_TEST_MOCK_PREPARED_MODEL_H

View file

@@ -224,7 +224,19 @@ TEST(PreparedModelTest, executeFencedNotSupported) {
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
// TODO: test burst execution if/when it is added to nn::IPreparedModel.
TEST(PreparedModelTest, configureExecutionBurst) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_TRUE(result.has_value())
<< "Failed with " << result.error().code << ": " << result.error().message;
EXPECT_NE(result.value(), nullptr);
}
TEST(PreparedModelTest, getUnderlyingResource) {
// setup test

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE_H
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <gmock/gmock.h>
@@ -92,4 +92,4 @@ inline void MockDevice::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_1::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL_H
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <gmock/gmock.h>
@@ -41,4 +41,4 @@ inline sp<MockPreparedModel> MockPreparedModel::create() {
} // namespace android::hardware::neuralnetworks::V1_0::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_1_UTILS_TEST_MOCK_PREPARED_MODEL_H

View file

@@ -57,7 +57,8 @@ class ExecutionBurstController final : public nn::IBurst {
public:
using FallbackFunction =
std::function<nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>(
-const nn::Request&, nn::MeasureTiming)>;
+const nn::Request&, nn::MeasureTiming, const nn::OptionalTimePoint&,
+const nn::OptionalDuration&)>;
/**
* NN runtime memory cache.
@@ -168,7 +169,9 @@ class ExecutionBurstController final : public nn::IBurst {
// See IBurst::execute for information on this method.
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
-const nn::Request& request, nn::MeasureTiming measure) const override;
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const override;
private:
mutable std::atomic_flag mExecutionInFlight = ATOMIC_FLAG_INIT;

View file

@@ -276,7 +276,9 @@ ExecutionBurstController::OptionalCacheHold ExecutionBurstController::cacheMemor
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-ExecutionBurstController::execute(const nn::Request& request, nn::MeasureTiming measure) const {
+ExecutionBurstController::execute(const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const {
// This is the first point when we know an execution is occurring, so begin to collect
// systraces. Note that the first point we can begin collecting systraces in
// ExecutionBurstServer is when the RequestChannelReceiver realizes there is data in the FMQ, so
@@ -289,7 +291,7 @@ ExecutionBurstController::execute(const nn::Request& request, nn::MeasureTiming
version > nn::Version::ANDROID_Q) {
// fallback to another execution path if the packet could not be sent
if (kFallback) {
-return kFallback(request, measure);
+return kFallback(request, measure, deadline, loopTimeoutDuration);
}
return NN_ERROR() << "Request object has features not supported by IBurst::execute";
}
@@ -323,7 +325,7 @@ ExecutionBurstController::execute(const nn::Request& request, nn::MeasureTiming
if (!sendStatus.ok()) {
// fallback to another execution path if the packet could not be sent
if (kFallback) {
-return kFallback(request, measure);
+return kFallback(request, measure, deadline, loopTimeoutDuration);
}
return NN_ERROR() << "Error sending FMQ packet: " << sendStatus.error();
}

View file

@@ -259,7 +259,7 @@ nn::ExecutionResult<std::pair<hidl_vec<OutputShape>, Timing>> ExecutionBurstServ
nn::MeasureTiming canonicalMeasure = NN_TRY(makeExecutionFailure(nn::convert(measure)));
const auto [outputShapes, timing] =
-NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure));
+NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure, {}, {}));
return std::make_pair(NN_TRY(makeExecutionFailure(convert(outputShapes))),
NN_TRY(makeExecutionFailure(convert(timing))));

View file

@@ -122,10 +122,12 @@ PreparedModel::executeFenced(const nn::Request& /*request*/,
nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
auto self = shared_from_this();
-auto fallback = [preparedModel = std::move(self)](const nn::Request& request,
-nn::MeasureTiming measure)
+auto fallback = [preparedModel = std::move(self)](
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration)
-> nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> {
-return preparedModel->execute(request, measure, {}, {});
+return preparedModel->execute(request, measure, deadline, loopTimeoutDuration);
};
const auto pollingTimeWindow = getBurstControllerPollingTimeWindow();
return ExecutionBurstController::create(kPreparedModel, std::move(fallback), pollingTimeWindow);

View file

@@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_BURST_CONTEXT_H
#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_BURST_CONTEXT_H
#include <android/hardware/neuralnetworks/1.2/IBurstContext.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <hidl/HidlSupport.h>
#include <hidl/Status.h>
namespace android::hardware::neuralnetworks::V1_2::utils {
class MockBurstContext final : public IBurstContext {
public:
// V1_2 methods below.
MOCK_METHOD(Return<void>, freeMemory, (int32_t slot), (override));
};
} // namespace android::hardware::neuralnetworks::V1_2::utils
#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_BURST_CONTEXT_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE_H
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <gmock/gmock.h>
@@ -114,4 +114,4 @@ inline void MockDevice::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_2::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL_H
#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
#include <gmock/gmock.h>
@@ -98,4 +98,4 @@ inline void MockPreparedModel::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_2::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_2_UTILS_TEST_MOCK_PREPARED_MODEL_H

View file

@@ -16,6 +16,8 @@
#include "MockPreparedModel.h"
+#include "MockBurstContext.h"
#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
@@ -67,6 +69,17 @@ auto makeExecuteAsynchronously(V1_0::ErrorStatus launchStatus, V1_0::ErrorStatus
return launchStatus;
};
}
auto makeConfigureExecutionBurstReturn(V1_0::ErrorStatus status,
const sp<MockBurstContext>& burstContext) {
return [status, burstContext](
const sp<V1_2::IBurstCallback>& /*callback*/,
const MQDescriptorSync<V1_2::FmqRequestDatum>& /*requestChannel*/,
const MQDescriptorSync<V1_2::FmqResultDatum>& /*resultChannel*/,
V1_2::IPreparedModel::configureExecutionBurst_cb cb) -> hardware::Return<void> {
cb(status, burstContext);
return hardware::Void();
};
}
std::function<hardware::Status()> makeTransportFailure(status_t status) {
return [status] { return hardware::Status::fromStatusT(status); };
@@ -321,7 +334,76 @@ TEST(PreparedModelTest, executeFencedNotSupported) {
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
// TODO: test burst execution if/when it is added to nn::IPreparedModel.
TEST(PreparedModelTest, configureExecutionBurst) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto mockBurstContext = sp<MockBurstContext>::make();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(makeConfigureExecutionBurstReturn(V1_0::ErrorStatus::NONE, mockBurstContext));
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_TRUE(result.has_value())
<< "Failed with " << result.error().code << ": " << result.error().message;
EXPECT_NE(result.value(), nullptr);
}
TEST(PreparedModelTest, configureExecutionBurstError) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(
makeConfigureExecutionBurstReturn(V1_0::ErrorStatus::GENERAL_FAILURE, nullptr));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
TEST(PreparedModelTest, configureExecutionBurstTransportFailure) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
TEST(PreparedModelTest, configureExecutionBurstDeadObject) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
}
TEST(PreparedModelTest, getUnderlyingResource) {
// setup test

View file

@@ -201,10 +201,12 @@ PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::S
nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
auto self = shared_from_this();
-auto fallback = [preparedModel = std::move(self)](const nn::Request& request,
-nn::MeasureTiming measure)
+auto fallback = [preparedModel = std::move(self)](
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration)
-> nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> {
-return preparedModel->execute(request, measure, {}, {});
+return preparedModel->execute(request, measure, deadline, loopTimeoutDuration);
};
const auto pollingTimeWindow = V1_2::utils::getBurstControllerPollingTimeWindow();
return V1_2::utils::ExecutionBurstController::create(kPreparedModel, std::move(fallback),

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER_H
#include <android/hardware/neuralnetworks/1.3/IBuffer.h>
#include <gmock/gmock.h>
@@ -40,4 +40,4 @@ inline sp<MockBuffer> MockBuffer::create() {
} // namespace android::hardware::neuralnetworks::V1_3::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BUFFER_H

View file

@@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BURST_CONTEXT_H
#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BURST_CONTEXT_H
#include <android/hardware/neuralnetworks/1.2/IBurstContext.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <hidl/HidlSupport.h>
#include <hidl/Status.h>
namespace android::hardware::neuralnetworks::V1_3::utils {
class MockBurstContext final : public V1_2::IBurstContext {
public:
// V1_2 methods below.
MOCK_METHOD(Return<void>, freeMemory, (int32_t slot), (override));
};
} // namespace android::hardware::neuralnetworks::V1_3::utils
#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_BURST_CONTEXT_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE_H
#include <android/hardware/neuralnetworks/1.3/IDevice.h>
#include <gmock/gmock.h>
@@ -136,4 +136,4 @@ inline void MockDevice::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_3::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H
#include <android/hardware/neuralnetworks/1.3/IFencedExecutionCallback.h>
#include <gmock/gmock.h>
@@ -39,4 +39,4 @@ inline sp<MockFencedExecutionCallback> MockFencedExecutionCallback::create() {
} // namespace android::hardware::neuralnetworks::V1_3::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL_H
#include <android/hardware/neuralnetworks/1.3/IPreparedModel.h>
#include <gmock/gmock.h>
@@ -118,4 +118,4 @@ inline void MockPreparedModel::simulateCrash() {
} // namespace android::hardware::neuralnetworks::V1_3::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_TEST_MOCK_PREPARED_MODEL_H

View file

@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "MockBurstContext.h"
#include "MockFencedExecutionCallback.h"
#include "MockPreparedModel.h"
@@ -96,6 +97,17 @@ auto makeExecuteFencedCallbackReturn(V1_3::ErrorStatus status, const V1_2::Timin
return hardware::Void();
};
}
auto makeConfigureExecutionBurstReturn(V1_0::ErrorStatus status,
const sp<MockBurstContext>& burstContext) {
return [status, burstContext](
const sp<V1_2::IBurstCallback>& /*callback*/,
const MQDescriptorSync<V1_2::FmqRequestDatum>& /*requestChannel*/,
const MQDescriptorSync<V1_2::FmqResultDatum>& /*resultChannel*/,
V1_2::IPreparedModel::configureExecutionBurst_cb cb) -> hardware::Return<void> {
cb(status, burstContext);
return hardware::Void();
};
}
std::function<hardware::Status()> makeTransportFailure(status_t status) {
return [status] { return hardware::Status::fromStatusT(status); };
@@ -450,7 +462,76 @@ TEST(PreparedModelTest, executeFencedDeadObject) {
EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
}
// TODO: test burst execution if/when it is added to nn::IPreparedModel.
TEST(PreparedModelTest, configureExecutionBurst) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto mockBurstContext = sp<MockBurstContext>::make();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(makeConfigureExecutionBurstReturn(V1_0::ErrorStatus::NONE, mockBurstContext));
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_TRUE(result.has_value())
<< "Failed with " << result.error().code << ": " << result.error().message;
EXPECT_NE(result.value(), nullptr);
}
TEST(PreparedModelTest, configureExecutionBurstError) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(
makeConfigureExecutionBurstReturn(V1_0::ErrorStatus::GENERAL_FAILURE, nullptr));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
TEST(PreparedModelTest, configureExecutionBurstTransportFailure) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
}
TEST(PreparedModelTest, configureExecutionBurstDeadObject) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel =
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_, _, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_FALSE(result.has_value());
EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
}
TEST(PreparedModelTest, getUnderlyingResource) {
// setup test

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER_H
#include <aidl/android/hardware/neuralnetworks/BnBuffer.h>
#include <android/binder_interface_utils.h>
@@ -40,4 +40,4 @@ inline std::shared_ptr<MockBuffer> MockBuffer::create() {
} // namespace aidl::android::hardware::neuralnetworks::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_BUFFER_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE_H
#include <aidl/android/hardware/neuralnetworks/BnDevice.h>
#include <android/binder_auto_utils.h>
@@ -64,4 +64,4 @@ inline std::shared_ptr<MockDevice> MockDevice::create() {
} // namespace aidl::android::hardware::neuralnetworks::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H
#include <aidl/android/hardware/neuralnetworks/BnFencedExecutionCallback.h>
#include <android/binder_auto_utils.h>
@@ -42,4 +42,4 @@ inline std::shared_ptr<MockFencedExecutionCallback> MockFencedExecutionCallback:
} // namespace aidl::android::hardware::neuralnetworks::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_FENCED_EXECUTION_CALLBACK_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
#include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
#include <android/binder_interface_utils.h>
@@ -49,4 +49,4 @@ inline std::shared_ptr<MockPreparedModel> MockPreparedModel::create() {
} // namespace aidl::android::hardware::neuralnetworks::utils
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H

View file

@@ -252,7 +252,19 @@ TEST(PreparedModelTest, executeFencedDeadObject) {
EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
}
// TODO: test burst execution if/when it is added to nn::IPreparedModel.
TEST(PreparedModelTest, configureExecutionBurst) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
// run test
const auto result = preparedModel->configureExecutionBurst();
// verify result
ASSERT_TRUE(result.has_value())
<< "Failed with " << result.error().code << ": " << result.error().message;
EXPECT_NE(result.value(), nullptr);
}
TEST(PreparedModelTest, getUnderlyingResource) {
// setup test

View file

@@ -32,7 +32,9 @@ class InvalidBurst final : public nn::IBurst {
OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override;
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
-const nn::Request& request, nn::MeasureTiming measure) const override;
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const override;
};
} // namespace android::hardware::neuralnetworks::utils

View file

@@ -47,7 +47,9 @@ class ResilientBurst final : public nn::IBurst,
OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override;
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
-const nn::Request& request, nn::MeasureTiming measure) const override;
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const override;
private:
const Factory kMakeBurst;

View file

@@ -32,7 +32,9 @@ InvalidBurst::OptionalCacheHold InvalidBurst::cacheMemory(
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> InvalidBurst::execute(
-const nn::Request& /*request*/, nn::MeasureTiming /*measure*/) const {
+const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
+const nn::OptionalTimePoint& /*deadline*/,
+const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
return NN_ERROR() << "InvalidBurst";
}

View file

@@ -100,9 +100,11 @@ ResilientBurst::OptionalCacheHold ResilientBurst::cacheMemory(
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> ResilientBurst::execute(
-const nn::Request& request, nn::MeasureTiming measure) const {
-const auto fn = [&request, measure](const nn::IBurst& burst) {
-return burst.execute(request, measure);
+const nn::Request& request, nn::MeasureTiming measure,
+const nn::OptionalTimePoint& deadline,
+const nn::OptionalDuration& loopTimeoutDuration) const {
+const auto fn = [&request, measure, deadline, loopTimeoutDuration](const nn::IBurst& burst) {
+return burst.execute(request, measure, deadline, loopTimeoutDuration);
};
return protect(*this, fn);
}
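The InvalidBurst and ResilientBurst hunks above show the adaptation pattern for the common utils: grow the override by the same two parameters and, where a real execution sits underneath, forward them. A minimal sketch of that forwarding pattern, assuming the same canonical nnapi headers; the ForwardingBurst class and its behavior are hypothetical and not part of this change:

#include <memory>
#include <utility>
#include <vector>

#include <nnapi/IBurst.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>

namespace nn = ::android::nn;

// Hypothetical wrapper: delegates every call to an inner burst, passing the
// new deadline and loopTimeoutDuration arguments straight through.
class ForwardingBurst final : public nn::IBurst {
  public:
    explicit ForwardingBurst(nn::SharedBurst inner) : kInner(std::move(inner)) {}

    OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override {
        return kInner->cacheMemory(memory);
    }

    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
            const nn::Request& request, nn::MeasureTiming measure,
            const nn::OptionalTimePoint& deadline,
            const nn::OptionalDuration& loopTimeoutDuration) const override {
        return kInner->execute(request, measure, deadline, loopTimeoutDuration);
    }

  private:
    const nn::SharedBurst kInner;
};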

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER_H
#include <gmock/gmock.h>
#include <gtest/gtest.h>
@@ -34,4 +34,4 @@ class MockBuffer final : public IBuffer {
} // namespace android::nn
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE_H
#include <gmock/gmock.h>
#include <gtest/gtest.h>
@@ -55,4 +55,4 @@ class MockDevice final : public IDevice {
} // namespace android::nn
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE_H

View file

@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
-#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL_H
#include <gmock/gmock.h>
#include <gtest/gtest.h>
@@ -41,4 +41,4 @@ class MockPreparedModel final : public IPreparedModel {
} // namespace android::nn
-#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
+#endif // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL_H