Merge "NNAPI: Add execution preference to prepareModel (HAL)" into pi-dev

Michael Butler, 2018-04-13 17:26:47 +00:00, committed by Android (Google) Code Review
commit 7fe93ba5c9
6 changed files with 62 additions and 12 deletions


@@ -338,8 +338,8 @@ e15ebdf1e0a326ff5b8a59668d4d8cd3852bd88388eae91de13f5f7e1af50ed1 android.hardwar
 b8c7ed58aa8740361e63d0ce9e7c94227572a629f356958840b34809d2393a7c android.hardware.media.bufferpool@1.0::IClientManager
 4a2c0dc82780e6c90731725a103feab8ab6ecf85a64e049b9cbd2b2c61620fe1 android.hardware.media.bufferpool@1.0::IConnection
 6aef1218e5949f867b0104752ac536c1b707222a403341720de90141df129e3e android.hardware.media.bufferpool@1.0::types
-3e4d8e0085ebe8549efb8ad4b8b400a141a3fa3f47ae23696b3e05a1612eb003 android.hardware.neuralnetworks@1.1::IDevice
-50db076b03a6760557fc60ef433ba9dd2ff983cf3305eeb504b0fff3eaa604ff android.hardware.neuralnetworks@1.1::types
+7698dc2382a2eeb43541840e3ee624f34108efdfb976b2bfa7c13ef15fb8c4c4 android.hardware.neuralnetworks@1.1::IDevice
+5604001029a255648a9e955de0a822a48d9ba7cc259b106fb8be0cd43dc8eece android.hardware.neuralnetworks@1.1::types
 8d3d86da0bfa4bf070970d8303c659f67f35d670c287d45a3f542e4fedadd578 android.hardware.nfc@1.1::INfc
 e85f566698d2a2c28100e264fcf2c691a066756ddf8dd341d009ff50cfe10614 android.hardware.nfc@1.1::INfcClientCallback
 5e278fcaa3287d397d8eebe1c22aaa28150f5caae1cf9381cd6dc32cb37899c5 android.hardware.nfc@1.1::types


@@ -242,8 +242,8 @@ void Execute(const sp<V1_1::IDevice>& device, std::function<V1_1::Model(void)> c
     // launch prepare model
     sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
-    Return<ErrorStatus> prepareLaunchStatus =
-        device->prepareModel_1_1(model, preparedModelCallback);
+    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
+        model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
     ASSERT_TRUE(prepareLaunchStatus.isOk());
     ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
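For context, model preparation in this HAL is asynchronous: the Return<ErrorStatus> checked above only reports whether preparation was successfully launched, while the final status and the prepared model are delivered through the callback. The following is a minimal sketch of how the harness then retrieves the result, assuming the VTS PreparedModelCallback helper exposes wait(), getStatus(), and getPreparedModel() (those names are not shown in this diff):

    // Sketch only: block until the driver reports back through the callback.
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    ASSERT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
    ASSERT_NE(nullptr, preparedModel.get());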


@@ -102,6 +102,8 @@ interface IDevice extends @1.0::IDevice {
      * Multiple threads can call prepareModel on the same model concurrently.
      *
      * @param model The model to be prepared for execution.
+     * @param preference Indicates the intended execution behavior of a prepared
+     *                   model.
      * @param callback A callback object used to return the error status of
      *                 preparing the model for execution and the prepared model
      *                 if successful, nullptr otherwise. The callback object's
@@ -115,6 +117,7 @@ interface IDevice extends @1.0::IDevice {
      *                - INVALID_ARGUMENT if one of the input arguments is
      *                  invalid
      */
-    prepareModel_1_1(Model model, IPreparedModelCallback callback)
+    prepareModel_1_1(Model model, ExecutionPreference preference,
+                     IPreparedModelCallback callback)
         generates (ErrorStatus status);
 };
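On the driver side, the generated C++ server interface gains a matching method. A rough sketch of how an implementation might handle it follows; this is not part of the change itself: SampleDriver, validateModel(), and compile() are hypothetical names, and the error handling simply mirrors what the VTS validation tests below expect (INVALID_ARGUMENT for an out-of-range preference).

    Return<ErrorStatus> SampleDriver::prepareModel_1_1(
            const V1_1::Model& model, ExecutionPreference preference,
            const sp<IPreparedModelCallback>& callback) {
        // Reject bad arguments up front; the callback also receives the status.
        if (!validateModel(model) || !validExecutionPreference(preference)) {
            callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
            return ErrorStatus::INVALID_ARGUMENT;
        }
        // Compile the model, letting the preference steer driver-specific
        // trade-offs, then report the outcome through the callback.
        sp<IPreparedModel> prepared = compile(model, preference);  // hypothetical helper
        callback->notify(prepared != nullptr ? ErrorStatus::NONE : ErrorStatus::GENERAL_FAILURE,
                         prepared);
        return ErrorStatus::NONE;
    }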


@@ -382,3 +382,24 @@ struct Model {
      */
     bool relaxComputationFloat32toFloat16;
 };
+
+/**
+ * Execution preferences.
+ */
+enum ExecutionPreference : int32_t {
+    /**
+     * Prefer executing in a way that minimizes battery drain.
+     * This is desirable for compilations that will be executed often.
+     */
+    LOW_POWER = 0,
+    /**
+     * Prefer returning a single answer as fast as possible, even if this causes
+     * more power consumption.
+     */
+    FAST_SINGLE_ANSWER = 1,
+    /**
+     * Prefer maximizing the throughput of successive frames, for example when
+     * processing successive frames coming from the camera.
+     */
+    SUSTAINED_SPEED = 2,
+};
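The enum comments only express a preference, leaving the concrete behavior to each driver. Purely as an illustration, and not part of this change, a driver might translate the preference into an internal tuning profile along these lines; TuningProfile and its values are hypothetical:

    // Hypothetical driver-internal tuning profile.
    enum class TuningProfile { kDefault, kMinimizeEnergy, kMinimizeLatency, kMaximizeThroughput };

    TuningProfile profileForPreference(ExecutionPreference preference) {
        switch (preference) {
            case ExecutionPreference::LOW_POWER:          return TuningProfile::kMinimizeEnergy;
            case ExecutionPreference::FAST_SINGLE_ANSWER: return TuningProfile::kMinimizeLatency;
            case ExecutionPreference::SUSTAINED_SPEED:    return TuningProfile::kMaximizeThroughput;
            default:
                // Out-of-range values should already have been rejected with
                // INVALID_ARGUMENT in prepareModel_1_1.
                return TuningProfile::kDefault;
        }
    }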


@@ -50,13 +50,13 @@ static void validateGetSupportedOperations(const sp<IDevice>& device, const std:
 }
 
 static void validatePrepareModel(const sp<IDevice>& device, const std::string& message,
-                                 const V1_1::Model& model) {
+                                 const V1_1::Model& model, ExecutionPreference preference) {
     SCOPED_TRACE(message + " [prepareModel_1_1]");
 
     sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
     ASSERT_NE(nullptr, preparedModelCallback.get());
     Return<ErrorStatus> prepareLaunchStatus =
-        device->prepareModel_1_1(model, preparedModelCallback);
+        device->prepareModel_1_1(model, preference, preparedModelCallback);
     ASSERT_TRUE(prepareLaunchStatus.isOk());
     ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(prepareLaunchStatus));
@@ -67,15 +67,24 @@ static void validatePrepareModel(const sp<IDevice>& device, const std::string& m
     ASSERT_EQ(nullptr, preparedModel.get());
 }
 
+static bool validExecutionPreference(ExecutionPreference preference) {
+    return preference == ExecutionPreference::LOW_POWER ||
+           preference == ExecutionPreference::FAST_SINGLE_ANSWER ||
+           preference == ExecutionPreference::SUSTAINED_SPEED;
+}
+
 // Primary validation function. This function will take a valid model, apply a
 // mutation to it to invalidate the model, then pass it to interface calls that
 // use the model. Note that the model here is passed by value, and any mutation
 // to the model does not leave this function.
 static void validate(const sp<IDevice>& device, const std::string& message, V1_1::Model model,
-                     const std::function<void(Model*)>& mutation) {
+                     const std::function<void(Model*)>& mutation,
+                     ExecutionPreference preference = ExecutionPreference::FAST_SINGLE_ANSWER) {
     mutation(&model);
-    validateGetSupportedOperations(device, message, model);
-    validatePrepareModel(device, message, model);
+    if (validExecutionPreference(preference)) {
+        validateGetSupportedOperations(device, message, model);
+    }
+    validatePrepareModel(device, message, model, preference);
 }
 
 // Delete element from hidl_vec. hidl_vec doesn't support a "remove" operation,
@@ -486,6 +495,22 @@ static void addOperationOutputTest(const sp<IDevice>& device, const V1_1::Model&
     }
 }
 
+///////////////////////// VALIDATE EXECUTION PREFERENCE /////////////////////////
+
+static const int32_t invalidExecutionPreferences[] = {
+    static_cast<int32_t>(ExecutionPreference::LOW_POWER) - 1,        // lower bound
+    static_cast<int32_t>(ExecutionPreference::SUSTAINED_SPEED) + 1,  // upper bound
+};
+
+static void mutateExecutionPreferenceTest(const sp<IDevice>& device, const V1_1::Model& model) {
+    for (int32_t preference : invalidExecutionPreferences) {
+        const std::string message =
+            "mutateExecutionPreferenceTest: preference " + std::to_string(preference);
+        validate(device, message, model, [](Model*) {},
+                 static_cast<ExecutionPreference>(preference));
+    }
+}
+
 ////////////////////////// ENTRY POINT //////////////////////////////
 
 void ValidationTest::validateModel(const V1_1::Model& model) {
@@ -503,6 +528,7 @@ void ValidationTest::validateModel(const V1_1::Model& model) {
     removeOperationOutputTest(device, model);
     addOperationInputTest(device, model);
     addOperationOutputTest(device, model);
+    mutateExecutionPreferenceTest(device, model);
 }
 
 }  // namespace functional
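Design note: validate() only skips validateGetSupportedOperations() when the preference is out of range, presumably because the get-supported-operations call does not take a preference argument, so an invalid preference leaves nothing there to reject; prepareModel_1_1 is still exercised and must fail. For driver bring-up outside the harness, the same boundary values can be probed directly; a sketch assuming the device, model, and PreparedModelCallback objects from the tests above:

    // Sketch only: an out-of-range preference (one below LOW_POWER) must be rejected.
    sp<PreparedModelCallback> callback = new PreparedModelCallback();
    Return<ErrorStatus> launchStatus = device->prepareModel_1_1(
        model, static_cast<ExecutionPreference>(-1), callback);
    ASSERT_TRUE(launchStatus.isOk());  // the HIDL call itself still transports
    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(launchStatus));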


@@ -60,8 +60,8 @@ static void createPreparedModel(const sp<IDevice>& device, const V1_1::Model& mo
     // launch prepare model
     sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
     ASSERT_NE(nullptr, preparedModelCallback.get());
-    Return<ErrorStatus> prepareLaunchStatus =
-        device->prepareModel_1_1(model, preparedModelCallback);
+    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
+        model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
     ASSERT_TRUE(prepareLaunchStatus.isOk());
     ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));