Add ELU and HARD_SWISH am: 503d850369

Change-Id: I2347a7439107a05429f7e1488c9b4ce5d57f34b7
Automerger Merge Worker 2020-01-31 16:23:00 +00:00
commit b17645bc30
2 changed files with 52 additions and 2 deletions

current.txt

@@ -633,7 +633,7 @@ d1f382d14e1384b907d5bb5780df7f01934650d556fedbed2f15a90773c657d6 android.hardwar
 4167dc3ad35e9cd0d2057d4868c7675ae2c3c9d05bbd614c1f5dccfa5fd68797 android.hardware.neuralnetworks@1.3::IExecutionCallback
 7d23020248194abbee8091cc624f39a5a6d7ccba338b172d5d2d3df0cceffbee android.hardware.neuralnetworks@1.3::IPreparedModel
 0439a1fbbec7f16e5e4c653d85ac685d51bfafbae15b8f8cca530acdd7d6a8ce android.hardware.neuralnetworks@1.3::IPreparedModelCallback
-162515505235bc770601f02c3537f9ccf11582583bf7b11dd2ec81fab6855333 android.hardware.neuralnetworks@1.3::types
+26c643aedf4e28b8d82e517d9cd70601b37f881e1ea94f09808d9e233517e400 android.hardware.neuralnetworks@1.3::types
 3e01d4446cd69fd1c48f8572efd97487bc179564b32bd795800b97bbe10be37b android.hardware.wifi@1.4::IWifi
 a64467bae843569f0d465c5be7f0c7a5b987985b55a3ef4794dd5afc68538650 android.hardware.wifi.supplicant@1.3::ISupplicant
 44445b8a03d7b9e68b2fbd954672c18a8fce9e32851b0692f4f4ab3407f86ecb android.hardware.wifi.supplicant@1.3::ISupplicantStaIface

neuralnetworks/1.3/types.hal

@@ -4986,6 +4986,56 @@ enum OperationType : int32_t {
      */
     WHILE = 97,
+    /**
+     * Computes exponential linear activation on the input tensor element-wise.
+     *
+     * The output is calculated using the following formula:
+     *
+     *     ELU(x) = max(0, x) + min(0, alpha * (exp(x) - 1))
+     *
+     * Supported tensor {@link OperandType}:
+     * * {@link OperandType::TENSOR_FLOAT16}
+     * * {@link OperandType::TENSOR_FLOAT32}
+     *
+     * Inputs:
+     * * 0: A tensor, specifying the input. May be zero-sized.
+     * * 1: A scalar, specifying the alpha parameter.
+     *      For input tensor of {@link OperandType::TENSOR_FLOAT16},
+     *      the alpha value must be of {@link OperandType::FLOAT16}.
+     *      For input tensor of {@link OperandType::TENSOR_FLOAT32},
+     *      the alpha value must be of {@link OperandType::FLOAT32}.
+     *
+     * Outputs:
+     * * 0: The output tensor of same shape and type as input0.
+     */
+    ELU = 98,
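
As a rough illustration of the ELU formula above, here is a minimal element-wise C++ sketch over float data. This is an editor's sketch, not part of this change or of the NNAPI API itself; the helper name eluReference is hypothetical.

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Applies ELU(x) = max(0, x) + min(0, alpha * (exp(x) - 1)) to each element.
    // Editor's sketch only; eluReference is a hypothetical name.
    std::vector<float> eluReference(const std::vector<float>& input, float alpha) {
        std::vector<float> output(input.size());
        for (std::size_t i = 0; i < input.size(); ++i) {
            const float x = input[i];
            output[i] = std::max(0.0f, x) +
                        std::min(0.0f, alpha * (std::exp(x) - 1.0f));
        }
        return output;
    }

For example, eluReference({-1.0f}, 1.0f) yields exp(-1) - 1 ≈ -0.632, while positive inputs pass through unchanged.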
+    /**
+     * Computes hard-swish activation on the input tensor element-wise.
+     *
+     * Hard-swish activation was introduced in
+     * https://arxiv.org/pdf/1905.02244.pdf
+     *
+     * The output is calculated using the following formula:
+     *
+     *     h-swish(x) = x * max(0, min(6, (x + 3))) / 6
+     *
+     * Supported tensor {@link OperandType}:
+     * * {@link OperandType::TENSOR_FLOAT16}
+     * * {@link OperandType::TENSOR_FLOAT32}
+     * * {@link OperandType::TENSOR_QUANT8_ASYMM}
+     * * {@link OperandType::TENSOR_QUANT8_ASYMM_SIGNED}
+     *
+     * Inputs:
+     * * 0: A tensor, specifying the input. May be zero-sized.
+     *
+     * Outputs:
+     * * 0: The output tensor of same shape and type as input0.
+     *      Scale and zero point of this tensor may be different from the input
+     *      tensor's parameters.
+     */
+    HARD_SWISH = 99,
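
In the same spirit, a minimal C++ sketch of the hard-swish formula above, again an editor's illustration with a hypothetical name (hardSwishReference). For the quantized types listed, a real implementation would also rescale using each tensor's scale and zero point, which this float-only sketch omits.

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Applies h-swish(x) = x * max(0, min(6, x + 3)) / 6 to each element.
    // Editor's sketch only; hardSwishReference is a hypothetical name.
    std::vector<float> hardSwishReference(const std::vector<float>& input) {
        std::vector<float> output(input.size());
        for (std::size_t i = 0; i < input.size(); ++i) {
            const float x = input[i];
            output[i] = x * std::max(0.0f, std::min(6.0f, x + 3.0f)) / 6.0f;
        }
        return output;
    }

Note that hard-swish is exactly 0 for x <= -3 and exactly x for x >= 3, which is why it is cheap to compute compared with the sigmoid-based swish it approximates.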
     /**
      * DEPRECATED. Since NNAPI 1.2, extensions are the preferred alternative to
      * OEM operation and data types.
@@ -5008,7 +5058,7 @@ enum OperationType : int32_t {
 enum OperationTypeRange : uint32_t {
     BASE_MIN = 0,
     FUNDAMENTAL_MIN = 0,
-    FUNDAMENTAL_MAX = 97,
+    FUNDAMENTAL_MAX = 99,
     OEM_MIN = 10000,
     OEM_MAX = 10000,
     BASE_MAX = 0xFFFF,