Merge tm-dev-plus-aosp-without-vendor@8763363
Bug: 236760014
Merged-In: If82a5bf302cb68a2c5b486006e4679fbfc8ee545
Change-Id: I9f63475a68f386bbb8ad768a8f2075e6c3c55eaa
commit 7c0bc319e9
12 changed files with 570 additions and 276 deletions
@@ -61,6 +61,7 @@ cc_library_headers {
    apex_available: [
        "//apex_available:platform",
        "com.android.bluetooth",
        "com.android.media.swcodec",
    ],
    min_sdk_version: "29",
    host_supported: true,

@@ -52,6 +52,12 @@ enum {
    SENSOR_TYPE_LOW_LATENCY_OFFBODY_DETECT = 34,
    SENSOR_TYPE_ACCELEROMETER_UNCALIBRATED = 35,
    SENSOR_TYPE_HINGE_ANGLE = 36,
    SENSOR_TYPE_HEAD_TRACKER = 37,
    SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES = 38,
    SENSOR_TYPE_GYROSCOPE_LIMITED_AXES = 39,
    SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED = 40,
    SENSOR_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED = 41,
    SENSOR_TYPE_HEADING = 42,
    SENSOR_TYPE_DEVICE_PRIVATE_BASE = 65536 /* 0x10000 */,
};

@@ -186,6 +186,12 @@ enum {
#define SENSOR_STRING_TYPE_LOW_LATENCY_OFFBODY_DETECT "android.sensor.low_latency_offbody_detect"
#define SENSOR_STRING_TYPE_ACCELEROMETER_UNCALIBRATED "android.sensor.accelerometer_uncalibrated"
#define SENSOR_STRING_TYPE_HINGE_ANGLE "android.sensor.hinge_angle"
#define SENSOR_STRING_TYPE_HEAD_TRACKER "android.sensor.head_tracker"
#define SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES "android.sensor.accelerometer_limited_axes"
#define SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES "android.sensor.gyroscope_limited_axes"
#define SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED "android.sensor.accelerometer_limited_axes_uncalibrated"
#define SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED "android.sensor.gyroscope_limited_axes_uncalibrated"
#define SENSOR_STRING_TYPE_HEADING "android.sensor.heading"

/**
 * Values returned by the accelerometer in various locations in the universe.

@@ -291,6 +297,76 @@ typedef struct {
    };
} additional_info_event_t;

typedef struct {
    float rx;
    float ry;
    float rz;
    float vx;
    float vy;
    float vz;
    int32_t discontinuity_count;
} head_tracker_event_t;

/**
 * limited axes imu event data
 */
typedef struct {
    union {
        float calib[3];
        struct {
            float x;
            float y;
            float z;
        };
    };
    union {
        float supported[3];
        struct {
            float x_supported;
            float y_supported;
            float z_supported;
        };
    };
} limited_axes_imu_event_t;

/**
 * limited axes uncalibrated imu event data
 */
typedef struct {
    union {
        float uncalib[3];
        struct {
            float x_uncalib;
            float y_uncalib;
            float z_uncalib;
        };
    };
    union {
        float bias[3];
        struct {
            float x_bias;
            float y_bias;
            float z_bias;
        };
    };
    union {
        float supported[3];
        struct {
            float x_supported;
            float y_supported;
            float z_supported;
        };
    };
} limited_axes_imu_uncalibrated_event_t;

/**
 * Heading event data
 */
typedef struct {
    float heading;
    float accuracy;
} heading_event_t;

/**
 * Union of the various types of sensor data
 * that can be returned.

@@ -368,6 +444,26 @@ typedef struct sensors_event_t {
         * SENSOR_TYPE_ADDITIONAL_INFO for details.
         */
        additional_info_event_t additional_info;

        /* vector describing head orientation (added for legacy code support only) */
        head_tracker_event_t head_tracker;

        /*
         * limited axes imu event, See
         * SENSOR_TYPE_GYROSCOPE_LIMITED_AXES and
         * SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES for details.
         */
        limited_axes_imu_event_t limited_axes_imu;

        /*
         * limited axes imu uncalibrated event, See
         * SENSOR_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED and
         * SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED for details.
         */
        limited_axes_imu_uncalibrated_event_t limited_axes_imu_uncalibrated;

        /* heading data containing value in degrees and its accuracy */
        heading_event_t heading;
    };

    union {
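The new payload types above travel through the existing `sensors_event_t` union. As a quick illustration (a hedged sketch, not part of this change; the handle and sample values are made up), a HAL emitting `SENSOR_TYPE_HEAD_TRACKER` data would fill the `head_tracker` member roughly like this, where rx/ry/rz carry the orientation and vx/vy/vz the angular velocity as documented for the head tracker sensor type:

```cpp
/* Illustrative sketch only; not part of this change. */
#include <string.h>
#include <hardware/sensors.h>

static void fill_head_tracker_sample(sensors_event_t *ev, int32_t handle, int64_t ts_ns) {
    memset(ev, 0, sizeof(*ev));
    ev->version = sizeof(sensors_event_t);  /* required by the legacy event layout */
    ev->sensor = handle;                    /* placeholder handle */
    ev->type = SENSOR_TYPE_HEAD_TRACKER;
    ev->timestamp = ts_ns;
    /* Orientation (rotation-vector representation) and angular velocity. */
    ev->head_tracker.rx = 0.01f;
    ev->head_tracker.ry = -0.02f;
    ev->head_tracker.rz = 0.00f;
    ev->head_tracker.vx = 0.10f;
    ev->head_tracker.vy = 0.00f;
    ev->head_tracker.vz = 0.00f;
    /* Incremented whenever the tracking reference frame is reset. */
    ev->head_tracker.discontinuity_count = 0;
}
```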
@ -63,7 +63,7 @@ namespace android {
|
|||
#endif // SUBMIX_VERBOSE_LOGGING
|
||||
|
||||
// NOTE: This value will be rounded up to the nearest power of 2 by MonoPipe().
|
||||
#define DEFAULT_PIPE_SIZE_IN_FRAMES (1024*4)
|
||||
#define DEFAULT_PIPE_SIZE_IN_FRAMES (1024*4) // size at default sample rate
|
||||
// Value used to divide the MonoPipe() buffer into segments that are written to the source and
|
||||
// read from the sink. The maximum latency of the device is the size of the MonoPipe's buffer
|
||||
// the minimum latency is the MonoPipe buffer size divided by this value.
|
||||
|
@ -83,10 +83,7 @@ namespace android {
|
|||
// multiple input streams from this device. If this option is enabled, each input stream returned
|
||||
// is *the same stream* which means that readers will race to read data from these streams.
|
||||
#define ENABLE_LEGACY_INPUT_OPEN 1
|
||||
// Whether channel conversion (16-bit signed PCM mono->stereo, stereo->mono) is enabled.
|
||||
#define ENABLE_CHANNEL_CONVERSION 1
|
||||
// Whether resampling is enabled.
|
||||
#define ENABLE_RESAMPLING 1
|
||||
|
||||
#if LOG_STREAMS_TO_FILES
|
||||
// Folder to save stream log files to.
|
||||
#define LOG_STREAM_FOLDER "/data/misc/audioserver"
|
||||
|
@ -130,11 +127,6 @@ struct submix_config {
|
|||
// channel bitfields are not equivalent.
|
||||
audio_channel_mask_t input_channel_mask;
|
||||
audio_channel_mask_t output_channel_mask;
|
||||
#if ENABLE_RESAMPLING
|
||||
// Input stream and output stream sample rates.
|
||||
uint32_t input_sample_rate;
|
||||
uint32_t output_sample_rate;
|
||||
#endif // ENABLE_RESAMPLING
|
||||
size_t pipe_frame_size; // Number of bytes in each audio frame in the pipe.
|
||||
size_t buffer_size_frames; // Size of the audio pipe in frames.
|
||||
// Maximum number of frames buffered by the input and output streams.
|
||||
|
@ -159,11 +151,6 @@ typedef struct route_config {
|
|||
// destroyed if both and input and output streams are destroyed.
|
||||
struct submix_stream_out *output;
|
||||
struct submix_stream_in *input;
|
||||
#if ENABLE_RESAMPLING
|
||||
// Buffer used as temporary storage for resampled data prior to returning data to the output
|
||||
// stream.
|
||||
int16_t resampler_buffer[DEFAULT_PIPE_SIZE_IN_FRAMES];
|
||||
#endif // ENABLE_RESAMPLING
|
||||
} route_config_t;
|
||||
|
||||
struct submix_audio_device {
|
||||
|
@ -221,6 +208,11 @@ static bool sample_rate_supported(const uint32_t sample_rate)
|
|||
return return_value;
|
||||
}
|
||||
|
||||
static size_t pipe_size_in_frames(const uint32_t sample_rate)
|
||||
{
|
||||
return DEFAULT_PIPE_SIZE_IN_FRAMES * ((float) sample_rate / DEFAULT_SAMPLE_RATE_HZ);
|
||||
}
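A hedged numeric illustration of the new helper (DEFAULT_SAMPLE_RATE_HZ is defined elsewhere in this file; 48000 is an assumption used only for the arithmetic here):

```cpp
// Illustration only (not part of the change): the pipe size now scales
// linearly with the stream's sample rate, assuming
// DEFAULT_PIPE_SIZE_IN_FRAMES == 4096 and DEFAULT_SAMPLE_RATE_HZ == 48000.
//   pipe_size_in_frames(48000) -> 4096 frames
//   pipe_size_in_frames(24000) -> 2048 frames
//   pipe_size_in_frames(8000)  -> 682 frames (float product truncated)
// MonoPipe() later rounds the chosen size up to the nearest power of 2.
```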
|
||||
|
||||
// Determine whether the specified sample rate is supported, if it is return the specified sample
|
||||
// rate, otherwise return the default sample rate for the submix module.
|
||||
static uint32_t get_supported_sample_rate(uint32_t sample_rate)
|
||||
|
@ -325,7 +317,6 @@ static struct submix_audio_device * audio_hw_device_get_submix_audio_device(
|
|||
static bool audio_config_compare(const audio_config * const input_config,
|
||||
const audio_config * const output_config)
|
||||
{
|
||||
#if !ENABLE_CHANNEL_CONVERSION
|
||||
const uint32_t input_channels = audio_channel_count_from_in_mask(input_config->channel_mask);
|
||||
const uint32_t output_channels = audio_channel_count_from_out_mask(output_config->channel_mask);
|
||||
if (input_channels != output_channels) {
|
||||
|
@ -333,13 +324,8 @@ static bool audio_config_compare(const audio_config * const input_config,
|
|||
input_channels, output_channels);
|
||||
return false;
|
||||
}
|
||||
#endif // !ENABLE_CHANNEL_CONVERSION
|
||||
#if ENABLE_RESAMPLING
|
||||
if (input_config->sample_rate != output_config->sample_rate &&
|
||||
audio_channel_count_from_in_mask(input_config->channel_mask) != 1) {
|
||||
#else
|
||||
|
||||
if (input_config->sample_rate != output_config->sample_rate) {
|
||||
#endif // ENABLE_RESAMPLING
|
||||
ALOGE("audio_config_compare() sample rate mismatch %ul vs. %ul",
|
||||
input_config->sample_rate, output_config->sample_rate);
|
||||
return false;
|
||||
|
@ -376,24 +362,11 @@ static void submix_audio_device_create_pipe_l(struct submix_audio_device * const
|
|||
in->route_handle = route_idx;
|
||||
rsxadev->routes[route_idx].input = in;
|
||||
rsxadev->routes[route_idx].config.input_channel_mask = config->channel_mask;
|
||||
#if ENABLE_RESAMPLING
|
||||
rsxadev->routes[route_idx].config.input_sample_rate = config->sample_rate;
|
||||
// If the output isn't configured yet, set the output sample rate to the maximum supported
|
||||
// sample rate such that the smallest possible input buffer is created, and put a default
|
||||
// value for channel count
|
||||
if (!rsxadev->routes[route_idx].output) {
|
||||
rsxadev->routes[route_idx].config.output_sample_rate = 48000;
|
||||
rsxadev->routes[route_idx].config.output_channel_mask = AUDIO_CHANNEL_OUT_STEREO;
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
}
|
||||
if (out) {
|
||||
out->route_handle = route_idx;
|
||||
rsxadev->routes[route_idx].output = out;
|
||||
rsxadev->routes[route_idx].config.output_channel_mask = config->channel_mask;
|
||||
#if ENABLE_RESAMPLING
|
||||
rsxadev->routes[route_idx].config.output_sample_rate = config->sample_rate;
|
||||
#endif // ENABLE_RESAMPLING
|
||||
}
|
||||
// Save the address
|
||||
strncpy(rsxadev->routes[route_idx].address, address, AUDIO_DEVICE_MAX_ADDRESS_LEN);
|
||||
|
@ -403,18 +376,14 @@ static void submix_audio_device_create_pipe_l(struct submix_audio_device * const
|
|||
{
|
||||
struct submix_config * const device_config = &rsxadev->routes[route_idx].config;
|
||||
uint32_t channel_count;
|
||||
if (out)
|
||||
if (out) {
|
||||
channel_count = audio_channel_count_from_out_mask(config->channel_mask);
|
||||
else
|
||||
} else {
|
||||
channel_count = audio_channel_count_from_in_mask(config->channel_mask);
|
||||
#if ENABLE_CHANNEL_CONVERSION
|
||||
// If channel conversion is enabled, allocate enough space for the maximum number of
|
||||
// possible channels stored in the pipe for the situation when the number of channels in
|
||||
// the output stream don't match the number in the input stream.
|
||||
const uint32_t pipe_channel_count = max(channel_count, 2);
|
||||
#else
|
||||
}
|
||||
|
||||
const uint32_t pipe_channel_count = channel_count;
|
||||
#endif // ENABLE_CHANNEL_CONVERSION
|
||||
|
||||
const NBAIO_Format format = Format_from_SR_C(config->sample_rate, pipe_channel_count,
|
||||
config->format);
|
||||
const NBAIO_Format offers[1] = {format};
|
||||
|
@ -444,11 +413,7 @@ static void submix_audio_device_create_pipe_l(struct submix_audio_device * const
|
|||
buffer_period_count;
|
||||
if (in) device_config->pipe_frame_size = audio_stream_in_frame_size(&in->stream);
|
||||
if (out) device_config->pipe_frame_size = audio_stream_out_frame_size(&out->stream);
|
||||
#if ENABLE_CHANNEL_CONVERSION
|
||||
// Calculate the pipe frame size based upon the number of channels.
|
||||
device_config->pipe_frame_size = (device_config->pipe_frame_size * pipe_channel_count) /
|
||||
channel_count;
|
||||
#endif // ENABLE_CHANNEL_CONVERSION
|
||||
|
||||
SUBMIX_ALOGV("submix_audio_device_create_pipe_l(): pipe frame size %zd, pipe size %zd, "
|
||||
"period size %zd", device_config->pipe_frame_size,
|
||||
device_config->buffer_size_frames, device_config->buffer_period_size_frames);
|
||||
|
@ -473,10 +438,6 @@ static void submix_audio_device_release_pipe_l(struct submix_audio_device * cons
|
|||
rsxadev->routes[route_idx].rsxSource.clear();
|
||||
}
|
||||
memset(rsxadev->routes[route_idx].address, 0, AUDIO_DEVICE_MAX_ADDRESS_LEN);
|
||||
#if ENABLE_RESAMPLING
|
||||
memset(rsxadev->routes[route_idx].resampler_buffer, 0,
|
||||
sizeof(int16_t) * DEFAULT_PIPE_SIZE_IN_FRAMES);
|
||||
#endif
|
||||
}
|
||||
|
||||
// Remove references to the specified input and output streams. When the device no longer
|
||||
|
@ -624,11 +585,7 @@ static uint32_t out_get_sample_rate(const struct audio_stream *stream)
|
|||
{
|
||||
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(
|
||||
const_cast<struct audio_stream *>(stream));
|
||||
#if ENABLE_RESAMPLING
|
||||
const uint32_t out_rate = out->dev->routes[out->route_handle].config.output_sample_rate;
|
||||
#else
|
||||
const uint32_t out_rate = out->dev->routes[out->route_handle].config.common.sample_rate;
|
||||
#endif // ENABLE_RESAMPLING
|
||||
SUBMIX_ALOGV("out_get_sample_rate() returns %u for addr %s",
|
||||
out_rate, out->dev->routes[out->route_handle].address);
|
||||
return out_rate;
|
||||
|
@ -637,17 +594,6 @@ static uint32_t out_get_sample_rate(const struct audio_stream *stream)
|
|||
static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate)
|
||||
{
|
||||
struct submix_stream_out * const out = audio_stream_get_submix_stream_out(stream);
|
||||
#if ENABLE_RESAMPLING
|
||||
// The sample rate of the stream can't be changed once it's set since this would change the
|
||||
// output buffer size and hence break playback to the shared pipe.
|
||||
if (rate != out->dev->routes[out->route_handle].config.output_sample_rate) {
|
||||
ALOGE("out_set_sample_rate() resampling enabled can't change sample rate from "
|
||||
"%u to %u for addr %s",
|
||||
out->dev->routes[out->route_handle].config.output_sample_rate, rate,
|
||||
out->dev->routes[out->route_handle].address);
|
||||
return -ENOSYS;
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
if (!sample_rate_supported(rate)) {
|
||||
ALOGE("out_set_sample_rate(rate=%u) rate unsupported", rate);
|
||||
return -ENOSYS;
|
||||
|
@ -994,11 +940,7 @@ static uint32_t in_get_sample_rate(const struct audio_stream *stream)
|
|||
{
|
||||
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(
|
||||
const_cast<struct audio_stream*>(stream));
|
||||
#if ENABLE_RESAMPLING
|
||||
const uint32_t rate = in->dev->routes[in->route_handle].config.input_sample_rate;
|
||||
#else
|
||||
const uint32_t rate = in->dev->routes[in->route_handle].config.common.sample_rate;
|
||||
#endif // ENABLE_RESAMPLING
|
||||
SUBMIX_ALOGV("in_get_sample_rate() returns %u", rate);
|
||||
return rate;
|
||||
}
|
||||
|
@ -1006,15 +948,6 @@ static uint32_t in_get_sample_rate(const struct audio_stream *stream)
|
|||
static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate)
|
||||
{
|
||||
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(stream);
|
||||
#if ENABLE_RESAMPLING
|
||||
// The sample rate of the stream can't be changed once it's set since this would change the
|
||||
// input buffer size and hence break recording from the shared pipe.
|
||||
if (rate != in->dev->routes[in->route_handle].config.input_sample_rate) {
|
||||
ALOGE("in_set_sample_rate() resampling enabled can't change sample rate from "
|
||||
"%u to %u", in->dev->routes[in->route_handle].config.input_sample_rate, rate);
|
||||
return -ENOSYS;
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
if (!sample_rate_supported(rate)) {
|
||||
ALOGE("in_set_sample_rate(rate=%u) rate unsupported", rate);
|
||||
return -ENOSYS;
|
||||
|
@ -1033,13 +966,6 @@ static size_t in_get_buffer_size(const struct audio_stream *stream)
|
|||
audio_stream_in_frame_size((const struct audio_stream_in *)stream);
|
||||
size_t buffer_size_frames = calculate_stream_pipe_size_in_frames(
|
||||
stream, config, config->buffer_period_size_frames, stream_frame_size);
|
||||
#if ENABLE_RESAMPLING
|
||||
// Scale the size of the buffer based upon the maximum number of frames that could be returned
|
||||
// given the ratio of output to input sample rate.
|
||||
buffer_size_frames = (size_t)(((float)buffer_size_frames *
|
||||
(float)config->input_sample_rate) /
|
||||
(float)config->output_sample_rate);
|
||||
#endif // ENABLE_RESAMPLING
|
||||
const size_t buffer_size_bytes = buffer_size_frames * stream_frame_size;
|
||||
SUBMIX_ALOGV("in_get_buffer_size() returns %zu bytes, %zu frames", buffer_size_bytes,
|
||||
buffer_size_frames);
|
||||
|
@ -1168,65 +1094,10 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
|
|||
// read the data from the pipe (it's non blocking)
|
||||
int attempts = 0;
|
||||
char* buff = (char*)buffer;
|
||||
#if ENABLE_CHANNEL_CONVERSION
|
||||
// Determine whether channel conversion is required.
|
||||
const uint32_t input_channels = audio_channel_count_from_in_mask(
|
||||
rsxadev->routes[in->route_handle].config.input_channel_mask);
|
||||
const uint32_t output_channels = audio_channel_count_from_out_mask(
|
||||
rsxadev->routes[in->route_handle].config.output_channel_mask);
|
||||
if (input_channels != output_channels) {
|
||||
SUBMIX_ALOGV("in_read(): %d output channels will be converted to %d "
|
||||
"input channels", output_channels, input_channels);
|
||||
// Only support 16-bit PCM channel conversion from mono to stereo or stereo to mono.
|
||||
ALOG_ASSERT(rsxadev->routes[in->route_handle].config.common.format ==
|
||||
AUDIO_FORMAT_PCM_16_BIT);
|
||||
ALOG_ASSERT((input_channels == 1 && output_channels == 2) ||
|
||||
(input_channels == 2 && output_channels == 1));
|
||||
}
|
||||
#endif // ENABLE_CHANNEL_CONVERSION
|
||||
|
||||
#if ENABLE_RESAMPLING
|
||||
const uint32_t input_sample_rate = in_get_sample_rate(&stream->common);
|
||||
const uint32_t output_sample_rate =
|
||||
rsxadev->routes[in->route_handle].config.output_sample_rate;
|
||||
const size_t resampler_buffer_size_frames =
|
||||
sizeof(rsxadev->routes[in->route_handle].resampler_buffer) /
|
||||
sizeof(rsxadev->routes[in->route_handle].resampler_buffer[0]);
|
||||
float resampler_ratio = 1.0f;
|
||||
// Determine whether resampling is required.
|
||||
if (input_sample_rate != output_sample_rate) {
|
||||
resampler_ratio = (float)output_sample_rate / (float)input_sample_rate;
|
||||
// Only support 16-bit PCM mono resampling.
|
||||
// NOTE: Resampling is performed after the channel conversion step.
|
||||
ALOG_ASSERT(rsxadev->routes[in->route_handle].config.common.format ==
|
||||
AUDIO_FORMAT_PCM_16_BIT);
|
||||
ALOG_ASSERT(audio_channel_count_from_in_mask(
|
||||
rsxadev->routes[in->route_handle].config.input_channel_mask) == 1);
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
|
||||
while ((remaining_frames > 0) && (attempts < MAX_READ_ATTEMPTS)) {
|
||||
ssize_t frames_read = -1977;
|
||||
size_t read_frames = remaining_frames;
|
||||
#if ENABLE_RESAMPLING
|
||||
char* const saved_buff = buff;
|
||||
if (resampler_ratio != 1.0f) {
|
||||
// Calculate the number of frames from the pipe that need to be read to generate
|
||||
// the data for the input stream read.
|
||||
const size_t frames_required_for_resampler = (size_t)(
|
||||
(float)read_frames * (float)resampler_ratio);
|
||||
read_frames = min(frames_required_for_resampler, resampler_buffer_size_frames);
|
||||
// Read into the resampler buffer.
|
||||
buff = (char*)rsxadev->routes[in->route_handle].resampler_buffer;
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
#if ENABLE_CHANNEL_CONVERSION
|
||||
if (output_channels == 1 && input_channels == 2) {
|
||||
// Need to read half the requested frames since the converted output
|
||||
// data will take twice the space (mono->stereo).
|
||||
read_frames /= 2;
|
||||
}
|
||||
#endif // ENABLE_CHANNEL_CONVERSION
|
||||
|
||||
SUBMIX_ALOGV("in_read(): frames available to read %zd", source->availableToRead());
|
||||
|
||||
|
@ -1234,56 +1105,6 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
|
|||
|
||||
SUBMIX_ALOGV("in_read(): frames read %zd", frames_read);
|
||||
|
||||
#if ENABLE_CHANNEL_CONVERSION
|
||||
// Perform in-place channel conversion.
|
||||
// NOTE: In the following "input stream" refers to the data returned by this function
|
||||
// and "output stream" refers to the data read from the pipe.
|
||||
if (input_channels != output_channels && frames_read > 0) {
|
||||
int16_t *data = (int16_t*)buff;
|
||||
if (output_channels == 2 && input_channels == 1) {
|
||||
// Offset into the output stream data in samples.
|
||||
ssize_t output_stream_offset = 0;
|
||||
for (ssize_t input_stream_frame = 0; input_stream_frame < frames_read;
|
||||
input_stream_frame++, output_stream_offset += 2) {
|
||||
// Average the content from both channels.
|
||||
data[input_stream_frame] = ((int32_t)data[output_stream_offset] +
|
||||
(int32_t)data[output_stream_offset + 1]) / 2;
|
||||
}
|
||||
} else if (output_channels == 1 && input_channels == 2) {
|
||||
// Offset into the input stream data in samples.
|
||||
ssize_t input_stream_offset = (frames_read - 1) * 2;
|
||||
for (ssize_t output_stream_frame = frames_read - 1; output_stream_frame >= 0;
|
||||
output_stream_frame--, input_stream_offset -= 2) {
|
||||
const short sample = data[output_stream_frame];
|
||||
data[input_stream_offset] = sample;
|
||||
data[input_stream_offset + 1] = sample;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif // ENABLE_CHANNEL_CONVERSION
|
||||
|
||||
#if ENABLE_RESAMPLING
|
||||
if (resampler_ratio != 1.0f) {
|
||||
SUBMIX_ALOGV("in_read(): resampling %zd frames", frames_read);
|
||||
const int16_t * const data = (int16_t*)buff;
|
||||
int16_t * const resampled_buffer = (int16_t*)saved_buff;
|
||||
// Resample with *no* filtering - if the data from the output stream was really
|
||||
// sampled at a different rate this will result in very nasty aliasing.
|
||||
const float output_stream_frames = (float)frames_read;
|
||||
size_t input_stream_frame = 0;
|
||||
for (float output_stream_frame = 0.0f;
|
||||
output_stream_frame < output_stream_frames &&
|
||||
input_stream_frame < remaining_frames;
|
||||
output_stream_frame += resampler_ratio, input_stream_frame++) {
|
||||
resampled_buffer[input_stream_frame] = data[(size_t)output_stream_frame];
|
||||
}
|
||||
ALOG_ASSERT(input_stream_frame <= (ssize_t)resampler_buffer_size_frames);
|
||||
SUBMIX_ALOGV("in_read(): resampler produced %zd frames", input_stream_frame);
|
||||
frames_read = input_stream_frame;
|
||||
buff = saved_buff;
|
||||
}
|
||||
#endif // ENABLE_RESAMPLING
|
||||
|
||||
if (frames_read > 0) {
|
||||
#if LOG_STREAMS_TO_FILES
|
||||
if (in->log_fd >= 0) write(in->log_fd, buff, frames_read * frame_size);
|
||||
|
@ -1411,7 +1232,6 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
|
|||
struct submix_audio_device * const rsxadev = audio_hw_device_get_submix_audio_device(dev);
|
||||
ALOGD("adev_open_output_stream(address=%s)", address);
|
||||
struct submix_stream_out *out;
|
||||
bool force_pipe_creation = false;
|
||||
(void)handle;
|
||||
(void)devices;
|
||||
(void)flags;
|
||||
|
@ -1464,25 +1284,20 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
|
|||
out->stream.get_next_write_timestamp = out_get_next_write_timestamp;
|
||||
out->stream.get_presentation_position = out_get_presentation_position;
|
||||
|
||||
#if ENABLE_RESAMPLING
|
||||
// Recreate the pipe with the correct sample rate so that MonoPipe.write() rate limits
|
||||
// writes correctly.
|
||||
force_pipe_creation = rsxadev->routes[route_idx].config.common.sample_rate
|
||||
!= config->sample_rate;
|
||||
#endif // ENABLE_RESAMPLING
|
||||
|
||||
// If the sink has been shutdown or pipe recreation is forced (see above), delete the pipe so
|
||||
// that it's recreated.
|
||||
if ((rsxadev->routes[route_idx].rsxSink != NULL
|
||||
&& rsxadev->routes[route_idx].rsxSink->isShutdown()) || force_pipe_creation) {
|
||||
&& rsxadev->routes[route_idx].rsxSink->isShutdown())) {
|
||||
submix_audio_device_release_pipe_l(rsxadev, route_idx);
|
||||
}
|
||||
|
||||
// Store a pointer to the device from the output stream.
|
||||
out->dev = rsxadev;
|
||||
// Initialize the pipe.
|
||||
ALOGV("adev_open_output_stream(): about to create pipe at index %d", route_idx);
|
||||
submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
|
||||
const size_t pipeSizeInFrames = pipe_size_in_frames(config->sample_rate);
|
||||
ALOGI("adev_open_output_stream(): about to create pipe at index %d, rate %u, pipe size %zu",
|
||||
route_idx, config->sample_rate, pipeSizeInFrames);
|
||||
submix_audio_device_create_pipe_l(rsxadev, config, pipeSizeInFrames,
|
||||
DEFAULT_PIPE_PERIOD_COUNT, NULL, out, address, route_idx);
|
||||
#if LOG_STREAMS_TO_FILES
|
||||
out->log_fd = open(LOG_STREAM_OUT_FILENAME, O_CREAT | O_TRUNC | O_WRONLY,
|
||||
|
@ -1611,7 +1426,8 @@ static size_t adev_get_input_buffer_size(const struct audio_hw_device *dev,
|
|||
const size_t frame_size_in_bytes = audio_channel_count_from_in_mask(config->channel_mask) *
|
||||
audio_bytes_per_sample(config->format);
|
||||
if (max_buffer_period_size_frames == 0) {
|
||||
max_buffer_period_size_frames = DEFAULT_PIPE_SIZE_IN_FRAMES;
|
||||
max_buffer_period_size_frames =
|
||||
pipe_size_in_frames(get_supported_sample_rate(config->sample_rate));
|
||||
}
|
||||
const size_t buffer_size = max_buffer_period_size_frames * frame_size_in_bytes;
|
||||
SUBMIX_ALOGV("adev_get_input_buffer_size() returns %zu bytes, %zu frames",
|
||||
|
@ -1724,8 +1540,10 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
|
|||
|
||||
in->read_error_count = 0;
|
||||
// Initialize the pipe.
|
||||
ALOGV("adev_open_input_stream(): about to create pipe");
|
||||
submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
|
||||
const size_t pipeSizeInFrames = pipe_size_in_frames(config->sample_rate);
|
||||
ALOGI("adev_open_input_stream(): about to create pipe at index %d, rate %u, pipe size %zu",
|
||||
route_idx, config->sample_rate, pipeSizeInFrames);
|
||||
submix_audio_device_create_pipe_l(rsxadev, config, pipeSizeInFrames,
|
||||
DEFAULT_PIPE_PERIOD_COUNT, in, NULL, address, route_idx);
|
||||
|
||||
sp <MonoPipe> sink = rsxadev->routes[route_idx].rsxSink;
|
||||
|
@ -1779,16 +1597,9 @@ static int adev_dump(const audio_hw_device_t *device, int fd)
|
|||
int n = snprintf(msg, sizeof(msg), "\nReroute submix audio module:\n");
|
||||
write(fd, &msg, n);
|
||||
for (int i=0 ; i < MAX_ROUTES ; i++) {
|
||||
#if ENABLE_RESAMPLING
|
||||
n = snprintf(msg, sizeof(msg), " route[%d] rate in=%d out=%d, addr=[%s]\n", i,
|
||||
rsxadev->routes[i].config.input_sample_rate,
|
||||
rsxadev->routes[i].config.output_sample_rate,
|
||||
rsxadev->routes[i].address);
|
||||
#else
|
||||
n = snprintf(msg, sizeof(msg), " route[%d], rate=%d addr=[%s]\n", i,
|
||||
rsxadev->routes[i].config.common.sample_rate,
|
||||
rsxadev->routes[i].address);
|
||||
#endif
|
||||
write(fd, &msg, n);
|
||||
}
|
||||
return 0;
|
||||
|
|
|
@ -87,7 +87,7 @@ int DynamicSensorManager::activate(int handle, bool enable) {
|
|||
}
|
||||
|
||||
return operateSensor(handle,
|
||||
[&enable] (sp<BaseSensorObject> s)->int {
|
||||
[=] (sp<BaseSensorObject> s)->int {
|
||||
return s->enable(enable);
|
||||
});
|
||||
}
|
||||
|
@ -98,7 +98,7 @@ int DynamicSensorManager::batch(int handle, nsecs_t sample_period, nsecs_t batch
|
|||
return 0;
|
||||
}
|
||||
return operateSensor(handle,
|
||||
[&sample_period, &batch_period] (sp<BaseSensorObject> s)->int {
|
||||
[=] (sp<BaseSensorObject> s)->int {
|
||||
return s->batch(sample_period, batch_period);
|
||||
});
|
||||
}
|
||||
|
@ -239,6 +239,87 @@ const sensor_t& DynamicSensorManager::getDynamicMetaSensor() const {
|
|||
return mMetaSensor;
|
||||
}
|
||||
|
||||
int DynamicSensorManager::operateSensor(
|
||||
int handle, OperateSensorFunc sensorFunc) {
|
||||
std::shared_future<int> sensorOp;
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mSensorOpQueueLock);
|
||||
|
||||
// Invoke the function asynchronously.
|
||||
sensorOp = std::async(
|
||||
[this, handle = handle, sensorFunc = sensorFunc,
|
||||
sensorOpIndex = mNextSensorOpIndex] ()->int {
|
||||
return operateSensor(handle, sensorFunc, sensorOpIndex);
|
||||
}).share();
|
||||
|
||||
// Add sensor operation to the queue.
|
||||
mSensorOpQueue.push({mNextSensorOpIndex, sensorOp});
|
||||
mNextSensorOpIndex++;
|
||||
}
|
||||
|
||||
// Wait for the sensor operation to complete.
|
||||
if (sensorOp.wait_for(kSensorOpTimeout) != std::future_status::ready) {
|
||||
ALOGE("sensor operation timed out");
|
||||
return TIMED_OUT;
|
||||
}
|
||||
|
||||
return sensorOp.get();
|
||||
}
|
||||
|
||||
int DynamicSensorManager::operateSensor(
|
||||
int handle, OperateSensorFunc sensorFunc, uint64_t sensorOpIndex) {
|
||||
int rv = 0;
|
||||
|
||||
// Wait until this sensor operation is at the head of the queue.
|
||||
while (1) {
|
||||
std::shared_future<int> headSensorOp;
|
||||
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mSensorOpQueueLock);
|
||||
|
||||
if (mSensorOpQueue.front().first == sensorOpIndex) {
|
||||
break;
|
||||
}
|
||||
headSensorOp = mSensorOpQueue.front().second;
|
||||
}
|
||||
headSensorOp.wait();
|
||||
}
|
||||
|
||||
// Perform sensor operation.
|
||||
sp<BaseSensorObject> sensor;
|
||||
{
|
||||
std::lock_guard<std::mutex> lk(mLock);
|
||||
const auto i = mMap.find(handle);
|
||||
if (i == mMap.end()) {
|
||||
rv = BAD_VALUE;
|
||||
}
|
||||
if (rv == 0) {
|
||||
sensor = i->second.promote();
|
||||
if (sensor == nullptr) {
|
||||
// sensor object is already gone
|
||||
rv = BAD_VALUE;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (rv == 0) {
|
||||
rv = sensorFunc(sensor);
|
||||
}
|
||||
|
||||
// Remove sensor operation from queue. When the operation's shared state is
|
||||
// destroyed, execution of this function ceases. Thus, if the state is
|
||||
// destroyed when the operation is removed from the queue, the lock will
|
||||
// never be released. To prevent that, the state is shared locally, so it
|
||||
// isn't destroyed until this function completes.
|
||||
std::shared_future<int> sensorOp;
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mSensorOpQueueLock);
|
||||
sensorOp = mSensorOpQueue.front().second;
|
||||
mSensorOpQueue.pop();
|
||||
}
|
||||
|
||||
return rv;
|
||||
}
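The two operateSensor() overloads above serialize HAL calls through a queue of shared futures and bound how long a caller waits. A minimal, self-contained sketch of that pattern (illustrative only; the 900 ms value mirrors kSensorOpTimeout from the header, everything else is simplified and is not the actual HAL code):

```cpp
// Sketch of the async + shared_future + bounded-wait pattern used above.
#include <chrono>
#include <future>
#include <iostream>
#include <thread>

int main() {
    constexpr std::chrono::milliseconds kTimeout(900);  // mirrors kSensorOpTimeout

    // Run the "sensor operation" asynchronously; share() lets the shared state
    // outlive a caller that gives up waiting.
    std::shared_future<int> op = std::async(std::launch::async, []() -> int {
        std::this_thread::sleep_for(std::chrono::milliseconds(100));  // stand-in for HAL work
        return 0;
    }).share();

    if (op.wait_for(kTimeout) != std::future_status::ready) {
        std::cout << "sensor operation timed out\n";  // caller would return TIMED_OUT
        return 1;
    }
    std::cout << "result: " << op.get() << "\n";
    return 0;
}
```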
|
||||
|
||||
DynamicSensorManager::ConnectionReport::ConnectionReport(
|
||||
int handle, sp<BaseSensorObject> sensor) :
|
||||
mSensor(*(sensor->getSensor())),
|
||||
|
|
|
@ -22,7 +22,9 @@
|
|||
#include <hardware/sensors.h>
|
||||
#include <utils/RefBase.h>
|
||||
|
||||
#include <future>
|
||||
#include <mutex>
|
||||
#include <queue>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
@ -92,24 +94,13 @@ private:
|
|||
// returns next available handle to use upon a new sensor connection, or -1 if we run out.
|
||||
int getNextAvailableHandle();
|
||||
|
||||
// TF: int foo(sp<BaseSensorObject> obj);
|
||||
template <typename TF>
|
||||
int operateSensor(int handle, TF f) const {
|
||||
sp<BaseSensorObject> s;
|
||||
{
|
||||
std::lock_guard<std::mutex> lk(mLock);
|
||||
const auto i = mMap.find(handle);
|
||||
if (i == mMap.end()) {
|
||||
return BAD_VALUE;
|
||||
}
|
||||
s = i->second.promote();
|
||||
if (s == nullptr) {
|
||||
// sensor object is already gone
|
||||
return BAD_VALUE;
|
||||
}
|
||||
}
|
||||
return f(s);
|
||||
}
|
||||
// Runs a sensor function with a timeout. On timeout, function could still
|
||||
// be running, so any function parameter or closure lifetimes should match
|
||||
// the function's lifetime.
|
||||
using OperateSensorFunc = std::function<int(sp<BaseSensorObject>)>;
|
||||
int operateSensor(int handle, OperateSensorFunc sensorFunc);
|
||||
int operateSensor(int handle, OperateSensorFunc sensorFunc,
|
||||
uint64_t sensorOpIndex);
|
||||
|
||||
// available sensor handle space
|
||||
const std::pair<int, int> mHandleRange;
|
||||
|
@ -133,6 +124,14 @@ private:
|
|||
|
||||
// daemons
|
||||
std::vector<sp<BaseDynamicSensorDaemon>> mDaemonVector;
|
||||
|
||||
// Sensor operation queue. Calls to the sensor HAL must complete within 1
|
||||
// second.
|
||||
static constexpr std::chrono::milliseconds
|
||||
kSensorOpTimeout = std::chrono::milliseconds(900);
|
||||
std::mutex mSensorOpQueueLock;
|
||||
std::queue<std::pair<uint64_t, std::shared_future<int>>> mSensorOpQueue;
|
||||
uint64_t mNextSensorOpIndex = 0;
|
||||
};
|
||||
|
||||
} // namespace SensorHalExt
|
||||
|
|
|
@ -71,7 +71,7 @@ public:
|
|||
const sensors_event_t& e) override;
|
||||
|
||||
private:
|
||||
static constexpr int32_t kDynamicHandleBase = 0;
|
||||
static constexpr int32_t kDynamicHandleBase = 1;
|
||||
static constexpr int32_t kDynamicHandleEnd = 0x1000000;
|
||||
static constexpr int32_t kMaxDynamicHandleCount = kDynamicHandleEnd -
|
||||
kDynamicHandleBase;
|
||||
|
|
|
@ -439,6 +439,7 @@ void HidRawSensor::initFeatureValueFromHidDeviceInfo(
|
|||
|
||||
featureValue->reportModeFlag = SENSOR_FLAG_SPECIAL_REPORTING_MODE;
|
||||
featureValue->isWakeUp = false;
|
||||
featureValue->useUniqueIdForUuid = false;
|
||||
memset(featureValue->uuid, 0, sizeof(featureValue->uuid));
|
||||
featureValue->isAndroidCustom = false;
|
||||
}
|
||||
|
@ -465,28 +466,16 @@ bool HidRawSensor::populateFeatureValueFromFeatureReport(
|
|||
for (const auto & r : packet.reports) {
|
||||
switch (r.usage) {
|
||||
case FRIENDLY_NAME:
|
||||
if (!r.isByteAligned() || r.bitSize != 16 || r.count < 1) {
|
||||
// invalid friendly name
|
||||
break;
|
||||
}
|
||||
if (decodeString(r, buffer, &str) && !str.empty()) {
|
||||
featureValue->name = str;
|
||||
}
|
||||
break;
|
||||
case SENSOR_MANUFACTURER:
|
||||
if (!r.isByteAligned() || r.bitSize != 16 || r.count < 1) {
|
||||
// invalid manufacturer
|
||||
break;
|
||||
}
|
||||
if (decodeString(r, buffer, &str) && !str.empty()) {
|
||||
featureValue->vendor = str;
|
||||
}
|
||||
break;
|
||||
case PERSISTENT_UNIQUE_ID:
|
||||
if (!r.isByteAligned() || r.bitSize != 16 || r.count < 1) {
|
||||
// invalid unique id string
|
||||
break;
|
||||
}
|
||||
if (decodeString(r, buffer, &str) && !str.empty()) {
|
||||
featureValue->uniqueId = str;
|
||||
}
|
||||
|
@ -541,7 +530,16 @@ bool HidRawSensor::validateFeatureValueAndBuildSensor() {
|
|||
}
|
||||
|
||||
// initialize uuid field, use name, vendor and uniqueId
|
||||
if (mFeatureInfo.name.size() >= 4
|
||||
// initialize uuid field using one of the following methods:
|
||||
//
|
||||
// 1. use uniqueId
|
||||
// 2. use name, vendor and uniqueId
|
||||
if (mFeatureInfo.useUniqueIdForUuid) {
|
||||
if (mFeatureInfo.uniqueId.size() == sizeof(mFeatureInfo.uuid)) {
|
||||
memcpy(mFeatureInfo.uuid, mFeatureInfo.uniqueId.c_str(),
|
||||
sizeof(mFeatureInfo.uuid));
|
||||
}
|
||||
} else if (mFeatureInfo.name.size() >= 4
|
||||
&& mFeatureInfo.vendor.size() >= 4
|
||||
&& mFeatureInfo.typeString.size() >= 4
|
||||
&& mFeatureInfo.uniqueId.size() >= 4) {
|
||||
|
@ -637,12 +635,17 @@ bool HidRawSensor::detectAndroidHeadTrackerSensor(
|
|||
return false;
|
||||
}
|
||||
|
||||
mFeatureInfo.type = SENSOR_TYPE_DEVICE_PRIVATE_BASE;
|
||||
mFeatureInfo.typeString = CUSTOM_TYPE_PREFIX + "headtracker";
|
||||
mFeatureInfo.type = SENSOR_TYPE_HEAD_TRACKER;
|
||||
mFeatureInfo.typeString = SENSOR_STRING_TYPE_HEAD_TRACKER;
|
||||
mFeatureInfo.reportModeFlag = SENSOR_FLAG_CONTINUOUS_MODE;
|
||||
mFeatureInfo.permission = "";
|
||||
mFeatureInfo.isWakeUp = false;
|
||||
|
||||
// HID head tracker sensors must use the HID unique ID for the sensor UUID
|
||||
// to permit association between the sensor and audio device (see
|
||||
// specification for HEAD_TRACKER in SensorType).
|
||||
mFeatureInfo.useUniqueIdForUuid = true;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -888,10 +891,18 @@ bool HidRawSensor::findSensorControlUsage(const std::vector<HidParser::ReportPac
|
|||
mReportIntervalId = reportInterval->id;
|
||||
mReportIntervalBitOffset = reportInterval->bitOffset;
|
||||
mReportIntervalBitSize = reportInterval->bitSize;
|
||||
mReportIntervalScale = reportInterval->a;
|
||||
mReportIntervalOffset = reportInterval->b;
|
||||
|
||||
mFeatureInfo.minDelay = std::max(static_cast<int64_t>(1), reportInterval->minRaw) * 1000;
|
||||
mFeatureInfo.maxDelay = std::min(static_cast<int64_t>(1000000),
|
||||
reportInterval->maxRaw) * 1000; // maximum 1000 second
|
||||
mFeatureInfo.minDelay = 1000000.0
|
||||
* (reportInterval->minRaw + reportInterval->b)
|
||||
* reportInterval->a;
|
||||
mFeatureInfo.minDelay = std::max(1000, mFeatureInfo.minDelay);
|
||||
mFeatureInfo.maxDelay = 1000000.0
|
||||
* (reportInterval->maxRaw + reportInterval->b)
|
||||
* reportInterval->a;
|
||||
mFeatureInfo.maxDelay = std::min(static_cast<int64_t>(1000000000),
|
||||
mFeatureInfo.maxDelay);
|
||||
}
|
||||
return true;
|
||||
return (mPowerStateId >= 0 || mReportingStateId >= 0) && mReportIntervalId >= 0;
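A hedged worked example of the new delay derivation, where a and b are the report-interval scale and offset captured just above (the 0.001 scale assumes an interval field expressed in milliseconds; all numbers are illustrative):

```cpp
// Illustration only: with reportInterval->a == 0.001 (ms -> s), b == 0,
// minRaw == 10 and maxRaw == 60000:
//   minDelay = 1000000.0 * (10 + 0)    * 0.001 = 10000 us    (10 ms)
//   maxDelay = 1000000.0 * (60000 + 0) * 0.001 = 60000000 us (60 s)
// minDelay is then clamped to at least 1000 us and maxDelay to at most
// 1000000000 us (1000 s), per the std::max/std::min calls above.
```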
|
||||
|
@ -978,7 +989,9 @@ int HidRawSensor::batch(int64_t samplingPeriod, int64_t batchingPeriod) {
|
|||
if (device->getFeature(id, &buffer)
|
||||
&& (8 * buffer.size()) >=
|
||||
(mReportIntervalBitOffset + mReportIntervalBitSize)) {
|
||||
int64_t periodMs = samplingPeriod / 1000000; //ns -> ms
|
||||
int64_t periodMs =
|
||||
(((static_cast<double>(samplingPeriod)) / 1000000000.0)
|
||||
/ mReportIntervalScale) - mReportIntervalOffset;
|
||||
int64_t maxPeriodMs =
|
||||
(1LL << std::min(mReportIntervalBitSize, 63U)) - 1;
|
||||
periodMs = std::min(periodMs, maxPeriodMs);
|
||||
|
@ -1008,6 +1021,50 @@ void HidRawSensor::handleInput(uint8_t id, const std::vector<uint8_t> &message)
|
|||
.type = mSensor.type
|
||||
};
|
||||
bool valid = true;
|
||||
|
||||
switch (mFeatureInfo.type) {
|
||||
case SENSOR_TYPE_HEAD_TRACKER:
|
||||
valid = getHeadTrackerEventData(message, &event);
|
||||
break;
|
||||
default:
|
||||
valid = getSensorEventData(message, &event);
|
||||
break;
|
||||
}
|
||||
if (!valid) {
|
||||
LOG_E << "Invalid data observed in decoding, discard" << LOG_ENDL;
|
||||
return;
|
||||
}
|
||||
event.timestamp = -1;
|
||||
generateEvent(event);
|
||||
}
|
||||
|
||||
bool HidRawSensor::getHeadTrackerEventData(const std::vector<uint8_t> &message,
|
||||
sensors_event_t *event) {
|
||||
head_tracker_event_t *head_tracker;
|
||||
|
||||
head_tracker = &(event->head_tracker);
|
||||
if (!getReportFieldValue(message, &(mTranslateTable[0]),
|
||||
&(head_tracker->rx))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[1]),
|
||||
&(head_tracker->ry))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[2]),
|
||||
&(head_tracker->rz))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[3]),
|
||||
&(head_tracker->vx))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[4]),
|
||||
&(head_tracker->vy))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[5]),
|
||||
&(head_tracker->vz))
|
||||
|| !getReportFieldValue(message, &(mTranslateTable[6]),
|
||||
&(head_tracker->discontinuity_count))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool HidRawSensor::getSensorEventData(const std::vector<uint8_t> &message,
|
||||
sensors_event_t *event) {
|
||||
for (const auto &rec : mTranslateTable) {
|
||||
int64_t v = (message[rec.byteOffset + rec.byteSize - 1] & 0x80) ? -1 : 0;
|
||||
for (int i = static_cast<int>(rec.byteSize) - 1; i >= 0; --i) {
|
||||
|
@ -1017,26 +1074,23 @@ void HidRawSensor::handleInput(uint8_t id, const std::vector<uint8_t> &message)
|
|||
switch (rec.type) {
|
||||
case TYPE_FLOAT:
|
||||
if (v > rec.maxValue || v < rec.minValue) {
|
||||
valid = false;
|
||||
return false;
|
||||
}
|
||||
event.data[rec.index] = rec.a * (v + rec.b);
|
||||
event->data[rec.index] = rec.a * (v + rec.b);
|
||||
break;
|
||||
case TYPE_INT64:
|
||||
if (v > rec.maxValue || v < rec.minValue) {
|
||||
valid = false;
|
||||
return false;
|
||||
}
|
||||
event.u64.data[rec.index] = v + rec.b;
|
||||
event->u64.data[rec.index] = v + rec.b;
|
||||
break;
|
||||
case TYPE_ACCURACY:
|
||||
event.magnetic.status = (v & 0xFF) + rec.b;
|
||||
event->magnetic.status = (v & 0xFF) + rec.b;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!valid) {
|
||||
LOG_V << "Range error observed in decoding, discard" << LOG_ENDL;
|
||||
}
|
||||
event.timestamp = -1;
|
||||
generateEvent(event);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
std::string HidRawSensor::dump() const {
|
||||
|
@ -1055,11 +1109,15 @@ std::string HidRawSensor::dump() const {
|
|||
<< " fifoSize: " << mFeatureInfo.fifoSize << LOG_ENDL
|
||||
<< " fifoMaxSize: " << mFeatureInfo.fifoMaxSize << LOG_ENDL
|
||||
<< " reportModeFlag: " << mFeatureInfo.reportModeFlag << LOG_ENDL
|
||||
<< " isWakeUp: " << (mFeatureInfo.isWakeUp ? "true" : "false") << LOG_ENDL
|
||||
<< " uniqueId: " << mFeatureInfo.uniqueId << LOG_ENDL
|
||||
<< " uuid: ";
|
||||
<< " isWakeUp: " << (mFeatureInfo.isWakeUp ? "true" : "false") << LOG_ENDL;
|
||||
|
||||
ss << std::hex << std::setfill('0');
|
||||
ss << " uniqueId: " << std::hex << std::setfill('0');
|
||||
for (auto d : mFeatureInfo.uniqueId) {
|
||||
ss << std::setw(2) << static_cast<int>(d) << " ";
|
||||
}
|
||||
ss << std::dec << std::setfill(' ') << LOG_ENDL;
|
||||
|
||||
ss << " uuid: " << std::hex << std::setfill('0');
|
||||
for (auto d : mFeatureInfo.uuid) {
|
||||
ss << std::setw(2) << static_cast<int>(d) << " ";
|
||||
}
|
||||
|
|
|
@ -46,6 +46,14 @@ public:
|
|||
// handle input report received
|
||||
void handleInput(uint8_t id, const std::vector<uint8_t> &message);
|
||||
|
||||
// get head tracker sensor event data
|
||||
bool getHeadTrackerEventData(const std::vector<uint8_t> &message,
|
||||
sensors_event_t *event);
|
||||
|
||||
// get generic sensor event data
|
||||
bool getSensorEventData(const std::vector<uint8_t> &message,
|
||||
sensors_event_t *event);
|
||||
|
||||
// indicate if the HidRawSensor is a valid one
|
||||
bool isValid() const { return mValid; };
|
||||
|
||||
|
@ -86,6 +94,7 @@ private:
|
|||
size_t fifoMaxSize;
|
||||
uint32_t reportModeFlag;
|
||||
bool isWakeUp;
|
||||
bool useUniqueIdForUuid;
|
||||
|
||||
// dynamic sensor specific
|
||||
std::string uniqueId;
|
||||
|
@ -140,6 +149,33 @@ private:
|
|||
// process HID sensor spec defined orientation (quaternion) sensor usages.
|
||||
bool processQuaternionUsage(const std::vector<HidParser::ReportPacket> &packets);
|
||||
|
||||
// get the value of a report field
|
||||
template<typename ValueType>
|
||||
bool getReportFieldValue(const std::vector<uint8_t> &message,
|
||||
ReportTranslateRecord* rec, ValueType* value) {
|
||||
bool valid = true;
|
||||
int64_t v;
|
||||
|
||||
v = (message[rec->byteOffset + rec->byteSize - 1] & 0x80) ? -1 : 0;
|
||||
for (int i = static_cast<int>(rec->byteSize) - 1; i >= 0; --i) {
|
||||
v = (v << 8) | message[rec->byteOffset + i]; // HID is little endian
|
||||
}
|
||||
if (v > rec->maxValue || v < rec->minValue) {
|
||||
valid = false;
|
||||
}
|
||||
|
||||
switch (rec->type) {
|
||||
case TYPE_FLOAT:
|
||||
*value = rec->a * (v + rec->b);
|
||||
break;
|
||||
case TYPE_INT64:
|
||||
*value = v + rec->b;
|
||||
break;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
// dump data for test/debug purpose
|
||||
std::string dump() const;
|
||||
|
||||
|
@ -159,6 +195,8 @@ private:
|
|||
int mReportIntervalId;
|
||||
unsigned int mReportIntervalBitOffset;
|
||||
unsigned int mReportIntervalBitSize;
|
||||
double mReportIntervalScale;
|
||||
int64_t mReportIntervalOffset;
|
||||
|
||||
// Input report translate table
|
||||
std::vector<ReportTranslateRecord> mTranslateTable;
|
||||
|
|
|
@@ -240,10 +240,18 @@ std::vector<HidParser::ReportPacket> HidParser::convertGroupToPacket(
        auto logical = r.getLogicalRange();
        auto physical = r.getPhysicalRange();

        int64_t offset = physical.first - logical.first;
        double scale = static_cast<double>((physical.second - physical.first))
        double scale;
        if ((physical.first != physical.second) &&
                (logical.first != logical.second)) {
            scale = static_cast<double>(physical.second - physical.first)
                    / (logical.second - logical.first);
        } else {
            scale = (physical.first != 0) ? physical.first : 1.0;
        }
        scale *= r.getExponentValue();
        int64_t offset =
                (physical.first * r.getExponentValue() / scale) -
                logical.first;

        ReportItem digest = {
            .usage = r.getFullUsage(),
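A hedged worked example of the new mapping (illustrative values, not taken from any particular HID descriptor); the resulting scale and offset end up as the a and b of the translate record, which is later applied as `rec.a * (v + rec.b)` when decoding input reports:

```cpp
// Worked example (illustrative only): logical range [0, 255],
// physical range [-128, 127], unit exponent 1.
//   scale  = (127 - (-128)) / (255 - 0) = 1.0;  scale *= 1  -> 1.0
//   offset = (-128 * 1 / 1.0) - 0                            -> -128
// A raw field value v then decodes as a * (v + b) = 1.0 * (v - 128),
// so raw 0 maps to -128 and raw 255 maps to 127 (the physical range).
// When the physical range is degenerate, scale falls back to
// physical.first (or 1.0 if that is zero), per the else branch above.
```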
|
modules/sensors/dynamic_sensor/README.md (new file, +153 lines)
@@ -0,0 +1,153 @@
# Dynamic Sensors

[TOC]

## Links

* [Sensor HAL dynamic sensor support](https://source.android.com/devices/sensors/sensors-hal2#dynamic-sensors)
* [Sensors Multi-HAL](https://source.android.com/devices/sensors/sensors-multihal)

## Adding dynamic sensor support to a device

A few files need to be modified to add dynamic sensor support to a device. The
dynamic sensor HAL must be enabled in the device product makefile and in the
sensor sub-HAL configuration file, raw HID devices must be configured, and raw
HID device and dynamic sensor property permissions must be set up in the
SELinux policy files.

```shell
acme-co$ git -C device/acme/rocket-phone diff
diff --git a/sensor_hal/hals.conf b/sensor_hal/hals.conf
index a1f4b8b..d112546 100644
--- a/sensor_hal/hals.conf
+++ b/sensor_hal/hals.conf
@@ -1 +1,2 @@
+sensors.dynamic_sensor_hal.so
 sensors.rocket-phone.so
diff --git a/rocket-phone.mk b/rocket-phone.mk
index 3fc8538..b1bd8a1 100644
--- a/rocket-phone.mk
+++ b/rocket-phone.mk
@@ -73,6 +73,9 @@
 PRODUCT_PACKAGES += sensors.rocket-phone
 PRODUCT_PACKAGES += thruster_stats

+# Add the dynamic sensor HAL.
+PRODUCT_PACKAGES += sensors.dynamic_sensor_hal
+
 # Only install test tools in debug build or eng build.
 ifneq ($(filter userdebug eng,$(TARGET_BUILD_VARIANT)),)
 PRODUCT_PACKAGES += thruster_test
diff --git a/conf/ueventd.rc b/conf/ueventd.rc
index 88ee00b..2f03009 100644
--- a/conf/ueventd.rc
+++ b/conf/ueventd.rc
@@ -209,3 +209,7 @@

 # Thrusters
 /dev/thruster* 0600 system system
+
+# Raw HID devices
+/dev/hidraw* 0660 system system
+
diff --git a/sepolicy/sensor_hal.te b/sepolicy/sensor_hal.te
index 0797253..22a4208 100644
--- a/sepolicy/sensor_hal.te
+++ b/sepolicy/sensor_hal.te
@@ -52,6 +52,9 @@
 # Allow sensor HAL to read thruster state.
 allow hal_sensors_default thruster_state:file r_file_perms;

+# Allow access for dynamic sensor properties.
+get_prop(hal_sensors_default, vendor_dynamic_sensor_prop)
+
+# Allow access to raw HID devices for dynamic sensors.
+allow hal_sensors_default device:dir r_dir_perms;
+allow hal_sensors_default hidraw_device:chr_file rw_file_perms;
+
 #
 # Thruster sensor enforcements.
 #
diff --git a/sepolicy/device.te b/sepolicy/device.te
index bc3c947..bad0be0 100644
--- a/sepolicy/device.te
+++ b/sepolicy/device.te
@@ -55,3 +55,7 @@

 # Thruster
 type thruster_device, dev_type;
+
+# Raw HID device
+type hidraw_device, dev_type;
+
diff --git a/sepolicy/property.te b/sepolicy/property.te
index 4b671a4..bb0894f 100644
--- a/sepolicy/property.te
+++ b/sepolicy/property.te
@@ -49,3 +49,7 @@

 # Thruster
 vendor_internal_prop(vendor_thruster_debug_prop)
+
+# Dynamic sensor
+vendor_internal_prop(vendor_dynamic_sensor_prop)
+
diff --git a/sepolicy/file_contexts b/sepolicy/file_contexts
index bc03a78..ff401dc 100644
--- a/sepolicy/file_contexts
+++ b/sepolicy/file_contexts
@@ -441,3 +441,7 @@
 /dev/thruster-fuel u:object_r:thruster_device:s0
 /dev/thruster-output u:object_r:thruster_device:s0
 /dev/thruster-telemetry u:object_r:thruster_device:s0
+
+# Raw HID device
+/dev/hidraw[0-9]* u:object_r:hidraw_device:s0
+
diff --git a/sepolicy/property_contexts b/sepolicy/property_contexts
index 5d2f018..18a6059 100644
--- a/sepolicy/property_contexts
+++ b/sepolicy/property_contexts
@@ -104,3 +104,7 @@

 # Thruster
 vendor.thruster.debug u:object_r:vendor_thruster_debug_prop:s0
+
+# Dynamic sensor
+vendor.dynamic_sensor. u:object_r:vendor_dynamic_sensor_prop:s0
+
acme-co$
```

Once the file modifications are made, rebuild and flash. The dynamic sensor HAL
should be initialized and appear in the sensor service.

```shell
acme-co$ make -j28 && fastboot flashall
.
.
.
acme-co$ adb logcat -d | grep DynamicSensorHal
12-15 18:18:45.735 791 791 D DynamicSensorHal: DynamicSensorsSubHal::getSensorsList_2_1 invoked.
12-15 18:18:47.474 791 791 D DynamicSensorHal: DynamicSensorsSubHal::initialize invoked.
acme-co$ adb shell dumpsys sensorservice | grep Dynamic
0000000000) Dynamic Sensor Manager | Google | ver: 1 | type: android.sensor.dynamic_sensor_meta(32) | perm: n/a | flags: 0x00000007
Dynamic Sensor Manager (handle=0x00000000, connections=1)
Dynamic Sensor Manager 0x00000000 | status: active | pending flush events 0
acme-co$ adb logcat -c
acme-co$
```

When a dynamic sensor is paired with the device (e.g., Bluetooth rocket buds),
it will appear in the sensor service.

```shell
acme-co$ adb logcat -d | grep "DynamicSensorHal\|hidraw\|Rocket"
12-15 18:19:55.268 157 157 I hid-generic 0003: 1234:5678.0001: hidraw0: BLUETOOTH HID v0.00 Device [RocketBuds] on
12-15 18:19:55.235 791 809 E DynamicSensorHal: return 1 sensors
12-15 18:19:56.239 1629 1787 I SensorService: Dynamic sensor handle 0x1 connected, type 65536, name RocketBuds
acme-co$ adb shell dumpsys sensorservice | grep Rocket
0x00000001) RocketBuds | BLUETOOTH 1234:1234 | ver: 1 | type: com.google.hardware.sensor.hid_dynamic.headtracker(65536) | perm: n/a | flags: 0x00000020
acme-co$
```
|
||||
|
|
@ -460,6 +460,41 @@ static unsigned int populate_sample_rates_from_profile(const alsa_device_profile
|
|||
return num_sample_rates;
|
||||
}
|
||||
|
||||
static bool are_all_devices_found(unsigned int num_devices_to_find,
|
||||
const int cards_to_find[],
|
||||
const int devices_to_find[],
|
||||
unsigned int num_devices,
|
||||
const int cards[],
|
||||
const int devices[]) {
|
||||
for (unsigned int i = 0; i < num_devices_to_find; ++i) {
|
||||
unsigned int j = 0;
|
||||
for (; j < num_devices; ++j) {
|
||||
if (cards_to_find[i] == cards[j] && devices_to_find[i] == devices[j]) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (j >= num_devices) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool are_devices_the_same(unsigned int left_num_devices,
|
||||
const int left_cards[],
|
||||
const int left_devices[],
|
||||
unsigned int right_num_devices,
|
||||
const int right_cards[],
|
||||
const int right_devices[]) {
|
||||
if (left_num_devices != right_num_devices) {
|
||||
return false;
|
||||
}
|
||||
return are_all_devices_found(left_num_devices, left_cards, left_devices,
|
||||
right_num_devices, right_cards, right_devices) &&
|
||||
are_all_devices_found(right_num_devices, right_cards, right_devices,
|
||||
left_num_devices, left_cards, left_devices);
|
||||
}
|
||||
|
||||
/*
|
||||
* HAL Functions
|
||||
*/
|
||||
|
@ -548,10 +583,11 @@ static void stream_dump_alsa_devices(const struct listnode *alsa_devices, int fd
|
|||
list_for_each(node, alsa_devices) {
|
||||
struct alsa_device_info *device_info =
|
||||
node_to_item(node, struct alsa_device_info, list_node);
|
||||
dprintf(fd, "Output Profile %zu:\n", i);
|
||||
const char* direction = device_info->profile.direction == PCM_OUT ? "Output" : "Input";
|
||||
dprintf(fd, "%s Profile %zu:\n", direction, i);
|
||||
profile_dump(&device_info->profile, fd);
|
||||
|
||||
dprintf(fd, "Output Proxy %zu:\n", i);
|
||||
dprintf(fd, "%s Proxy %zu:\n", direction, i);
|
||||
proxy_dump(&device_info->proxy, fd);
|
||||
}
|
||||
}
|
||||
|
@ -1648,6 +1684,13 @@ static int adev_create_audio_patch(struct audio_hw_device *dev,
|
|||
saved_devices[num_saved_devices++] = device_info->profile.device;
|
||||
}
|
||||
|
||||
if (are_devices_the_same(
|
||||
num_configs, cards, devices, num_saved_devices, saved_cards, saved_devices)) {
|
||||
// The new devices are the same as original ones. No need to update.
|
||||
stream_unlock(lock);
|
||||
return 0;
|
||||
}
|
||||
|
||||
device_lock(adev);
|
||||
stream_standby_l(alsa_devices, out == NULL ? &in->standby : &out->standby);
|
||||
device_unlock(adev);
|
||||
|
|