audio: Add presentation and latency control to IStreamOut
Add the following methods to IStreamOut:
 - get/setAudioDescriptionMixLevel;
 - get/setDualMonoMode;
 - getRecommendedLatencyModes/setLatencyMode;
 - get/setPlaybackRateParameters;
 - selectPresentation.

Add IStreamOutEventCallback interface for async notifications of the client
about playback-related events. This callback can be provided to
IModule.openOutputStream.

Bug: 205884982
Test: atest VtsHalAudioCoreTargetTest
Change-Id: I8974102d232cdd121a53edccbf26c33778a94e5e
parent 95aa48b473
commit 7492720e49

12 changed files with 666 additions and 0 deletions
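A minimal client-side sketch of how the new event callback can be supplied to IModule.openOutputStream (NDK backend). The class name BnStreamOutEventCallback, the include paths, and the helper function follow the usual NDK AIDL codegen conventions and are assumptions for illustration, not part of this change:

// Hypothetical client-side sketch, assuming standard NDK AIDL generated names.
#include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
#include <aidl/android/hardware/audio/core/IModule.h>
#include <aidl/android/media/audio/common/AudioLatencyMode.h>

using aidl::android::hardware::audio::core::BnStreamOutEventCallback;
using aidl::android::hardware::audio::core::IModule;
using aidl::android::media::audio::common::AudioLatencyMode;

class MyEventCallback : public BnStreamOutEventCallback {
    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& audioMetadata) override {
        // Decode the AudioMetadata ByteString here (see Metadata.h / AudioMetadata.java).
        (void)audioMetadata;
        return ndk::ScopedAStatus::ok();
    }
    ndk::ScopedAStatus onRecommendedLatencyModeChanged(
            const std::vector<AudioLatencyMode>& modes) override {
        // React to the updated set of recommended latency modes.
        (void)modes;
        return ndk::ScopedAStatus::ok();
    }
};

ndk::ScopedAStatus openWithEventCallback(const std::shared_ptr<IModule>& module,
                                         IModule::OpenOutputStreamArguments args,
                                         IModule::OpenOutputStreamReturn* ret) {
    // The new 'eventCallback' field is @nullable; providing it is optional.
    args.eventCallback = ndk::SharedRefBase::make<MyEventCallback>();
    return module->openOutputStream(args, ret);
}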
@@ -119,6 +119,7 @@ aidl_interface {
            "android/hardware/audio/core/IStreamCommon.aidl",
            "android/hardware/audio/core/IStreamIn.aidl",
            "android/hardware/audio/core/IStreamOut.aidl",
            "android/hardware/audio/core/IStreamOutEventCallback.aidl",
            "android/hardware/audio/core/ITelephony.aidl",
            "android/hardware/audio/core/MicrophoneDynamicInfo.aidl",
            "android/hardware/audio/core/MicrophoneInfo.aidl",
@@ -46,6 +46,7 @@ interface IModule {
  android.hardware.audio.core.AudioRoute[] getAudioRoutesForAudioPort(int portId);
  android.hardware.audio.core.IModule.OpenInputStreamReturn openInputStream(in android.hardware.audio.core.IModule.OpenInputStreamArguments args);
  android.hardware.audio.core.IModule.OpenOutputStreamReturn openOutputStream(in android.hardware.audio.core.IModule.OpenOutputStreamArguments args);
  android.hardware.audio.core.IModule.SupportedPlaybackRateFactors getSupportedPlaybackRateFactors();
  android.hardware.audio.core.AudioPatch setAudioPatch(in android.hardware.audio.core.AudioPatch requested);
  boolean setAudioPortConfig(in android.media.audio.common.AudioPortConfig requested, out android.media.audio.common.AudioPortConfig suggested);
  void resetAudioPatch(int patchId);
@@ -84,12 +85,20 @@ interface IModule {
    @nullable android.media.audio.common.AudioOffloadInfo offloadInfo;
    long bufferSizeFrames;
    @nullable android.hardware.audio.core.IStreamCallback callback;
    @nullable android.hardware.audio.core.IStreamOutEventCallback eventCallback;
  }
  @VintfStability
  parcelable OpenOutputStreamReturn {
    android.hardware.audio.core.IStreamOut stream;
    android.hardware.audio.core.StreamDescriptor desc;
  }
  @VintfStability
  parcelable SupportedPlaybackRateFactors {
    float minSpeed;
    float maxSpeed;
    float minPitch;
    float maxPitch;
  }
  @Backing(type="int") @VintfStability
  enum ScreenRotation {
    DEG_0 = 0,
@@ -38,6 +38,16 @@ interface IStreamOut {
  void updateMetadata(in android.hardware.audio.common.SourceMetadata sourceMetadata);
  float[] getHwVolume();
  void setHwVolume(in float[] channelVolumes);
  float getAudioDescriptionMixLevel();
  void setAudioDescriptionMixLevel(float leveldB);
  android.media.audio.common.AudioDualMonoMode getDualMonoMode();
  void setDualMonoMode(android.media.audio.common.AudioDualMonoMode mode);
  android.media.audio.common.AudioLatencyMode[] getRecommendedLatencyModes();
  void setLatencyMode(android.media.audio.common.AudioLatencyMode mode);
  android.media.audio.common.AudioPlaybackRate getPlaybackRateParameters();
  void setPlaybackRateParameters(in android.media.audio.common.AudioPlaybackRate playbackRate);
  void selectPresentation(int presentationId, int programId);
  const int HW_VOLUME_MIN = 0;
  const int HW_VOLUME_MAX = 1;
  const int AUDIO_DESCRIPTION_MIX_LEVEL_MAX = 48;
}
@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
///////////////////////////////////////////////////////////////////////////////
// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
///////////////////////////////////////////////////////////////////////////////

// This file is a snapshot of an AIDL file. Do not edit it manually. There are
// two cases:
// 1). this is a frozen version file - do not edit this in any case.
// 2). this is a 'current' file. If you make a backwards compatible change to
//     the interface (from the latest frozen version), the build system will
//     prompt you to update this file with `m <name>-update-api`.
//
// You must not make a backward incompatible change to any AIDL file built
// with the aidl_interface module type with versions property set. The module
// type is used to build AIDL files in a way that they can be used across
// independently updatable components of the system. If a device is shipped
// with such a backward incompatible change, it has a high risk of breaking
// later when a module using the interface is updated, e.g., Mainline modules.

package android.hardware.audio.core;
@VintfStability
interface IStreamOutEventCallback {
  oneway void onCodecFormatChanged(in byte[] audioMetadata);
  oneway void onRecommendedLatencyModeChanged(in android.media.audio.common.AudioLatencyMode[] modes);
}
@@ -25,6 +25,7 @@ import android.hardware.audio.core.ISoundDose;
import android.hardware.audio.core.IStreamCallback;
import android.hardware.audio.core.IStreamIn;
import android.hardware.audio.core.IStreamOut;
import android.hardware.audio.core.IStreamOutEventCallback;
import android.hardware.audio.core.ITelephony;
import android.hardware.audio.core.MicrophoneInfo;
import android.hardware.audio.core.ModuleDebug;
@@ -34,6 +35,7 @@ import android.hardware.audio.effect.IEffect;
import android.media.audio.common.AudioOffloadInfo;
import android.media.audio.common.AudioPort;
import android.media.audio.common.AudioPortConfig;
import android.media.audio.common.Float;

/**
 * Each instance of IModule corresponds to a separate audio module. The system
@@ -389,6 +391,8 @@ interface IModule {
        long bufferSizeFrames;
        /** Client callback interface for the non-blocking output mode. */
        @nullable IStreamCallback callback;
        /** Optional callback to notify client about stream events. */
        @nullable IStreamOutEventCallback eventCallback;
    }
    @VintfStability
    parcelable OpenOutputStreamReturn {
@@ -397,6 +401,33 @@ interface IModule {
    }
    OpenOutputStreamReturn openOutputStream(in OpenOutputStreamArguments args);

    /**
     * Get supported ranges of playback rate factors.
     *
     * See 'PlaybackRate' for the information on the playback rate parameters.
     * This method provides supported ranges (inclusive) for the speed factor
     * and the pitch factor.
     *
     * If the HAL module supports setting the playback rate, it is recommended
     * to support speed and pitch factor values at least in the range from 0.5f
     * to 2.0f.
     *
     * @throws EX_UNSUPPORTED_OPERATION If setting of playback rate parameters
     *                                  is not supported by the module.
     */
    @VintfStability
    parcelable SupportedPlaybackRateFactors {
        /** The minimum allowed speed factor. */
        float minSpeed;
        /** The maximum allowed speed factor. */
        float maxSpeed;
        /** The minimum allowed pitch factor. */
        float minPitch;
        /** The maximum allowed pitch factor. */
        float maxPitch;
    }
    SupportedPlaybackRateFactors getSupportedPlaybackRateFactors();

    /**
     * Set an audio patch.
     *
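An illustrative client-side sketch (an assumption, not part of this change) of how the module-wide ranges combine with the per-stream setter added in IStreamOut; the brace-initialization order of AudioPlaybackRate matches the one used in the VTS test below:

#include <algorithm>

using aidl::android::hardware::audio::core::IModule;
using aidl::android::hardware::audio::core::IStreamOut;
using aidl::android::media::audio::common::AudioPlaybackRate;

ndk::ScopedAStatus applySpeed(const std::shared_ptr<IModule>& module,
                              const std::shared_ptr<IStreamOut>& stream, float speed) {
    IModule::SupportedPlaybackRateFactors factors;
    if (auto status = module->getSupportedPlaybackRateFactors(&factors); !status.isOk()) {
        return status;  // e.g. EX_UNSUPPORTED_OPERATION if rate control is unavailable
    }
    // Fields: speed, pitch, timestretch mode, fallback mode.
    AudioPlaybackRate rate{std::clamp(speed, factors.minSpeed, factors.maxSpeed), 1.0f,
                           AudioPlaybackRate::TimestretchMode::DEFAULT,
                           AudioPlaybackRate::TimestretchFallbackMode::FAIL};
    return stream->setPlaybackRateParameters(rate);
}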
@@ -18,6 +18,9 @@ package android.hardware.audio.core;

import android.hardware.audio.common.SourceMetadata;
import android.hardware.audio.core.IStreamCommon;
import android.media.audio.common.AudioDualMonoMode;
import android.media.audio.common.AudioLatencyMode;
import android.media.audio.common.AudioPlaybackRate;

/**
 * This interface provides means for sending audio data to output devices.
@@ -86,4 +89,140 @@ interface IStreamOut {
     * @throws EX_UNSUPPORTED_OPERATION If hardware volume control is not supported.
     */
    void setHwVolume(in float[] channelVolumes);

    // aidl: Constant of type float is not supported (b/251286924).
    // const float AUDIO_DESCRIPTION_MIX_LEVEL_MIN = -Inf;
    const int AUDIO_DESCRIPTION_MIX_LEVEL_MAX = 48;
    /**
     * Returns the Audio Description Mix level in dB.
     *
     * The level is applied to streams incorporating a secondary Audio
     * Description stream. It specifies the relative level of mixing for
     * the Audio Description with a reference to the Main Audio.
     *
     * The value of the relative level is in the range from negative infinity
     * to +48, see AUDIO_DESCRIPTION_MIX_LEVEL_* constants.
     *
     * @return The current Audio Description Mix Level in dB.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
     */
    float getAudioDescriptionMixLevel();
    /**
     * Sets the Audio Description Mix level in dB.
     *
     * For streams incorporating a secondary Audio Description stream the
     * relative level of mixing of the Audio Description to the Main Audio is
     * controlled by this method.
     *
     * The value of the relative level must be in the range from negative
     * infinity to +48, see AUDIO_DESCRIPTION_MIX_LEVEL_* constants.
     *
     * @param leveldB Audio Description Mix Level in dB.
     * @throws EX_ILLEGAL_ARGUMENT If the provided value is out of range.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If setting of this parameter is not supported.
     */
    void setAudioDescriptionMixLevel(float leveldB);

    /**
     * Returns the Dual Mono mode presentation setting.
     *
     * @return The current setting of Dual Mono mode.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
     */
    AudioDualMonoMode getDualMonoMode();
    /**
     * Sets the Dual Mono mode presentation on the output device.
     *
     * The Dual Mono mode is generally applied to stereo audio streams
     * where the left and right channels come from separate sources.
     *
     * @param mode Selected Dual Mono mode.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If setting of this parameter is not supported.
     */
    void setDualMonoMode(AudioDualMonoMode mode);

    /**
     * Retrieve supported latency modes.
     *
     * Indicates which latency modes are currently supported on this output
     * stream. If the transport protocol (for example, Bluetooth A2DP) used by
     * this output stream to reach the output device supports variable latency
     * modes, the HAL indicates which modes are currently supported. The client
     * can then call setLatencyMode() with one of the supported modes to select
     * the desired operation mode.
     *
     * Implementation for this method is mandatory only on specific spatial
     * audio streams indicated by AUDIO_OUTPUT_FLAG_SPATIALIZER flag if they can
     * be routed to a BT classic sink.
     *
     * @return Currently supported latency modes.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
     */
    AudioLatencyMode[] getRecommendedLatencyModes();
    /**
     * Sets the latency mode.
     *
     * The requested mode must be one of the modes returned by the
     * 'getRecommendedLatencyModes()' method.
     *
     * Implementation for this method is mandatory only on specific spatial
     * audio streams indicated by AUDIO_OUTPUT_FLAG_SPATIALIZER flag if they can
     * be routed to a BT classic sink.
     *
     * @throws EX_ILLEGAL_ARGUMENT If the specified mode is not supported.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If setting of this parameter is not supported.
     */
    void setLatencyMode(AudioLatencyMode mode);

    /**
     * Retrieve current playback rate parameters.
     *
     * @return Current playback parameters.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
     */
    AudioPlaybackRate getPlaybackRateParameters();
    /**
     * Set playback rate parameters.
     *
     * Sets the playback rate parameters that control playback behavior. This
     * is normally used when playing encoded content and decoding is performed
     * in hardware. Otherwise, the client can apply necessary transformations
     * itself.
     *
     * The range of supported values for speed and pitch factors is provided by
     * the 'IModule.getSupportedPlaybackRateFactors' method. Out of range speed
     * and pitch values must not be rejected if the fallback mode is 'MUTE'.
     *
     * @param playbackRate Playback parameters to set.
     * @throws EX_ILLEGAL_ARGUMENT If provided parameters are out of acceptable range.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If setting playback rate parameters
     *                                  is not supported.
     */
    void setPlaybackRateParameters(in AudioPlaybackRate playbackRate);

    /**
     * Select presentation and program for decoding.
     *
     * Selects a presentation for decoding from a next generation media stream
     * (as defined per ETSI TS 103 190-2) and a program within the presentation.
     * The client must obtain valid presentation and program IDs from the media
     * stream on its own.
     *
     * @param presentationId Selected audio presentation.
     * @param programId Refinement for the presentation.
     * @throws EX_ILLEGAL_ARGUMENT If the HAL module is unable to locate
     *                             the specified presentation or program in
     *                             the media stream.
     * @throws EX_ILLEGAL_STATE If the stream is closed.
     * @throws EX_UNSUPPORTED_OPERATION If presentation selection is not supported.
     */
    void selectPresentation(int presentationId, int programId);
}
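A minimal sketch of the calling sequence intended by the latency-mode documentation above (client side, NDK backend); the helper name and the "pick the first mode" policy are illustrative assumptions:

using aidl::android::hardware::audio::core::IStreamOut;
using aidl::android::media::audio::common::AudioLatencyMode;

ndk::ScopedAStatus selectFirstRecommendedMode(const std::shared_ptr<IStreamOut>& stream) {
    std::vector<AudioLatencyMode> modes;
    if (auto status = stream->getRecommendedLatencyModes(&modes); !status.isOk()) {
        return status;  // EX_UNSUPPORTED_OPERATION when variable latency is not available
    }
    if (modes.empty()) return ndk::ScopedAStatus::ok();
    // setLatencyMode must only be called with one of the recommended modes.
    return stream->setLatencyMode(modes.front());
}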
@@ -0,0 +1,168 @@
/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio.core;

import android.media.audio.common.AudioLatencyMode;

/**
 * This interface provides means for asynchronous notification of the client
 * by an output stream.
 */
@VintfStability
oneway interface IStreamOutEventCallback {
    /**
     * Codec format changed notification.
     *
     * onCodecFormatChanged returns an AudioMetadata object in read-only
     * ByteString format. It represents the most recent codec format decoded by
     * a HW audio decoder.
     *
     * Codec format is an optional message from HW audio decoders. It serves to
     * notify the application about the codec format and audio objects contained
     * within the compressed audio stream for control, informational,
     * and display purposes.
     *
     * audioMetadata ByteString is convertible to an AudioMetadata object
     * through both a C++ and a C API present in Metadata.h [1], or through a
     * Java API present in AudioMetadata.java [2].
     *
     * The ByteString format is a stable format used for parcelling
     * (marshalling) across JNI, AIDL, and HIDL interfaces. The test for R
     * compatibility for native marshalling is TEST(metadata_tests,
     * compatibility_R) [3]. The test for R compatibility for JNI marshalling
     * is android.media.cts.AudioMetadataTest#testCompatibilityR [4].
     *
     * Android R defined keys are as follows [2]:
     *   "bitrate", int32
     *   "channel-mask", int32
     *   "mime", string
     *   "sample-rate", int32
     *   "bit-width", int32
     *   "has-atmos", int32
     *   "audio-encoding", int32
     *
     * Android S in addition adds the following keys:
     *   "presentation-id", int32
     *   "program-id", int32
     *   "presentation-content-classifier", int32
     *     presentation-content-classifier key values can be referenced from
     *     frameworks/base/media/java/android/media/AudioPresentation.java
     *     i.e. AudioPresentation.ContentClassifier
     *     It can contain any of the below values
     *       CONTENT_UNKNOWN = -1,
     *       CONTENT_MAIN = 0,
     *       CONTENT_MUSIC_AND_EFFECTS = 1,
     *       CONTENT_VISUALLY_IMPAIRED = 2,
     *       CONTENT_HEARING_IMPAIRED = 3,
     *       CONTENT_DIALOG = 4,
     *       CONTENT_COMMENTARY = 5,
     *       CONTENT_EMERGENCY = 6,
     *       CONTENT_VOICEOVER = 7
     *   "presentation-language", string  // represents ISO 639-2 (three letter code)
     *
     * Parceling Format:
     * All values are native endian order. [1]
     *
     * using type_size_t = uint32_t;
     * using index_size_t = uint32_t;
     * using datum_size_t = uint32_t;
     *
     * Permitted type indexes are
     *   TYPE_NONE = 0, // Reserved
     *   TYPE_INT32 = 1,
     *   TYPE_INT64 = 2,
     *   TYPE_FLOAT = 3,
     *   TYPE_DOUBLE = 4,
     *   TYPE_STRING = 5,
     *   TYPE_DATA = 6, // A data table of <String, Datum>
     *
     * Datum = {
     *   (type_size_t) Type (the type index from type_as_value<T>.)
     *   (datum_size_t) Size (size of the Payload)
     *   (byte string) Payload<Type>
     * }
     *
     * The data is specified in native endian order. Since the size of the
     * Payload is always present, unknown types may be skipped.
     *
     * Payload<Fixed-size Primitive_Value>
     *   [ sizeof(Primitive_Value) in raw bytes ]
     *
     * Example of Payload<Int32> of 123:
     * Payload<Int32>
     *   [ value of 123 ] = 0x7b 0x00 0x00 0x00       123
     *
     * Payload<String>
     *   [ (index_size_t) length, not including zero terminator.]
     *   [ (length) raw bytes ]
     *
     * Example of Payload<String> of std::string("hi"):
     *   [ (index_size_t) length ] = 0x02 0x00 0x00 0x00       2 strlen("hi")
     *   [ raw bytes "hi" ]        = 0x68 0x69                 "hi"
     *
     * Payload<Data>
     *   [ (index_size_t) entries ]
     *   [ raw bytes (entry 1) Key (Payload<String>)
     *                         Value (Datum)
     *                ...  (until #entries) ]
     *
     * Example of Payload<Data> of {{"hello", "world"},
     *                              {"value", (int32_t)1000}};
     *   [ (index_size_t) #entries ] = 0x02 0x00 0x00 0x00     2 entries
     *   Key (Payload<String>)
     *     [ index_size_t length ] = 0x05 0x00 0x00 0x00       5 strlen("hello")
     *     [ raw bytes "hello" ]   = 0x68 0x65 0x6c 0x6c 0x6f  "hello"
     *   Value (Datum)
     *     [ (type_size_t) type ]  = 0x05 0x00 0x00 0x00       5 (TYPE_STRING)
     *     [ (datum_size_t) size ] = 0x09 0x00 0x00 0x00       sizeof(index_size_t) +
     *                                                         strlen("world")
     *     Payload<String>
     *       [ (index_size_t) length ] = 0x05 0x00 0x00 0x00   5 strlen("world")
     *       [ raw bytes "world" ]     = 0x77 0x6f 0x72 0x6c 0x64 "world"
     *   Key (Payload<String>)
     *     [ index_size_t length ] = 0x05 0x00 0x00 0x00       5 strlen("value")
     *     [ raw bytes "value" ]   = 0x76 0x61 0x6c 0x75 0x65  "value"
     *   Value (Datum)
     *     [ (type_size_t) type ]  = 0x01 0x00 0x00 0x00       1 (TYPE_INT32)
     *     [ (datum_size_t) size ] = 0x04 0x00 0x00 0x00       4 sizeof(int32_t)
     *     Payload<Int32>
     *       [ raw bytes 1000 ]    = 0xe8 0x03 0x00 0x00       1000
     *
     * The contents of audioMetadata is a Payload<Data>.
     * An implementation dependent detail is that the Keys are always
     * stored sorted, so the byte string representation generated is unique.
     *
     * Vendor keys are allowed for informational and debugging purposes.
     * Vendor keys should consist of the vendor company name followed
     * by a dot; for example, "vendorCompany.someVolume" [2].
     *
     * [1] system/media/audio_utils/include/audio_utils/Metadata.h
     * [2] frameworks/base/media/java/android/media/AudioMetadata.java
     * [3] system/media/audio_utils/tests/metadata_tests.cpp
     * [4] cts/tests/tests/media/src/android/media/cts/AudioMetadataTest.java
     *
     * @param audioMetadata A buffer containing decoded format changes
     *     reported by codec. The buffer contains data that can be transformed
     *     to audio metadata, which is a C++ object based map.
     */
    void onCodecFormatChanged(in byte[] audioMetadata);

    /**
     * Called with the new list of supported latency modes when a change occurs.
     */
    void onRecommendedLatencyModeChanged(in AudioLatencyMode[] modes);
}
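To make the parceling description above concrete, here is a small self-contained sketch that assembles exactly the documented Payload<Data> example ({"hello", "world"}, {"value", (int32_t)1000}). It is an illustration only; a real client would normally rely on the C++/Java helpers referenced in [1] and [2]:

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

static void putU32(std::vector<uint8_t>& out, uint32_t v) {
    uint8_t b[4];
    std::memcpy(b, &v, sizeof(v));  // native endian, as the format requires
    out.insert(out.end(), b, b + 4);
}

static void putString(std::vector<uint8_t>& out, const std::string& s) {
    putU32(out, s.size());          // index_size_t length, no zero terminator
    out.insert(out.end(), s.begin(), s.end());
}

std::vector<uint8_t> buildExamplePayload() {
    std::vector<uint8_t> out;
    putU32(out, 2);                 // #entries
    putString(out, "hello");        // key
    putU32(out, 5);                 // Datum type = TYPE_STRING
    putU32(out, 4 + 5);             // Datum size = sizeof(index_size_t) + strlen("world")
    putString(out, "world");        // Payload<String>
    putString(out, "value");        // key
    putU32(out, 1);                 // Datum type = TYPE_INT32
    putU32(out, 4);                 // Datum size = sizeof(int32_t)
    putU32(out, 1000);              // Payload<Int32>
    return out;                     // this is the contents of 'audioMetadata'
}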
@@ -603,6 +603,13 @@ ndk::ScopedAStatus Module::openOutputStream(const OpenOutputStreamArguments& in_
    return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus Module::getSupportedPlaybackRateFactors(
        SupportedPlaybackRateFactors* _aidl_return) {
    LOG(DEBUG) << __func__;
    (void)_aidl_return;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus Module::setAudioPatch(const AudioPatch& in_requested, AudioPatch* _aidl_return) {
    LOG(DEBUG) << __func__ << ": requested patch " << in_requested.toString();
    if (in_requested.sourcePortConfigIds.empty()) {
@@ -27,7 +27,10 @@
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDualMonoMode;
using aidl::android::media::audio::common::AudioLatencyMode;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::AudioPlaybackRate;
using android::hardware::audio::common::getChannelCount;
using android::hardware::audio::common::getFrameSizeInBytes;
@@ -724,4 +727,55 @@ ndk::ScopedAStatus StreamOut::setHwVolume(const std::vector<float>& in_channelVo
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::getAudioDescriptionMixLevel(float* _aidl_return) {
    LOG(DEBUG) << __func__;
    (void)_aidl_return;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::setAudioDescriptionMixLevel(float in_leveldB) {
    LOG(DEBUG) << __func__ << ": description mix level " << in_leveldB;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::getDualMonoMode(AudioDualMonoMode* _aidl_return) {
    LOG(DEBUG) << __func__;
    (void)_aidl_return;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::setDualMonoMode(AudioDualMonoMode in_mode) {
    LOG(DEBUG) << __func__ << ": dual mono mode " << toString(in_mode);
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::getRecommendedLatencyModes(
        std::vector<AudioLatencyMode>* _aidl_return) {
    LOG(DEBUG) << __func__;
    (void)_aidl_return;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::setLatencyMode(AudioLatencyMode in_mode) {
    LOG(DEBUG) << __func__ << ": latency mode " << toString(in_mode);
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) {
    LOG(DEBUG) << __func__;
    (void)_aidl_return;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) {
    LOG(DEBUG) << __func__ << ": " << in_playbackRate.toString();
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

ndk::ScopedAStatus StreamOut::selectPresentation(int32_t in_presentationId, int32_t in_programId) {
    LOG(DEBUG) << __func__ << ": presentationId " << in_presentationId << ", programId "
               << in_programId;
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}

}  // namespace aidl::android::hardware::audio::core
@@ -66,6 +66,8 @@ class Module : public BnModule {
                    in_args,
            ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn* _aidl_return)
            override;
    ndk::ScopedAStatus getSupportedPlaybackRateFactors(
            SupportedPlaybackRateFactors* _aidl_return) override;
    ndk::ScopedAStatus setAudioPatch(const AudioPatch& in_requested,
                                     AudioPatch* _aidl_return) override;
    ndk::ScopedAStatus setAudioPortConfig(
@@ -367,6 +367,23 @@ class StreamOut : public StreamCommonImpl<::aidl::android::hardware::audio::comm
    }
    ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
    ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
    ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
    ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
    ndk::ScopedAStatus getDualMonoMode(
            ::aidl::android::media::audio::common::AudioDualMonoMode* _aidl_return) override;
    ndk::ScopedAStatus setDualMonoMode(
            ::aidl::android::media::audio::common::AudioDualMonoMode in_mode) override;
    ndk::ScopedAStatus getRecommendedLatencyModes(
            std::vector<::aidl::android::media::audio::common::AudioLatencyMode>* _aidl_return)
            override;
    ndk::ScopedAStatus setLatencyMode(
            ::aidl::android::media::audio::common::AudioLatencyMode in_mode) override;
    ndk::ScopedAStatus getPlaybackRateParameters(
            ::aidl::android::media::audio::common::AudioPlaybackRate* _aidl_return) override;
    ndk::ScopedAStatus setPlaybackRateParameters(
            const ::aidl::android::media::audio::common::AudioPlaybackRate& in_playbackRate)
            override;
    ndk::ScopedAStatus selectPresentation(int32_t in_presentationId, int32_t in_programId) override;

  public:
    static ndk::ScopedAStatus createInstance(
@@ -72,9 +72,12 @@ using aidl::android::media::audio::common::AudioContentType;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioDualMonoMode;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioLatencyMode;
using aidl::android::media::audio::common::AudioOutputFlags;
using aidl::android::media::audio::common::AudioPlaybackRate;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortDeviceExt;
@@ -2392,6 +2395,192 @@ TEST_P(AudioStreamOut, RequireAsyncCallback) {
            << "when no async callback is provided for a non-blocking mix port";
}

TEST_P(AudioStreamOut, AudioDescriptionMixLevel) {
    const auto ports = moduleConfig->getOutputMixPorts(false /*attachedOnly*/);
    if (ports.empty()) {
        GTEST_SKIP() << "No output mix ports";
    }
    bool atLeastOneSupports = false;
    for (const auto& port : ports) {
        const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, port);
        ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for output mix port";
        WithStream<IStreamOut> stream(portConfig.value());
        ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
        bool isSupported = false;
        EXPECT_NO_FATAL_FAILURE(
                TestAccessors<float>(stream.get(), &IStreamOut::getAudioDescriptionMixLevel,
                                     &IStreamOut::setAudioDescriptionMixLevel,
                                     {IStreamOut::AUDIO_DESCRIPTION_MIX_LEVEL_MAX,
                                      IStreamOut::AUDIO_DESCRIPTION_MIX_LEVEL_MAX - 1, 0,
                                      -INFINITY /*IStreamOut::AUDIO_DESCRIPTION_MIX_LEVEL_MIN*/},
                                     {IStreamOut::AUDIO_DESCRIPTION_MIX_LEVEL_MAX * 2,
                                      IStreamOut::AUDIO_DESCRIPTION_MIX_LEVEL_MAX * 1.1f},
                                     &isSupported));
        if (isSupported) atLeastOneSupports = true;
    }
    if (!atLeastOneSupports) {
        GTEST_SKIP() << "Audio description mix level is not supported";
    }
}

TEST_P(AudioStreamOut, DualMonoMode) {
    const auto ports = moduleConfig->getOutputMixPorts(false /*attachedOnly*/);
    if (ports.empty()) {
        GTEST_SKIP() << "No output mix ports";
    }
    bool atLeastOneSupports = false;
    for (const auto& port : ports) {
        const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, port);
        ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for output mix port";
        WithStream<IStreamOut> stream(portConfig.value());
        ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
        bool isSupported = false;
        EXPECT_NO_FATAL_FAILURE(TestAccessors<AudioDualMonoMode>(
                stream.get(), &IStreamOut::getDualMonoMode, &IStreamOut::setDualMonoMode,
                std::vector<AudioDualMonoMode>(enum_range<AudioDualMonoMode>().begin(),
                                               enum_range<AudioDualMonoMode>().end()),
                {}, &isSupported));
        if (isSupported) atLeastOneSupports = true;
    }
    if (!atLeastOneSupports) {
        GTEST_SKIP() << "Audio dual mono mode is not supported";
    }
}

TEST_P(AudioStreamOut, LatencyMode) {
    const auto ports = moduleConfig->getOutputMixPorts(false /*attachedOnly*/);
    if (ports.empty()) {
        GTEST_SKIP() << "No output mix ports";
    }
    bool atLeastOneSupports = false;
    for (const auto& port : ports) {
        const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, port);
        ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for output mix port";
        WithStream<IStreamOut> stream(portConfig.value());
        ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
        std::vector<AudioLatencyMode> supportedModes;
        ndk::ScopedAStatus status = stream.get()->getRecommendedLatencyModes(&supportedModes);
        if (status.getExceptionCode() == EX_UNSUPPORTED_OPERATION) continue;
        if (!status.isOk()) {
            ADD_FAILURE() << "When latency modes are supported, getRecommendedLatencyModes "
                          << "must succeed on a non-closed stream, but it failed with " << status;
            continue;
        }
        atLeastOneSupports = true;
        std::set<AudioLatencyMode> unsupportedModes(enum_range<AudioLatencyMode>().begin(),
                                                    enum_range<AudioLatencyMode>().end());
        for (const auto mode : supportedModes) {
            unsupportedModes.erase(mode);
            ndk::ScopedAStatus status = stream.get()->setLatencyMode(mode);
            if (status.getExceptionCode() == EX_UNSUPPORTED_OPERATION) {
                ADD_FAILURE() << "When latency modes are supported, both getRecommendedLatencyModes"
                              << " and setLatencyMode must be supported";
            }
            EXPECT_IS_OK(status) << "Setting of supported latency mode must succeed";
        }
        for (const auto mode : unsupportedModes) {
            EXPECT_STATUS(EX_ILLEGAL_ARGUMENT, stream.get()->setLatencyMode(mode));
        }
    }
    if (!atLeastOneSupports) {
        GTEST_SKIP() << "Audio latency modes are not supported";
    }
}

TEST_P(AudioStreamOut, PlaybackRate) {
    static const auto kStatuses = {EX_NONE, EX_UNSUPPORTED_OPERATION};
    const auto offloadMixPorts =
            moduleConfig->getOffloadMixPorts(true /*attachedOnly*/, false /*singlePort*/);
    if (offloadMixPorts.empty()) {
        GTEST_SKIP()
                << "No mix port for compressed offload that could be routed to attached devices";
    }
    ndk::ScopedAStatus status;
    IModule::SupportedPlaybackRateFactors factors;
    EXPECT_STATUS(kStatuses, status = module.get()->getSupportedPlaybackRateFactors(&factors));
    if (status.getExceptionCode() == EX_UNSUPPORTED_OPERATION) {
        GTEST_SKIP() << "Audio playback rate configuration is not supported";
    }
    EXPECT_LE(factors.minSpeed, factors.maxSpeed);
    EXPECT_LE(factors.minPitch, factors.maxPitch);
    EXPECT_LE(factors.minSpeed, 1.0f);
    EXPECT_GE(factors.maxSpeed, 1.0f);
    EXPECT_LE(factors.minPitch, 1.0f);
    EXPECT_GE(factors.maxPitch, 1.0f);
    constexpr auto tsDefault = AudioPlaybackRate::TimestretchMode::DEFAULT;
    constexpr auto tsVoice = AudioPlaybackRate::TimestretchMode::VOICE;
    constexpr auto fbFail = AudioPlaybackRate::TimestretchFallbackMode::FAIL;
    constexpr auto fbMute = AudioPlaybackRate::TimestretchFallbackMode::MUTE;
    const std::vector<AudioPlaybackRate> validValues = {
            AudioPlaybackRate{1.0f, 1.0f, tsDefault, fbFail},
            AudioPlaybackRate{1.0f, 1.0f, tsDefault, fbMute},
            AudioPlaybackRate{factors.maxSpeed, factors.maxPitch, tsDefault, fbMute},
            AudioPlaybackRate{factors.minSpeed, factors.minPitch, tsDefault, fbMute},
            AudioPlaybackRate{1.0f, 1.0f, tsVoice, fbMute},
            AudioPlaybackRate{1.0f, 1.0f, tsVoice, fbFail},
            AudioPlaybackRate{factors.maxSpeed, factors.maxPitch, tsVoice, fbMute},
            AudioPlaybackRate{factors.minSpeed, factors.minPitch, tsVoice, fbMute},
            // Out of range speed / pitch values must not be rejected if the fallback mode is "mute"
            AudioPlaybackRate{factors.maxSpeed * 2, factors.maxPitch * 2, tsDefault, fbMute},
            AudioPlaybackRate{factors.minSpeed / 2, factors.minPitch / 2, tsDefault, fbMute},
            AudioPlaybackRate{factors.maxSpeed * 2, factors.maxPitch * 2, tsVoice, fbMute},
            AudioPlaybackRate{factors.minSpeed / 2, factors.minPitch / 2, tsVoice, fbMute},
    };
    const std::vector<AudioPlaybackRate> invalidValues = {
            AudioPlaybackRate{factors.maxSpeed, factors.maxPitch * 2, tsDefault, fbFail},
            AudioPlaybackRate{factors.maxSpeed * 2, factors.maxPitch, tsDefault, fbFail},
            AudioPlaybackRate{factors.minSpeed, factors.minPitch / 2, tsDefault, fbFail},
            AudioPlaybackRate{factors.minSpeed / 2, factors.minPitch, tsDefault, fbFail},
            AudioPlaybackRate{factors.maxSpeed, factors.maxPitch * 2, tsVoice, fbFail},
            AudioPlaybackRate{factors.maxSpeed * 2, factors.maxPitch, tsVoice, fbFail},
            AudioPlaybackRate{factors.minSpeed, factors.minPitch / 2, tsVoice, fbFail},
            AudioPlaybackRate{factors.minSpeed / 2, factors.minPitch, tsVoice, fbFail},
            AudioPlaybackRate{1.0f, 1.0f, tsDefault,
                              AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_CUT_REPEAT},
            AudioPlaybackRate{1.0f, 1.0f, tsDefault,
                              AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_DEFAULT},
    };
    bool atLeastOneSupports = false;
    for (const auto& port : offloadMixPorts) {
        const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, port);
        ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for output mix port";
        WithStream<IStreamOut> stream(portConfig.value());
        ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
        bool isSupported = false;
        EXPECT_NO_FATAL_FAILURE(TestAccessors<AudioPlaybackRate>(
                stream.get(), &IStreamOut::getPlaybackRateParameters,
                &IStreamOut::setPlaybackRateParameters, validValues, invalidValues, &isSupported));
        if (isSupported) atLeastOneSupports = true;
    }
    if (!atLeastOneSupports) {
        GTEST_SKIP() << "Audio playback rate configuration is not supported";
    }
}

TEST_P(AudioStreamOut, SelectPresentation) {
    static const auto kStatuses = {EX_ILLEGAL_ARGUMENT, EX_UNSUPPORTED_OPERATION};
    const auto offloadMixPorts =
            moduleConfig->getOffloadMixPorts(true /*attachedOnly*/, false /*singlePort*/);
    if (offloadMixPorts.empty()) {
        GTEST_SKIP()
                << "No mix port for compressed offload that could be routed to attached devices";
    }
    bool atLeastOneSupports = false;
    for (const auto& port : offloadMixPorts) {
        const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, port);
        ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for output mix port";
        WithStream<IStreamOut> stream(portConfig.value());
        ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
        ndk::ScopedAStatus status;
        EXPECT_STATUS(kStatuses, status = stream.get()->selectPresentation(0, 0));
        if (status.getExceptionCode() != EX_UNSUPPORTED_OPERATION) atLeastOneSupports = true;
    }
    if (!atLeastOneSupports) {
        GTEST_SKIP() << "Presentation selection is not supported";
    }
}

class StreamLogicDefaultDriver : public StreamLogicDriver {
  public:
    explicit StreamLogicDefaultDriver(std::shared_ptr<StateSequence> commands)