Merge "Audio: Rearrange types in V7"

Treehugger Robot 2020-11-20 19:39:55 +00:00 committed by Gerrit Code Review
commit c563bfc08e
12 changed files with 645 additions and 1035 deletions

View file

@ -315,7 +315,6 @@ interface IDevice {
* INVALID_STATE if the device was already closed
* or there are streams currently opened.
*/
@exit
close() generates (Result retval);
/**

View file

@ -43,13 +43,6 @@ interface IStream {
*/
getBufferSize() generates (uint64_t bufferSize);
/**
* Return the sampling rate in Hz.
*
* @return sampleRateHz sample rate in Hz.
*/
getSampleRate() generates (uint32_t sampleRateHz);
/**
* Return supported native sampling rates of the stream for a given format.
* A supported native sample rate is a sample rate that can be efficiently
@ -71,23 +64,6 @@ interface IStream {
getSupportedSampleRates(AudioFormat format)
generates (Result retval, vec<uint32_t> sampleRates);
/**
* Sets the sampling rate of the stream. Calling this method is equivalent
* to setting AUDIO_PARAMETER_STREAM_SAMPLING_RATE on the legacy HAL.
* Optional method. If implemented, only called on a stopped stream.
*
* @param sampleRateHz sample rate in Hz.
* @return retval operation completion status.
*/
setSampleRate(uint32_t sampleRateHz) generates (Result retval);
/**
* Return the channel mask of the stream.
*
* @return mask channel mask.
*/
getChannelMask() generates (bitfield<AudioChannelMask> mask);
/**
* Return supported channel masks of the stream. Calling this method is
* equivalent to getting AUDIO_PARAMETER_STREAM_SUP_CHANNELS on the legacy
@ -99,24 +75,7 @@ interface IStream {
* @return masks supported audio masks.
*/
getSupportedChannelMasks(AudioFormat format)
generates (Result retval, vec<bitfield<AudioChannelMask>> masks);
/**
* Sets the channel mask of the stream. Calling this method is equivalent to
* setting AUDIO_PARAMETER_STREAM_CHANNELS on the legacy HAL.
* Optional method
*
* @param format audio format.
* @return retval operation completion status.
*/
setChannelMask(bitfield<AudioChannelMask> mask) generates (Result retval);
/**
* Return the audio format of the stream.
*
* @return format audio format.
*/
getFormat() generates (AudioFormat format);
generates (Result retval, vec<vec<AudioChannelMask>> masks);
/**
* Return supported audio formats of the stream. Calling this method is
@ -130,25 +89,23 @@ interface IStream {
getSupportedFormats() generates (Result retval, vec<AudioFormat> formats);
/**
* Sets the audio format of the stream. Calling this method is equivalent to
* setting AUDIO_PARAMETER_STREAM_FORMAT on the legacy HAL.
* Optional method
* Retrieves basic stream configuration: sample rate, audio format,
* channel mask.
*
* @param format audio format.
* @return retval operation completion status.
* @return config basic stream configuration.
*/
setFormat(AudioFormat format) generates (Result retval);
getAudioProperties() generates (AudioBasicConfig config);
/**
* Convenience method for retrieving several stream parameters in
* one transaction.
* Sets stream parameters. Only sets parameters that are specified.
* See the description of AudioBasicConfig for the details.
*
* @return sampleRateHz sample rate in Hz.
* @return mask channel mask.
* @return format audio format.
* Optional method. If implemented, only called on a stopped stream.
*
* @param config basic stream configuration.
* @return retval operation completion status.
*/
getAudioProperties() generates (
uint32_t sampleRateHz, bitfield<AudioChannelMask> mask, AudioFormat format);
setAudioProperties(AudioBasicConfig config) generates (Result retval);
/**
* Applies audio effect to the stream.
@ -312,6 +269,5 @@ interface IStream {
* output stream interface.
* INVALID_STATE if the stream was already closed.
*/
@exit
close() generates (Result retval);
};
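The hunks above fold the separate getSampleRate/setSampleRate, getChannelMask/setChannelMask and getFormat/setFormat methods into a single getAudioProperties/setAudioProperties pair operating on AudioBasicConfig. Below is a minimal client-side sketch of the new pair, assuming the C++ proxy generated by hidl-gen; the header path, namespaces, and the package of AudioBasicConfig are assumptions, and since the struct's fields are not shown in this diff the sketch only passes the struct through unchanged.

#include <android/hardware/audio/7.0/IStream.h>

using ::android::sp;
using ::android::hardware::audio::V7_0::IStream;
using ::android::hardware::audio::V7_0::Result;
using ::android::hardware::audio::common::V7_0::AudioBasicConfig;  // package assumed

// Reads the current configuration and writes it back through the
// consolidated pair that replaces the removed per-property accessors.
Result roundTripConfig(const sp<IStream>& stream) {
    AudioBasicConfig config;
    auto ret = stream->getAudioProperties(
            [&](const AudioBasicConfig& c) { config = c; });
    if (!ret.isOk()) return Result::NOT_SUPPORTED;  // transport error
    // Adjust fields of 'config' here; per the comments above, only the
    // fields that are specified are applied by setAudioProperties.
    auto status = stream->setAudioProperties(config);
    if (!status.isOk()) return Result::NOT_SUPPORTED;
    return status;
}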

View file

@ -100,7 +100,7 @@ interface IStreamIn extends IStream {
*
* The driver operates on a dedicated thread. The client must ensure that
* the thread is given an appropriate priority and assigned to correct
* scheduler and cgroup. For this purpose, the method returns identifiers
* scheduler and cgroup. For this purpose, the method returns the identifier
* of the driver thread.
*
* @param frameSize the size of a single frame, in bytes.
@ -115,7 +115,9 @@ interface IStreamIn extends IStream {
* specified at the stream opening.
* @return statusMQ a message queue used for passing status from the driver
* using ReadStatus structures.
* @return threadInfo identifiers of the driver's dedicated thread.
* @return threadId identifier of the driver's dedicated thread; the caller
* may adjust the thread priority to match the priority
* of the thread that provides audio data.
*/
prepareForReading(uint32_t frameSize, uint32_t framesCount)
generates (
@ -123,7 +125,7 @@ interface IStreamIn extends IStream {
fmq_sync<ReadParameters> commandMQ,
fmq_sync<uint8_t> dataMQ,
fmq_sync<ReadStatus> statusMQ,
ThreadInfo threadInfo);
int32_t threadId);
/**
* Return the amount of input frames lost in the audio driver since the last
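In prepareForReading above (and in the analogous prepareForWriting change to IStreamOut that follows), the ThreadInfo result is replaced by a plain int32_t threadId, and the comment now spells out what the caller is expected to do with it. A minimal sketch, assuming the generated C++ proxy and plain POSIX setpriority; the function name and the priority value are illustrative.

#include <sys/resource.h>

#include <android/hardware/audio/7.0/IStreamIn.h>

using ::android::sp;
using ::android::hardware::audio::V7_0::IStreamIn;
using ::android::hardware::audio::V7_0::Result;

void prepareCapture(const sp<IStreamIn>& stream,
                    uint32_t frameSize, uint32_t framesCount) {
    stream->prepareForReading(
            frameSize, framesCount,
            [](Result retval, const auto& commandMQ, const auto& dataMQ,
               const auto& statusMQ, int32_t threadId) {
                if (retval != Result::OK) return;
                // Raise the HAL I/O thread's priority so it matches the thread
                // that produces or consumes the audio data (-19 is illustrative).
                setpriority(PRIO_PROCESS, threadId, -19);
                // The descriptors would normally be wrapped into MessageQueue
                // objects before reading starts.
                (void)commandMQ; (void)dataMQ; (void)statusMQ;
            });
}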

View file

@ -95,7 +95,7 @@ interface IStreamOut extends IStream {
*
* The driver operates on a dedicated thread. The client must ensure that
* the thread is given an appropriate priority and assigned to correct
* scheduler and cgroup. For this purpose, the method returns identifiers
* scheduler and cgroup. For this purpose, the method returns the identifier
* of the driver thread.
*
* @param frameSize the size of a single frame, in bytes.
@ -109,7 +109,9 @@ interface IStreamOut extends IStream {
* specified at the stream opening.
* @return statusMQ a message queue used for passing status from the driver
* using WriteStatus structures.
* @return threadInfo identifiers of the driver's dedicated thread.
* @return threadId identifier of the driver's dedicated thread; the caller
* may adjust the thread priority to match the priority
* of the thread that provides audio data.
*/
prepareForWriting(uint32_t frameSize, uint32_t framesCount)
generates (
@ -117,7 +119,7 @@ interface IStreamOut extends IStream {
fmq_sync<WriteCommand> commandMQ,
fmq_sync<uint8_t> dataMQ,
fmq_sync<WriteStatus> statusMQ,
ThreadInfo threadInfo);
int32_t threadId);
/**
* Return the number of audio frames written by the audio DSP to DAC since

View file

@ -6,6 +6,81 @@ package audio.policy.configuration.V7_0 {
method public java.util.List<java.lang.String> getItem();
}
public enum AudioChannelMask {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_10;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_11;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_12;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_13;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_14;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_15;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_16;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_17;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_18;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_19;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_20;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_21;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_22;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_23;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_24;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_3;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_4;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_5;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_6;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_7;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_8;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_9;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_2POINT0POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_3POINT0POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_5POINT1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_6;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_FRONT_BACK;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_MONO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_STEREO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_CALL_MONO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT0POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT0POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT4;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_BACK;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_SIDE;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_6POINT1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT2;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT4;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_HAPTIC_AB;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_A;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_PENTA;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_BACK;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_SIDE;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB;
enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_SURROUND;
}
public enum AudioContentType {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_MOVIE;
enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_MUSIC;
enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_SONIFICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_SPEECH;
enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_UNKNOWN;
}
public enum AudioDevice {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_AMBIENT;
@ -116,6 +191,7 @@ package audio.policy.configuration.V7_0 {
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX_HD;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX_TWSP;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_CELT;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DEFAULT;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DOLBY_TRUEHD;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DSD;
enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DTS;
@ -164,18 +240,59 @@ package audio.policy.configuration.V7_0 {
method public void setVersion(audio.policy.configuration.V7_0.Version);
}
public enum AudioSource {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_CAMCORDER;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_DEFAULT;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_ECHO_REFERENCE;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_FM_TUNER;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_HOTWORD;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_MIC;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_REMOTE_SUBMIX;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_UNPROCESSED;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_CALL;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_COMMUNICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_DOWNLINK;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_PERFORMANCE;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_RECOGNITION;
enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_UPLINK;
}
public enum AudioStreamType {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ACCESSIBILITY;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ALARM;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ASSISTANT;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_BLUETOOTH_SCO;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_DTMF;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ENFORCED_AUDIBLE;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_MUSIC;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_NOTIFICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_PATCH;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_REROUTING;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_RING;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_SYSTEM;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_TTS;
enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_VOICE_CALL;
}
public enum AudioUsage {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ALARM;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ANNOUNCEMENT;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_SONIFICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANT;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_CALL_ASSISTANT;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_EMERGENCY;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_GAME;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_MEDIA;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_NOTIFICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_SAFETY;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_UNKNOWN;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VEHICLE_STATUS;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VIRTUAL_SOURCE;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VOICE_COMMUNICATION;
enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
@ -234,7 +351,7 @@ package audio.policy.configuration.V7_0 {
public static class Gains.Gain {
ctor public Gains.Gain();
method public String getChannel_mask();
method public audio.policy.configuration.V7_0.AudioChannelMask getChannel_mask();
method public int getDefaultValueMB();
method public int getMaxRampMs();
method public int getMaxValueMB();
@ -244,7 +361,7 @@ package audio.policy.configuration.V7_0 {
method public String getName();
method public int getStepValueMB();
method public boolean getUseForVolume();
method public void setChannel_mask(String);
method public void setChannel_mask(audio.policy.configuration.V7_0.AudioChannelMask);
method public void setDefaultValueMB(int);
method public void setMaxRampMs(int);
method public void setMaxValueMB(int);
@ -327,14 +444,14 @@ package audio.policy.configuration.V7_0 {
public class Profile {
ctor public Profile();
method public String getChannelMasks();
method public java.util.List<audio.policy.configuration.V7_0.AudioChannelMask> getChannelMasks();
method public String getFormat();
method public String getName();
method public String getSamplingRates();
method public void setChannelMasks(String);
method public java.util.List<java.math.BigInteger> getSamplingRates();
method public void setChannelMasks(java.util.List<audio.policy.configuration.V7_0.AudioChannelMask>);
method public void setFormat(String);
method public void setName(String);
method public void setSamplingRates(String);
method public void setSamplingRates(java.util.List<java.math.BigInteger>);
}
public class Reference {
@ -365,24 +482,6 @@ package audio.policy.configuration.V7_0 {
method public void setType(audio.policy.configuration.V7_0.MixType);
}
public enum Stream {
method public String getRawName();
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_ACCESSIBILITY;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_ALARM;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_ASSISTANT;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_BLUETOOTH_SCO;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_DTMF;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_ENFORCED_AUDIBLE;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_MUSIC;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_NOTIFICATION;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_PATCH;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_REROUTING;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_RING;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_SYSTEM;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_TTS;
enum_constant public static final audio.policy.configuration.V7_0.Stream AUDIO_STREAM_VOICE_CALL;
}
public class SurroundFormats {
ctor public SurroundFormats();
method public java.util.List<audio.policy.configuration.V7_0.SurroundFormats.Format> getFormat();
@ -412,10 +511,10 @@ package audio.policy.configuration.V7_0 {
method public audio.policy.configuration.V7_0.DeviceCategory getDeviceCategory();
method public java.util.List<java.lang.String> getPoint();
method public String getRef();
method public audio.policy.configuration.V7_0.Stream getStream();
method public audio.policy.configuration.V7_0.AudioStreamType getStream();
method public void setDeviceCategory(audio.policy.configuration.V7_0.DeviceCategory);
method public void setRef(String);
method public void setStream(audio.policy.configuration.V7_0.Stream);
method public void setStream(audio.policy.configuration.V7_0.AudioStreamType);
}
public class Volumes {

View file

@ -13,7 +13,6 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- TODO: define a targetNamespace. Note that it will break retrocompatibility -->
<xs:schema version="2.0"
elementFormDefault="qualified"
attributeFormDefault="unqualified"
@ -27,7 +26,9 @@
<xs:simpleType name="halVersion">
<xs:annotation>
<xs:documentation xml:lang="en">
Version of the interface the hal implements.
Version of the interface the hal implements. Note that this
relates to legacy HAL API versions since HIDL APIs are versioned
using other mechanisms.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:decimal">
@ -154,7 +155,6 @@
<xs:element name="item" type="xs:token" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<!-- TODO: separate values by space for better xsd validations. -->
<xs:simpleType name="audioInOutFlags">
<xs:annotation>
<xs:documentation xml:lang="en">
@ -212,9 +212,6 @@
</xs:element>
</xs:sequence>
</xs:complexType>
<!-- Enum values of audio_device_t in audio.h
TODO: generate from hidl to avoid manual sync.
TODO: separate source and sink in the xml for better xsd validations. -->
<xs:simpleType name="audioDevice">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_DEVICE_NONE"/>
@ -252,7 +249,6 @@
<xs:enumeration value="AUDIO_DEVICE_OUT_DEFAULT"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_STUB"/>
<!-- Due to the xml format, IN types can not be a separated from OUT types -->
<xs:enumeration value="AUDIO_DEVICE_IN_COMMUNICATION"/>
<xs:enumeration value="AUDIO_DEVICE_IN_AMBIENT"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BUILTIN_MIC"/>
@ -298,10 +294,9 @@
<xs:simpleType name="extendableAudioDevice">
<xs:union memberTypes="audioDevice vendorExtension"/>
</xs:simpleType>
<!-- Enum values of audio_format_t in audio.h
TODO: generate from hidl to avoid manual sync. -->
<xs:simpleType name="audioFormat">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_FORMAT_DEFAULT" />
<xs:enumeration value="AUDIO_FORMAT_PCM_16_BIT" />
<xs:enumeration value="AUDIO_FORMAT_PCM_8_BIT"/>
<xs:enumeration value="AUDIO_FORMAT_PCM_32_BIT"/>
@ -382,9 +377,14 @@
<xs:simpleType name="extendableAudioFormat">
<xs:union memberTypes="audioFormat vendorExtension"/>
</xs:simpleType>
<!-- Enum values of audio::common::4_0::AudioUsage
TODO: generate from HIDL to avoid manual sync. -->
<xs:simpleType name="audioUsage">
<xs:annotation>
<xs:documentation xml:lang="en">
Audio usage specifies the intended use case for the sound being played.
Please consult frameworks/base/media/java/android/media/AudioAttributes.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_USAGE_UNKNOWN" />
<xs:enumeration value="AUDIO_USAGE_MEDIA" />
@ -399,34 +399,119 @@
<xs:enumeration value="AUDIO_USAGE_GAME" />
<xs:enumeration value="AUDIO_USAGE_VIRTUAL_SOURCE" />
<xs:enumeration value="AUDIO_USAGE_ASSISTANT" />
<xs:enumeration value="AUDIO_USAGE_CALL_ASSISTANT" />
<xs:enumeration value="AUDIO_USAGE_EMERGENCY" />
<xs:enumeration value="AUDIO_USAGE_SAFETY" />
<xs:enumeration value="AUDIO_USAGE_VEHICLE_STATUS" />
<xs:enumeration value="AUDIO_USAGE_ANNOUNCEMENT" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioUsageList">
<xs:list itemType="audioUsage"/>
</xs:simpleType>
<!-- TODO: Change to a space separated list to xsd enforce correctness. -->
<xs:simpleType name="samplingRates">
<xs:restriction base="xs:string">
<xs:pattern value="[0-9]+(,[0-9]+)*"/>
</xs:restriction>
</xs:simpleType>
<!-- TODO: Change to a space separated list to xsd enforce correctness. -->
<xs:simpleType name="channelMask">
<xs:simpleType name="audioContentType">
<xs:annotation>
<xs:documentation xml:lang="en">
Comma (",") separated list of channel flags
from audio_channel_mask_t.
Audio content type expresses the general category of the content.
Please consult frameworks/base/media/java/android/media/AudioAttributes.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:pattern value="[_A-Z][_A-Z0-9]*(,[_A-Z][_A-Z0-9]*)*"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_UNKNOWN"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_SPEECH"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_MUSIC"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_MOVIE"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_SONIFICATION"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="samplingRates">
<xs:list itemType="xs:nonNegativeInteger" />
</xs:simpleType>
<xs:simpleType name="audioChannelMask">
<xs:annotation>
<xs:documentation xml:lang="en">
Audio channel mask specifies presence of particular channels.
There are two representations:
- positional (traditional discrete channel specification,
e.g. "left", "right");
- indexed (this is similar to "tracks" in audio mixing, channels
are represented using numbers).
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_SIDE"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_SURROUND"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_PENTA"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_SIDE"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT4"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_6POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_A"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_STEREO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_FRONT_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_6"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_2POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_2POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_3POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_3POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_5POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_CALL_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_1"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_2"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_3"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_4"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_5"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_6"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_7"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_8"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_9"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_10"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_11"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_12"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_13"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_14"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_15"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_16"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_17"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_18"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_19"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_20"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_21"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_22"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_23"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_24"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="channelMasks">
<xs:list itemType="audioChannelMask" />
</xs:simpleType>
<xs:complexType name="profile">
<xs:attribute name="name" type="xs:token" use="optional"/>
<xs:attribute name="format" type="extendableAudioFormat" use="optional"/>
<xs:attribute name="samplingRates" type="samplingRates" use="optional"/>
<xs:attribute name="channelMasks" type="channelMask" use="optional"/>
<xs:attribute name="channelMasks" type="channelMasks" use="optional"/>
</xs:complexType>
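<!-- For illustration only: a profile element that validates against the
     list types above, with space-separated samplingRates and channelMasks
     (the values shown are examples, not defaults):
     <profile name="primary output" format="AUDIO_FORMAT_PCM_16_BIT"
              samplingRates="44100 48000"
              channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
-->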
<xs:simpleType name="gainMode">
<xs:restriction base="xs:string">
@ -441,7 +526,7 @@
<xs:complexType>
<xs:attribute name="name" type="xs:token" use="required"/>
<xs:attribute name="mode" type="gainMode" use="required"/>
<xs:attribute name="channel_mask" type="channelMask" use="optional"/>
<xs:attribute name="channel_mask" type="audioChannelMask" use="optional"/>
<xs:attribute name="minValueMB" type="xs:int" use="optional"/>
<xs:attribute name="maxValueMB" type="xs:int" use="optional"/>
<xs:attribute name="defaultValueMB" type="xs:int" use="optional"/>
@ -537,9 +622,14 @@
<xs:pattern value="([0-9]{1,2}|100),-?[0-9]+"/>
</xs:restriction>
</xs:simpleType>
<!-- Enum values of audio_stream_type_t in audio-base.h
TODO: generate from hidl to avoid manual sync. -->
<xs:simpleType name="stream">
<xs:simpleType name="audioStreamType">
<xs:annotation>
<xs:documentation xml:lang="en">
Audio stream type describing the intended use case of a stream.
Please consult frameworks/base/media/java/android/media/AudioSystem.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_STREAM_VOICE_CALL"/>
<xs:enumeration value="AUDIO_STREAM_SYSTEM"/>
@ -557,8 +647,32 @@
<xs:enumeration value="AUDIO_STREAM_PATCH"/>
</xs:restriction>
</xs:simpleType>
<!-- Enum values of device_category from Volume.h.
TODO: generate from hidl to avoid manual sync. -->
<xs:simpleType name="audioSource">
<xs:annotation>
<xs:documentation xml:lang="en">
An audio source defines the intended use case for the sound being recorded.
Please consult frameworks/base/media/java/android/media/MediaRecorder.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_SOURCE_DEFAULT"/>
<xs:enumeration value="AUDIO_SOURCE_MIC"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_UPLINK"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_DOWNLINK"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_CALL"/>
<xs:enumeration value="AUDIO_SOURCE_CAMCORDER"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_RECOGNITION"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_COMMUNICATION"/>
<xs:enumeration value="AUDIO_SOURCE_REMOTE_SUBMIX"/>
<xs:enumeration value="AUDIO_SOURCE_UNPROCESSED"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_PERFORMANCE"/>
<xs:enumeration value="AUDIO_SOURCE_ECHO_REFERENCE"/>
<xs:enumeration value="AUDIO_SOURCE_FM_TUNER"/>
<xs:enumeration value="AUDIO_SOURCE_HOTWORD"/>
</xs:restriction>
</xs:simpleType>
<!-- Enum values of device_category from Volume.h. -->
<xs:simpleType name="deviceCategory">
<xs:restriction base="xs:string">
<xs:enumeration value="DEVICE_CATEGORY_HEADSET"/>
@ -591,7 +705,7 @@
<xs:sequence>
<xs:element name="point" type="volumePoint" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="stream" type="stream"/>
<xs:attribute name="stream" type="audioStreamType"/>
<xs:attribute name="deviceCategory" type="deviceCategory"/>
<xs:attribute name="ref" type="xs:token" use="optional"/>
</xs:complexType>

View file

@ -0,0 +1,159 @@
#!/bin/bash
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script is used to update audio policy configuration files
# to comply with the updated audio_policy_configuration.xsd from V7.0.
#
# The main difference is the separator used in lists for attributes.
# Since the XML Schema Definition standard only allows space to be
# used as a separator (see https://www.w3.org/TR/xmlschema11-2/#list-datatypes)
# the previous versions used a regular expression to validate lists
# in attribute values. E.g. the channel masks were validated using
# the following regexp: [_A-Z][_A-Z0-9]*(,[_A-Z][_A-Z0-9]*)*
# This has an obvious drawback of missing typos in the config file.
#
# The V7.0 has shifted to defining most of the frequently changed
# types in the XSD schema only. This allows for verifying all the values
# in lists, but in order to comply with XML Schema requirements
# list elements must be separated by space.
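# For example, an attribute value that previously read
#   channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_MONO"
# must now be written as
#   channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"
# (the same applies to samplingRates; the mask values are illustrative).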
#
# Since the APM config files typically use include directives,
# the script must be pointed to the main APM config file and will
# take care of all the included files automatically.
# If the included file is a shared version from 'frameworks/av',
# instead of updating it the script checks if there is a newer
# version with the corresponding name suffix (e.g.
# 'a2dp_audio_policy_configuration_7_0.xml') and updates the include
# path instead.
set -euo pipefail
if (echo "$@" | grep -qe -h); then
echo "This script will update Audio Policy Manager config file"
echo "to the format required by V7.0 XSD schema from a previous"
echo "version."
echo
echo "USAGE: $0 [APM_XML_FILE] [OLD_VERSION]"
echo " APM_XML_FILE specifies the path to audio_policy_configuration.xml"
echo " relative to Android repository root"
echo " OLD_VERSION specifies the version of schema currently used"
echo
echo "Example: $0 device/generic/goldfish/audio/policy/audio_policy_configuration.xml 6.0"
exit
fi
readonly HAL_DIRECTORY=hardware/interfaces/audio
readonly SHARED_CONFIGS_DIRECTORY=frameworks/av/services/audiopolicy/config
readonly OLD_VERSION=${2:-$(ls ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY} | grep -E '[0-9]+\.[0-9]+' |
sort -n | tail -n1)}
readonly NEW_VERSION=7.0
readonly NEW_VERSION_UNDERSCORE=7_0
readonly SOURCE_CONFIG=${ANDROID_BUILD_TOP}/$1
# First, validate the input using the schema of the current version
echo Validating the source against the $OLD_VERSION schema
xmllint --noout --xinclude \
--nofixup-base-uris --path "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY" \
--schema ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY}/${OLD_VERSION}/config/audio_policy_configuration.xsd \
${SOURCE_CONFIG}
if [ $? -ne 0 ]; then
echo
echo "Config file fails validation for the specified version $OLD_VERSION--unsafe to update"
exit 1
fi
# Find all the source files recursively
SOURCE_FILES=${SOURCE_CONFIG}
SHARED_FILES=
findIncludes() {
local FILES_TO_CHECK=
for F in $1; do
local FOUND_INCLUDES=$(grep -Po '<xi:include href="\K[^"]+(?="\/>)' ${F})
for I in ${FOUND_INCLUDES}; do
SOURCE_FULL_PATH=$(dirname ${F})/${I}
SHARED_FULL_PATH=${ANDROID_BUILD_TOP}/${SHARED_CONFIGS_DIRECTORY}/${I}
if [ -f "$SOURCE_FULL_PATH" ]; then
# Device-specific file.
SOURCE_FILES+=$'\n'${SOURCE_FULL_PATH}
FILES_TO_CHECK+=$'\n'${SOURCE_FULL_PATH}
elif [ -f "$SHARED_FULL_PATH" ]; then
# Shared file from the frameworks repo.
SHARED_FILES+=$'\n'${I}
FILES_TO_CHECK+=$'\n'${SHARED_FULL_PATH}
else
echo
echo "Include file not found: $I"
exit 1
fi
done
done
if [ "$FILES_TO_CHECK" ]; then
findIncludes "$FILES_TO_CHECK"
fi
}
findIncludes ${SOURCE_FILES}
echo "Will update $1 and included device-specific files in place."
echo "Will update paths to shared included files."
echo "Press Ctrl-C to cancel, Enter to continue"
read
updateFile() {
FILE=$1
ATTR=$2
SEPARATOR=$3
SRC_LINES=$(grep -nPo "$ATTR=\"[^\"]+\"" ${FILE} || true)
for S in $SRC_LINES; do
# Prepare instruction for 'sed' for in-place editing of specified line
R=$(echo ${S} | sed -e 's/^[0-9]\+:/\//' | sed -e "s/$SEPARATOR/ /g")
S=$(echo ${S} | sed -e 's/:/s\//')${R}/
echo ${S} | sed -i -f - ${FILE}
done
}
for F in $SOURCE_FILES; do
updateFile ${F} "channelMasks" ","
updateFile ${F} "samplingRates" ","
done;
updateIncludes() {
FILE=$1
for I in $SHARED_FILES; do
NEW_VERSION_I=${I%.*}_${NEW_VERSION_UNDERSCORE}.${I##*.}
if [ -e "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY/$NEW_VERSION_I" ]; then
echo "s/$I/$NEW_VERSION_I/g" | sed -i -f - ${FILE}
fi
done
}
for F in $SOURCE_FILES; do
updateIncludes ${F}
done
# Validate the results against the new schema
echo Validating the result against the $NEW_VERSION schema
xmllint --noout --xinclude \
--nofixup-base-uris --path "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY" \
--schema ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY}/${NEW_VERSION}/config/audio_policy_configuration.xsd \
${SOURCE_CONFIG}
if [ $? -ne 0 ]; then
echo
echo "Config file fails validation for the specified version $NEW_VERSION--please check the changes"
exit 1
fi
echo
echo "Please check the diff and update path to APM shared files in the device makefile!"

View file

@ -355,3 +355,58 @@ struct PlaybackRate {
*/
TimestretchFallbackMode fallbackMode;
};
/**
* The audio output flags serve two purposes:
*
* - when an output stream is created they indicate its attributes;
*
* - when present in an output profile descriptor listed for a particular audio
* hardware module, they indicate that an output stream can be opened that
* supports the attributes indicated by the flags.
*/
@export(name="audio_output_flags_t", value_prefix="AUDIO_OUTPUT_FLAG_")
enum AudioOutputFlag : int32_t {
NONE = 0x0, // no attributes
DIRECT = 0x1, // this output directly connects a track
// to one output stream: no software mixer
PRIMARY = 0x2, // this output is the primary output of the device. It is
// unique and must be present. It is opened by default and
// receives routing, audio mode and volume controls related
// to voice calls.
FAST = 0x4, // output supports "fast tracks", defined elsewhere
DEEP_BUFFER = 0x8, // use deep audio buffers
COMPRESS_OFFLOAD = 0x10, // offload playback of compressed streams to
// hardware codec
NON_BLOCKING = 0x20, // use non-blocking write
HW_AV_SYNC = 0x40, // output uses a hardware A/V sync
TTS = 0x80, // output for streams transmitted through speaker at a
// sample rate high enough to accommodate lower-range
// ultrasonic p/b
RAW = 0x100, // minimize signal processing
SYNC = 0x200, // synchronize I/O streams
IEC958_NONAUDIO = 0x400, // Audio stream contains compressed audio in SPDIF
// data bursts, not PCM.
DIRECT_PCM = 0x2000, // Audio stream containing PCM data that needs
// to pass through compress path for DSP post proc.
MMAP_NOIRQ = 0x4000, // output operates in MMAP no IRQ mode.
VOIP_RX = 0x8000, // preferred output for VoIP calls.
/** preferred output for call music */
INCALL_MUSIC = 0x10000,
};
/**
* The audio input flags are analogous to audio output flags.
*/
@export(name="audio_input_flags_t", value_prefix="AUDIO_INPUT_FLAG_")
enum AudioInputFlag : int32_t {
NONE = 0x0, // no attributes
FAST = 0x1, // prefer an input that supports "fast tracks"
HW_HOTWORD = 0x2, // prefer an input that captures from hw hotword source
RAW = 0x4, // minimize signal processing
SYNC = 0x8, // synchronize I/O streams
MMAP_NOIRQ = 0x10, // input operates in MMAP no IRQ mode.
VOIP_TX = 0x20, // preferred input for VoIP calls.
HW_AV_SYNC = 0x40, // input connected to an output that uses a hardware A/V sync
DIRECT = 0x80, // for acquiring encoded streams
};

File diff suppressed because it is too large.

View file

@ -56,7 +56,6 @@ interface IEffect {
*
* @return retval operation completion status.
*/
@callflow(next={"prepareForProcessing"})
enable() generates (Result retval);
/**
@ -64,7 +63,6 @@ interface IEffect {
*
* @return retval operation completion status.
*/
@callflow(next={"close"})
disable() generates (Result retval);
/**
@ -78,7 +76,7 @@ interface IEffect {
* @param device output device specification.
* @return retval operation completion status.
*/
setDevice(bitfield<AudioDevice> device) generates (Result retval);
setDevice(DeviceAddress device) generates (Result retval);
/**
* Set and get volume. Used by audio framework to delegate volume control to
@ -147,7 +145,7 @@ interface IEffect {
* @param device input device specification.
* @return retval operation completion status.
*/
setInputDevice(bitfield<AudioDevice> device) generates (Result retval);
setInputDevice(DeviceAddress device) generates (Result retval);
/**
* Read audio parameters configurations for input and output buffers.
@ -251,7 +249,6 @@ interface IEffect {
* the queue.
* @return statusMQ a message queue used for passing status from the effect.
*/
@callflow(next={"setProcessBuffers"})
prepareForProcessing() generates (Result retval, fmq_sync<Result> statusMQ);
/**
@ -416,6 +413,5 @@ interface IEffect {
* @return retval OK in case the success.
* INVALID_STATE if the effect was already closed.
*/
@exit
close() generates (Result retval);
};

View file

@ -48,7 +48,7 @@ interface IVirtualizerEffect extends IEffect {
struct SpeakerAngle {
/** Speaker channel mask */
bitfield<AudioChannelMask> mask;
vec<AudioChannelMask> mask;
// all angles are expressed in degrees and
// are relative to the listener.
int16_t azimuth; // 0 is the direction the listener faces
@ -61,17 +61,17 @@ interface IVirtualizerEffect extends IEffect {
* Retrieves virtual speaker angles for the given channel mask on the
* specified device.
*/
getVirtualSpeakerAngles(bitfield<AudioChannelMask> mask, AudioDevice device)
getVirtualSpeakerAngles(vec<AudioChannelMask> mask, DeviceAddress device)
generates (Result retval, vec<SpeakerAngle> speakerAngles);
/**
* Forces the virtualizer effect for the given output device.
*/
forceVirtualizationMode(AudioDevice device) generates (Result retval);
forceVirtualizationMode(DeviceAddress device) generates (Result retval);
/**
* Returns audio device reflecting the current virtualization mode,
* AUDIO_DEVICE_NONE when not virtualizing.
* Device type can be empty when not virtualizing.
*/
getVirtualizationMode() generates (Result retval, AudioDevice device);
getVirtualizationMode() generates (Result retval, DeviceAddress device);
};

View file

@ -257,7 +257,7 @@ enum EffectConfigParameters : int32_t {
struct EffectBufferConfig {
AudioBuffer buffer;
uint32_t samplingRateHz;
bitfield<AudioChannelMask> channels;
AudioChannelMask channels;
AudioFormat format;
EffectBufferAccess accessMode;
bitfield<EffectConfigParameters> mask;
@ -276,8 +276,8 @@ enum EffectFeature : int32_t {
};
struct EffectAuxChannelsConfig {
bitfield<AudioChannelMask> mainChannels; // channel mask for main channels
bitfield<AudioChannelMask> auxChannels; // channel mask for auxiliary channels
vec<AudioChannelMask> mainChannels; // channel mask for main channels
vec<AudioChannelMask> auxChannels; // channel mask for auxiliary channels
};
struct EffectOffloadParameter {