Mirror of https://github.com/danog/termux-api.git
Synced 2024-11-26 20:04:42 +01:00
AudioAPI: a few updates (#184)
* AudioAPI: simplify code

* AudioAPI: remove volume info

  Redundant with the introduction of VolumeAPI.

* AudioAPI: remove PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED

  There does not seem to be any point in querying this property, as it is not clear what it actually reflects. We have not been querying PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND or PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND either. In fact, the properties returned by getProperty() tend to be rather useless. PROPERTY_OUTPUT_FRAMES_PER_BUFFER and PROPERTY_OUTPUT_SAMPLE_RATE are kept only to show that they should NOT be used to optimize audio output in any player: they do not change with the current sink, unlike the values returned by the AudioTrack methods.

* AudioAPI: order output

* AudioAPI: check info in PERFORMANCE_MODE_POWER_SAVING

  When setPerformanceMode was introduced in Android O, it came with three possible modes:

    PERFORMANCE_MODE_NONE (default)
    PERFORMANCE_MODE_LOW_LATENCY
    PERFORMANCE_MODE_POWER_SAVING

  While PERFORMANCE_MODE_NONE essentially causes tracks to be routed to the deep buffer mixer path on my phone (as PERFORMANCE_MODE_POWER_SAVING does), that may not be the case on every device. Therefore, query the track info in PERFORMANCE_MODE_POWER_SAVING, compare it against that of the default PERFORMANCE_MODE_NONE, and print it out only if they are not the same (see the sketch below).

  Note: shouldEnablePowerSaving for PERFORMANCE_MODE_NONE always returns false for SAMPLE_RATE_UNSPECIFIED as of the current AudioTrack.java.
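A minimal sketch of that comparison, assuming API 26+ (Build.VERSION_CODES.O) and an android.media.AudioTrack import; the variable names here are illustrative, not taken from the change itself:

    // Build a default track and a PERFORMANCE_MODE_POWER_SAVING track, then
    // report the power-saving values only when they differ from the defaults.
    AudioTrack base = new AudioTrack.Builder()
            .setBufferSizeInBytes(4) // one 16bit 2ch frame, as in the diff below
            .build();
    int sr = base.getSampleRate();
    int bs = base.getBufferSizeInFrames();
    base.release();

    AudioTrack ps = new AudioTrack.Builder()
            .setBufferSizeInBytes(4)
            .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_POWER_SAVING)
            .build();
    int sr_ps = ps.getSampleRate();
    int bs_ps = ps.getBufferSizeInFrames();
    ps.release();

    if (sr_ps != sr || bs_ps != bs) { // "all or nothing", as in the diff below
        // only then are the POWER_SAVING values worth emitting separately
    }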
This commit is contained in:
parent dc8a7157b5
commit 0da7e68c10
@@ -16,76 +16,60 @@ public class AudioAPI {
AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
final String SampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
final String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
final String AudioUnprocessed;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
AudioUnprocessed = am.getProperty(AudioManager.PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED);
} else {
AudioUnprocessed = null;
}
final int volume_level = am.getStreamVolume(AudioManager.STREAM_MUSIC);
final int maxvolume_level = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
final boolean bluetootha2dp = am.isBluetoothA2dpOn();
final boolean wiredhs = am.isWiredHeadsetOn();

int _sr, _bs, _sr_ll, _bs_ll, _nosr;
_sr = _bs = _sr_ll = _bs_ll = _nosr = 0;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
int[] modes = {AudioTrack.PERFORMANCE_MODE_POWER_SAVING,
AudioTrack.PERFORMANCE_MODE_LOW_LATENCY};
for (int mode: modes) {
AudioTrack at = new AudioTrack.Builder()
final int sr, bs, sr_ll, bs_ll, sr_ps, bs_ps, nosr;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
nosr = 0;
AudioTrack at;
at = new AudioTrack.Builder()
.setBufferSizeInBytes(4) // one 16bit 2ch frame
.build();
sr = at.getSampleRate();
bs = at.getBufferSizeInFrames();
at.release();

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
at = new AudioTrack.Builder()
.setBufferSizeInBytes(4) // one 16bit 2ch frame
.setPerformanceMode(mode)
.setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
.build();
if (mode == AudioTrack.PERFORMANCE_MODE_POWER_SAVING) {
_sr = at.getSampleRate();
_bs = at.getBufferSizeInFrames();
} else if (mode == AudioTrack.PERFORMANCE_MODE_LOW_LATENCY) {
_sr_ll = at.getSampleRate();
_bs_ll = at.getBufferSizeInFrames();
}
at.release();
}
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
int[] flags = {0,AudioAttributes.FLAG_LOW_LATENCY};
for (int flag: flags) {
} else {
AudioAttributes aa = new AudioAttributes.Builder()
.setFlags(flag)
.setFlags(AudioAttributes.FLAG_LOW_LATENCY)
.build();
AudioTrack at = new AudioTrack.Builder()
at = new AudioTrack.Builder()
.setAudioAttributes(aa)
.setBufferSizeInBytes(4) // one 16bit 2ch frame
.build();
if (flag == 0) {
_sr = at.getSampleRate();
_bs = at.getBufferSizeInFrames();
} else if (flag == AudioAttributes.FLAG_LOW_LATENCY) {
_sr_ll = at.getSampleRate();
_bs_ll = at.getBufferSizeInFrames();
}
at.release();
}
sr_ll = at.getSampleRate();
bs_ll = at.getBufferSizeInFrames();
at.release();

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
at = new AudioTrack.Builder()
.setBufferSizeInBytes(4) // one 16bit 2ch frame
.setPerformanceMode(AudioTrack.PERFORMANCE_MODE_POWER_SAVING)
.build();
sr_ps = at.getSampleRate();
bs_ps = at.getBufferSizeInFrames();
at.release();
} else {
sr_ps = sr;
bs_ps = bs;
}
} else {
_nosr = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
sr = bs = sr_ll = bs_ll = sr_ps = bs_ps = 0;
nosr = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
}
final int sr = _sr;
final int bs = _bs;
final int sr_ll = _sr_ll;
final int bs_ll = _bs_ll;
final int nosr = _nosr;

ResultReturner.returnData(apiReceiver, intent, new ResultReturner.ResultJsonWriter() {
public void writeJson(JsonWriter out) throws Exception {
out.beginObject();
out.name("PROPERTY_OUTPUT_SAMPLE_RATE").value(SampleRate);
out.name("PROPERTY_OUTPUT_FRAMES_PER_BUFFER").value(framesPerBuffer);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
out.name("PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED").value(AudioUnprocessed);
}
out.name("STREAM_MUSIC_VOLUME").value(volume_level);
out.name("STREAM_MUSIC_MAXVOLUME").value(maxvolume_level);
out.name("BLUETOOTH_A2DP_IS_ON").value(bluetootha2dp);
out.name("WIREDHEADSET_IS_CONNECTED").value(wiredhs);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
out.name("AUDIOTRACK_SAMPLE_RATE").value(sr);
out.name("AUDIOTRACK_BUFFER_SIZE_IN_FRAMES").value(bs);
@@ -93,9 +77,15 @@ public class AudioAPI {
out.name("AUDIOTRACK_SAMPLE_RATE_LOW_LATENCY").value(sr_ll);
out.name("AUDIOTRACK_BUFFER_SIZE_IN_FRAMES_LOW_LATENCY").value(bs_ll);
}
if (sr_ps != sr || bs_ps != bs) { // all or nothing
out.name("AUDIOTRACK_SAMPLE_RATE_POWER_SAVING").value(sr_ps);
out.name("AUDIOTRACK_BUFFER_SIZE_IN_FRAMES_POWER_SAVING").value(bs_ps);
}
} else {
out.name("AUDIOTRACK_NATIVE_OUTPUT_SAMPLE_RATE").value(nosr);
}
out.name("BLUETOOTH_A2DP_IS_ON").value(bluetootha2dp);
out.name("WIREDHEADSET_IS_CONNECTED").value(wiredhs);
out.endObject();
}
});