Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix!: Default audio output to system preferences #1563

Merged
merged 10 commits into from
Jul 6, 2023
10 changes: 6 additions & 4 deletions packages/audioplayers/example/integration_test/lib_test.dart
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ void main() {

var audioContext = AudioContextConfig(
//ignore: avoid_redundant_argument_values
forceSpeaker: true,
route: AudioContextConfigRoute.system,
//ignore: avoid_redundant_argument_values
respectSilence: false,
).build();
Expand All @@ -170,7 +170,8 @@ void main() {
expect(player.state, PlayerState.completed);

audioContext = AudioContextConfig(
forceSpeaker: false,
//ignore: avoid_redundant_argument_values
route: AudioContextConfigRoute.system,
respectSilence: true,
).build();
await AudioPlayer.global.setAudioContext(audioContext);
Expand Down Expand Up @@ -203,7 +204,7 @@ void main() {

var audioContext = AudioContextConfig(
//ignore: avoid_redundant_argument_values
forceSpeaker: true,
route: AudioContextConfigRoute.system,
//ignore: avoid_redundant_argument_values
respectSilence: false,
).build();
Expand All @@ -223,7 +224,8 @@ void main() {
expect(player.state, PlayerState.stopped);

audioContext = AudioContextConfig(
forceSpeaker: false,
//ignore: avoid_redundant_argument_values
route: AudioContextConfigRoute.system,
respectSilence: true,
).build();
await AudioPlayer.global.setAudioContext(audioContext);
Expand Down
13 changes: 8 additions & 5 deletions packages/audioplayers/example/lib/tabs/audio_context.dart
Original file line number Diff line number Diff line change
Expand Up @@ -103,11 +103,14 @@ class AudioContextTabState extends State<AudioContextTab>
Widget _genericTab() {
return TabContent(
children: [
Cbx(
'Force Speaker',
value: audioContextConfig.forceSpeaker,
({value}) =>
updateConfig(audioContextConfig.copy(forceSpeaker: value)),
LabeledDropDown<AudioContextConfigRoute>(
label: 'Audio Route',
key: const Key('audioRoute'),
options: {for (var e in AudioContextConfigRoute.values) e: e.name},
selected: audioContextConfig.route,
onChange: (v) => updateConfig(
audioContextConfig.copy(route: v),
),
),
Cbx(
'Duck Audio',
Expand Down
14 changes: 8 additions & 6 deletions packages/audioplayers/test/global_audioplayers_test.dart
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,11 @@ void main() {
globalPlatform.clear();
});

/// Note that the [AudioContextIOS.category] has to be
/// [AVAudioSessionCategory.playback] to default the audio to the receiver
/// (e.g. built-in speakers or BT-device, if connected).
/// If using [AVAudioSessionCategory.playAndRecord] the audio will come from
/// the earpiece unless [AVAudioSessionOptions.defaultToSpeaker] is used.
test('set AudioContext', () async {
await globalScope.setAudioContext(const AudioContext());
final call = globalPlatform.popLastCall();
Expand All @@ -35,19 +40,16 @@ void main() {
call.value,
const AudioContext(
android: AudioContextAndroid(
isSpeakerphoneOn: true,
isSpeakerphoneOn: false,
audioMode: AndroidAudioMode.normal,
stayAwake: true,
stayAwake: false,
contentType: AndroidContentType.music,
usageType: AndroidUsageType.media,
audioFocus: AndroidAudioFocus.gain,
),
iOS: AudioContextIOS(
category: AVAudioSessionCategory.playback,
options: [
AVAudioSessionOptions.mixWithOthers,
AVAudioSessionOptions.defaultToSpeaker
],
options: [],
),
),
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ data class AudioContextAndroid(
) {
@SuppressLint("InlinedApi") // we are just using numerical constants
constructor() : this(
isSpeakerphoneOn = true,
isSpeakerphoneOn = false,
stayAwake = false,
contentType = CONTENT_TYPE_MUSIC,
usageType = USAGE_MEDIA,
Expand Down
4 changes: 2 additions & 2 deletions packages/audioplayers_darwin/ios/Classes/AudioContext.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ struct AudioContext {
let options: [AVAudioSession.CategoryOptions]

init() {
self.category = .playAndRecord
self.options = [.mixWithOthers, .defaultToSpeaker]
self.category = .playback
self.options = []
}

init(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,10 +60,11 @@ class AudioContextAndroid {
final AndroidUsageType usageType;
final AndroidAudioFocus audioFocus;

// Note when changing the defaults, it should also be changed in native code.
const AudioContextAndroid({
this.isSpeakerphoneOn = true,
this.isSpeakerphoneOn = false,
this.audioMode = AndroidAudioMode.normal,
this.stayAwake = true,
this.stayAwake = false,
this.contentType = AndroidContentType.music,
this.usageType = AndroidUsageType.media,
this.audioFocus = AndroidAudioFocus.gain,
Expand Down Expand Up @@ -105,12 +106,10 @@ class AudioContextIOS {
final AVAudioSessionCategory category;
final List<AVAudioSessionOptions> options;

// Note when changing the defaults, it should also be changed in native code.
const AudioContextIOS({
this.category = AVAudioSessionCategory.playback,
this.options = const [
AVAudioSessionOptions.mixWithOthers,
AVAudioSessionOptions.defaultToSpeaker
],
this.options = const [],
});

AudioContextIOS copy({
Expand Down Expand Up @@ -394,34 +393,64 @@ enum AVAudioSessionCategory {
/// Options that modify the behavior of an iOS `AVAudioSession` category.
///
/// Each value mirrors one `AVAudioSession.CategoryOptions` constant; the
/// linked Apple documentation is authoritative for which categories each
/// option may be combined with.
enum AVAudioSessionOptions {
/// An option that indicates whether audio from this session mixes with audio
/// from active sessions in other audio apps.
///
/// You can set this option explicitly only if the audio session category is
/// `playAndRecord`, `playback`, or `multiRoute`.
/// If you set the audio session category to `ambient`, the session
/// automatically sets this option. Likewise, setting the `duckOthers` or
/// `interruptSpokenAudioAndMixWithOthers` options also enables this option.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1616611-mixwithothers
mixWithOthers,

/// An option that reduces the volume of other audio sessions while audio from
/// this session plays.
///
/// You can set this option only if the audio session category is
/// `playAndRecord`, `playback`, or `multiRoute`.
/// Setting it implicitly sets the `mixWithOthers` option.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1616618-duckothers
duckOthers,

/// An option that determines whether to pause spoken audio content from other
/// sessions when your app plays its audio.
///
/// You can set this option only if the audio session category is
/// `playAndRecord`, `playback`, or `multiRoute`. Setting this option also
/// sets `mixWithOthers`.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1616534-interruptspokenaudioandmixwithot
interruptSpokenAudioAndMixWithOthers,

/// An option that determines whether Bluetooth hands-free devices appear as
/// available input routes.
///
/// You can set this option only if the audio session category is
/// `playAndRecord` or `record`.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1616518-allowbluetooth
allowBluetooth,

/// An option that determines whether you can stream audio from this session
/// to Bluetooth devices that support the Advanced Audio Distribution Profile
/// (A2DP).
///
/// The system automatically routes to A2DP ports if you configure an app’s
/// audio session to use the `ambient`, `soloAmbient`, or `playback`
/// categories.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1771735-allowbluetootha2dp
allowBluetoothA2DP,

/// An option that determines whether you can stream audio from this session
/// to AirPlay devices.
///
/// You can only explicitly set this option if the audio session’s category is
/// set to `playAndRecord`.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1771736-allowairplay
allowAirPlay,

/// An option that determines whether audio from the session defaults to the
/// built-in speaker instead of the receiver.
///
/// You can set this option only when using the `playAndRecord` category.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/1616462-defaulttospeaker
defaultToSpeaker,

/// An option that indicates whether the system interrupts the audio session
/// when it mutes the built-in microphone.
///
/// If your app uses an audio session category that supports input and output,
/// such as `playAndRecord`, you can set this option to disable the default
/// behavior and continue using the session.
///
/// See: https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions/3727255-overridemutedmicrophoneinterrupt
overrideMutedMicrophoneInterruption,
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,18 +16,9 @@ import 'package:flutter/foundation.dart';
class AudioContextConfig {
/// Normally, audio played will respect the devices configured preferences.
/// However, if you want to bypass that and flag the system to use the
/// built-in speakers, you can set this flag.
///
/// On android, it will set `audioManager.isSpeakerphoneOn`.
///
/// On iOS, it will either:
///
/// * set the `.defaultToSpeaker` option OR
/// * call `overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)`
///
/// Note that, on iOS, this forces the category to be `.playAndRecord`, and
/// thus is forbidden when [respectSilence] is set.
final bool forceSpeaker;
/// built-in speakers or the earpiece, you can set this flag.
/// See [AudioContextConfigRoute] for more details on the options.
final AudioContextConfigRoute route;

/// This flag determines how your audio interacts with other audio playing on
/// the device.
Expand Down Expand Up @@ -74,20 +65,20 @@ class AudioContextConfig {
final bool stayAwake;

AudioContextConfig({
this.forceSpeaker = true,
this.route = AudioContextConfigRoute.system,
this.duckAudio = false,
this.respectSilence = false,
this.stayAwake = true,
this.stayAwake = false,
});

AudioContextConfig copy({
bool? forceSpeaker,
AudioContextConfigRoute? route,
bool? duckAudio,
bool? respectSilence,
bool? stayAwake,
}) {
return AudioContextConfig(
forceSpeaker: forceSpeaker ?? this.forceSpeaker,
route: route ?? this.route,
duckAudio: duckAudio ?? this.duckAudio,
respectSilence: respectSilence ?? this.respectSilence,
stayAwake: stayAwake ?? this.stayAwake,
Expand All @@ -103,11 +94,13 @@ class AudioContextConfig {

AudioContextAndroid buildAndroid() {
return AudioContextAndroid(
isSpeakerphoneOn: forceSpeaker,
isSpeakerphoneOn: route == AudioContextConfigRoute.speaker,
stayAwake: stayAwake,
usageType: respectSilence
? AndroidUsageType.notificationRingtone
: AndroidUsageType.media,
: (route == AudioContextConfigRoute.earpiece
? AndroidUsageType.voiceCommunication
: AndroidUsageType.media),
audioFocus: duckAudio
? AndroidAudioFocus.gainTransientMayDuck
: AndroidAudioFocus.gain,
Expand All @@ -121,19 +114,48 @@ class AudioContextConfig {
return AudioContextIOS(
category: respectSilence
? AVAudioSessionCategory.ambient
: AVAudioSessionCategory.playback,
options: [AVAudioSessionOptions.mixWithOthers] +
(duckAudio ? [AVAudioSessionOptions.duckOthers] : []) +
(forceSpeaker ? [AVAudioSessionOptions.defaultToSpeaker] : []),
: (route == AudioContextConfigRoute.speaker
? AVAudioSessionCategory.playAndRecord
: (route == AudioContextConfigRoute.earpiece
? AVAudioSessionCategory.playAndRecord
: AVAudioSessionCategory.playback)),
options: (duckAudio
? [AVAudioSessionOptions.duckOthers]
: <AVAudioSessionOptions>[]) +
(route == AudioContextConfigRoute.speaker
? [AVAudioSessionOptions.defaultToSpeaker]
: []),
);
}

void validateIOS() {
// Please create a custom [AudioContextIOS] if the generic flags cannot
// represent your needs.
if (respectSilence && forceSpeaker) {
if (respectSilence && route == AudioContextConfigRoute.speaker) {
throw 'On iOS it is impossible to set both respectSilence and '
'forceSpeaker';
}
}
}

/// The preferred output route for audio playback.
///
/// Used by `AudioContextConfig` to translate a platform-neutral routing
/// preference into the platform-specific `AudioContextAndroid` /
/// `AudioContextIOS` settings.
enum AudioContextConfigRoute {
/// Use the system's default route. This can be e.g. the built-in speaker, the
/// earpiece, or a bluetooth device.
system,

/// Route audio to the earpiece (receiver).
///
/// On Android, it will set `AndroidUsageType.voiceCommunication`.
///
/// On iOS, it will set `AVAudioSessionCategory.playAndRecord`.
earpiece,

/// Force audio to the built-in loudspeaker.
///
/// On Android, it will set `audioManager.isSpeakerphoneOn`.
///
/// On iOS, it will either:
///
/// * set the `.defaultToSpeaker` option OR
/// * call `overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)`
///
/// Note that, on iOS, this forces the category to be `.playAndRecord`, and
/// thus is forbidden when [AudioContextConfig.respectSilence] is set.
speaker,
}
Original file line number Diff line number Diff line change
Expand Up @@ -71,9 +71,9 @@ void main() {
final call = popLastCall();
expect(call.method, 'setAudioContext');
expect(call.args, {
'isSpeakerphoneOn': true,
'isSpeakerphoneOn': false,
'audioMode': 0,
'stayAwake': true,
'stayAwake': false,
'contentType': 2,
'usageType': 1,
'audioFocus': 1,
Expand All @@ -85,13 +85,7 @@ void main() {
await platform.setGlobalAudioContext(const AudioContext());
final call = popLastCall();
expect(call.method, 'setAudioContext');
expect(call.args, {
'category': 'playback',
'options': [
'mixWithOthers',
'defaultToSpeaker',
]
});
expect(call.args, {'category': 'playback', 'options': []});
});
});

Expand Down