From 737aa94f7edb076d622c34e498b90f17c9959e9c Mon Sep 17 00:00:00 2001 From: Gustl22 Date: Wed, 23 Aug 2023 01:34:12 +0200 Subject: [PATCH] refactor: Lint Swift (#1613) # Description Closes #1587 --- .github/workflows/test.yml | 7 + .swift-format | 4 + contributing.md | 6 +- .../audioplayers/example/ios/Podfile.lock | 14 +- .../example/ios/Runner/AppDelegate.swift | 2 +- .../audioplayers/example/macos/Podfile.lock | 2 +- .../SwiftAudioplayersDarwinPlugin.swift | 739 ++++++++++-------- .../darwin/Classes/Utils.swift | 52 +- .../darwin/Classes/WrappedMediaPlayer.swift | 444 +++++------ .../ios/Classes/AudioContext.swift | 199 ++--- .../macos/Classes/AudioContext.swift | 16 +- 11 files changed, 792 insertions(+), 693 deletions(-) create mode 100644 .swift-format diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3cd4101e3..ad0d15ce0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -108,6 +108,13 @@ jobs: DEFAULT_BRANCH: main VALIDATE_KOTLIN_ANDROID: true VALIDATE_CLANG_FORMAT: true + - name: Lint Swift + # TODO: check if swift-format can be integrated in super-linter, as soon as Alpine is supported + # https://github.com/apple/swift-docker/issues/231 + # https://github.com/super-linter/super-linter/pull/4568 + run: | + docker run --rm --workdir=/work --volume=$PWD:/work mtgto/swift-format:5.8 \ + lint --parallel --strict --recursive packages/audioplayers_darwin web: runs-on: ubuntu-latest diff --git a/.swift-format b/.swift-format new file mode 100644 index 000000000..5fccbe9ca --- /dev/null +++ b/.swift-format @@ -0,0 +1,4 @@ +{ + "_comment": "details can be found at: https://github.com/apple/swift-format/blob/main/Documentation/Configuration.md", + "version": 1 +} diff --git a/contributing.md b/contributing.md index 011f1af5a..eb4aa6f37 100644 --- a/contributing.md +++ b/contributing.md @@ -136,7 +136,11 @@ Once your feature got approved to start developing, feel free to send your PRs! * Start your PR title with a [conventional commit](https://www.conventionalcommits.org) type (feat:, fix: etc). * Your build must pass. Please make sure everything is green! - * Follow guidelines. For the Dart side, follow [Flame's official style guide](https://github.com/flame-engine/flame/blob/main/doc/development/style_guide.md). We don't have a code analyzer for the native side (yet!), but please follow the code around you to make it properly formatted and linted. There is nothing worse than badly formatted code! + * Follow guidelines. For the Dart side, follow [Flame's official style guide](https://github.com/flame-engine/flame/blob/main/doc/development/style_guide.md). 
+ We also provide code linting and formatting for the native side, where we take the [Flutter's formatting](https://github.com/flutter/packages/blob/main/script/tool/lib/src/format_command.dart) as reference: + * C/C++: [Chromium coding style](https://chromium.googlesource.com/chromium/src/+/refs/heads/main/styleguide/c++/c++.md) via [clang-format](https://clang.llvm.org/docs/ClangFormatStyleOptions.html), available for [CLion](https://www.jetbrains.com/help/clion/clangformat-as-alternative-formatter.html) and [VSCode](https://code.visualstudio.com/docs/cpp/cpp-ide#_code-formatting) + * Kotlin: [Kotlin style guide](https://developer.android.com/kotlin/style-guide) via [ktlint](https://github.com/pinterest/ktlint) and [EditorConfig](https://editorconfig.org/), available for [IntelliJ](https://www.jetbrains.com/help/idea/editorconfig.html) and [VSCode](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig) + * Swift: [Google Swift Style Guide](https://google.github.io/swift/) via [swift-format](https://github.com/apple/swift-format), available for [VSCode](https://marketplace.visualstudio.com/items?itemName=vknabel.vscode-apple-swift-format) or CLI with [native installation](https://github.com/apple/swift-format#getting-swift-format) or [Docker](https://github.com/mtgto/docker-swift-format/tree/main) * Write clean, beautiful and easy to understand code, with comments if necessary and docs if applicable. * Update our README/getting started/feature parity table/any other docs accordingly to your change, making it clear which platforms are supported. * Try to support all platforms where it makes sense. This is a hard thing to ask, and we understand and we will merge PRs that only work on one platform as well. But if you have the time, please help us with feature parity. 
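Note for contributors (a hedged sketch, not part of the patch itself): the same Docker image used by the new CI lint step can be run locally from the repository root. The first command mirrors the workflow's lint step exactly; the second assumes swift-format's documented `format --in-place --recursive` subcommand as a convenience for auto-fixing, which this PR's workflow does not add.

    # Lint only (same image and flags as the CI step added in this PR):
    docker run --rm --workdir=/work --volume=$PWD:/work mtgto/swift-format:5.8 \
      lint --parallel --strict --recursive packages/audioplayers_darwin

    # Rewrite files in place (assumed swift-format subcommand; not used by CI):
    docker run --rm --workdir=/work --volume=$PWD:/work mtgto/swift-format:5.8 \
      format --in-place --recursive packages/audioplayers_darwin
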
diff --git a/packages/audioplayers/example/ios/Podfile.lock b/packages/audioplayers/example/ios/Podfile.lock index 3c5db9d18..cde62747d 100644 --- a/packages/audioplayers/example/ios/Podfile.lock +++ b/packages/audioplayers/example/ios/Podfile.lock @@ -41,10 +41,10 @@ PODS: - path_provider_foundation (0.0.1): - Flutter - FlutterMacOS - - SDWebImage (5.13.4): - - SDWebImage/Core (= 5.13.4) - - SDWebImage/Core (5.13.4) - - SwiftyGif (5.4.3) + - SDWebImage (5.17.0): + - SDWebImage/Core (= 5.17.0) + - SDWebImage/Core (5.17.0) + - SwiftyGif (5.4.4) DEPENDENCIES: - audioplayers_darwin (from `.symlinks/plugins/audioplayers_darwin/ios`) @@ -79,9 +79,9 @@ SPEC CHECKSUMS: file_picker: ce3938a0df3cc1ef404671531facef740d03f920 Flutter: f04841e97a9d0b0a8025694d0796dd46242b2854 integration_test: 13825b8a9334a850581300559b8839134b124670 - path_provider_foundation: c68054786f1b4f3343858c1e1d0caaded73f0be9 - SDWebImage: e5cc87bf736e60f49592f307bdf9e157189298a3 - SwiftyGif: 6c3eafd0ce693cad58bb63d2b2fb9bacb8552780 + path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943 + SDWebImage: 750adf017a315a280c60fde706ab1e552a3ae4e9 + SwiftyGif: 93a1cc87bf3a51916001cf8f3d63835fb64c819f PODFILE CHECKSUM: ef19549a9bc3046e7bb7d2fab4d021637c0c58a3 diff --git a/packages/audioplayers/example/ios/Runner/AppDelegate.swift b/packages/audioplayers/example/ios/Runner/AppDelegate.swift index 70693e4a8..9074fee92 100644 --- a/packages/audioplayers/example/ios/Runner/AppDelegate.swift +++ b/packages/audioplayers/example/ios/Runner/AppDelegate.swift @@ -1,5 +1,5 @@ -import UIKit import Flutter +import UIKit @UIApplicationMain @objc class AppDelegate: FlutterAppDelegate { diff --git a/packages/audioplayers/example/macos/Podfile.lock b/packages/audioplayers/example/macos/Podfile.lock index 0945af2c7..3b27abfe6 100644 --- a/packages/audioplayers/example/macos/Podfile.lock +++ b/packages/audioplayers/example/macos/Podfile.lock @@ -22,7 +22,7 @@ EXTERNAL SOURCES: SPEC CHECKSUMS: audioplayers_darwin: dcad41de4fbd0099cb3749f7ab3b0cb8f70b810c FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24 - path_provider_foundation: c68054786f1b4f3343858c1e1d0caaded73f0be9 + path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943 PODFILE CHECKSUM: 353c8bcc5d5b0994e508d035b5431cfe18c1dea7 diff --git a/packages/audioplayers_darwin/darwin/Classes/SwiftAudioplayersDarwinPlugin.swift b/packages/audioplayers_darwin/darwin/Classes/SwiftAudioplayersDarwinPlugin.swift index 42e930dc7..6f3c6aa86 100644 --- a/packages/audioplayers_darwin/darwin/Classes/SwiftAudioplayersDarwinPlugin.swift +++ b/packages/audioplayers_darwin/darwin/Classes/SwiftAudioplayersDarwinPlugin.swift @@ -1,388 +1,465 @@ -import AVKit import AVFoundation +import AVKit #if os(iOS) -import Flutter -import UIKit -import MediaPlayer + import Flutter + import UIKit + import MediaPlayer #else -import FlutterMacOS -import AVFAudio + import FlutterMacOS + import AVFAudio #endif -let CHANNEL_NAME = "xyz.luan/audioplayers" -let GLOBAL_CHANNEL_NAME = "xyz.luan/audioplayers.global" +let channelName = "xyz.luan/audioplayers" +let globalChannelName = "xyz.luan/audioplayers.global" public class SwiftAudioplayersDarwinPlugin: NSObject, FlutterPlugin { - var registrar: FlutterPluginRegistrar - var binaryMessenger: FlutterBinaryMessenger - var methods: FlutterMethodChannel - var globalMethods: FlutterMethodChannel - var globalEvents: GlobalAudioPlayersStreamHandler - - var globalContext = AudioContext() - var players = [String: WrappedMediaPlayer]() - - init(registrar: 
FlutterPluginRegistrar, - binaryMessenger: FlutterBinaryMessenger, - methodChannel: FlutterMethodChannel, - globalMethodChannel: FlutterMethodChannel, - globalEventChannel: FlutterEventChannel) { - self.registrar = registrar - self.binaryMessenger = binaryMessenger - self.methods = methodChannel - self.globalMethods = globalMethodChannel - self.globalEvents = GlobalAudioPlayersStreamHandler() - - do { - try globalContext.apply() - } catch { - // ignore error on initialization - } - - super.init() - - self.globalMethods.setMethodCallHandler(self.handleGlobalMethodCall) - globalEventChannel.setStreamHandler(self.globalEvents); + var registrar: FlutterPluginRegistrar + var binaryMessenger: FlutterBinaryMessenger + var methods: FlutterMethodChannel + var globalMethods: FlutterMethodChannel + var globalEvents: GlobalAudioPlayersStreamHandler + + var globalContext = AudioContext() + var players = [String: WrappedMediaPlayer]() + + init( + registrar: FlutterPluginRegistrar, + binaryMessenger: FlutterBinaryMessenger, + methodChannel: FlutterMethodChannel, + globalMethodChannel: FlutterMethodChannel, + globalEventChannel: FlutterEventChannel + ) { + self.registrar = registrar + self.binaryMessenger = binaryMessenger + self.methods = methodChannel + self.globalMethods = globalMethodChannel + self.globalEvents = GlobalAudioPlayersStreamHandler() + + do { + try globalContext.apply() + } catch { + // ignore error on initialization } - public static func register(with registrar: FlutterPluginRegistrar) { - // apparently there is a bug in Flutter causing some inconsistency between Flutter and FlutterMacOS - // See: https://github.com/flutter/flutter/issues/118103 - #if os(iOS) - let binaryMessenger = registrar.messenger() - #else - let binaryMessenger = registrar.messenger - #endif - - let methods = FlutterMethodChannel(name: CHANNEL_NAME, binaryMessenger: binaryMessenger) - let globalMethods = FlutterMethodChannel(name: GLOBAL_CHANNEL_NAME, binaryMessenger: binaryMessenger) - let globalEvents = FlutterEventChannel(name: GLOBAL_CHANNEL_NAME + "/events", binaryMessenger: binaryMessenger) - - let instance = SwiftAudioplayersDarwinPlugin( - registrar: registrar, - binaryMessenger: binaryMessenger, - methodChannel: methods, - globalMethodChannel: globalMethods, - globalEventChannel: globalEvents) - registrar.addMethodCallDelegate(instance, channel: methods) + super.init() + + self.globalMethods.setMethodCallHandler(self.handleGlobalMethodCall) + globalEventChannel.setStreamHandler(self.globalEvents) + } + + public static func register(with registrar: FlutterPluginRegistrar) { + // apparently there is a bug in Flutter causing some inconsistency between Flutter and FlutterMacOS + // See: https://github.com/flutter/flutter/issues/118103 + #if os(iOS) + let binaryMessenger = registrar.messenger() + #else + let binaryMessenger = registrar.messenger + #endif + + let methods = FlutterMethodChannel(name: channelName, binaryMessenger: binaryMessenger) + let globalMethods = FlutterMethodChannel( + name: globalChannelName, binaryMessenger: binaryMessenger) + let globalEvents = FlutterEventChannel( + name: globalChannelName + "/events", binaryMessenger: binaryMessenger) + + let instance = SwiftAudioplayersDarwinPlugin( + registrar: registrar, + binaryMessenger: binaryMessenger, + methodChannel: methods, + globalMethodChannel: globalMethods, + globalEventChannel: globalEvents) + registrar.addMethodCallDelegate(instance, channel: methods) + } + + public func detachFromEngine(for registrar: FlutterPluginRegistrar) { + 
dispose() + } + + func dispose() { + for (_, player) in self.players { + player.dispose() } - - public func detachFromEngine(for registrar: FlutterPluginRegistrar) { - dispose() + self.players = [:] + } + + private func handleGlobalMethodCall(call: FlutterMethodCall, result: @escaping FlutterResult) { + let method = call.method + + guard let args = call.arguments as? [String: Any] else { + result( + FlutterError( + code: "DarwinAudioError", message: "Failed to parse call.arguments from Flutter.", + details: nil)) + return } - func dispose() { - for (_, player) in self.players { - player.dispose() + // global handlers (no playerId) + if method == "setAudioContext" { + do { + guard let context = try AudioContext.parse(args: args) else { + result( + FlutterError( + code: "DarwinAudioError", + message: "Error calling setAudioContext, context could not be parsed", details: nil)) + return } - self.players = [:] + globalContext = context + + try globalContext.apply() + } catch AudioPlayerError.warning(let warnMsg) { + globalEvents.onLog(message: warnMsg) + } catch { + result( + FlutterError( + code: "DarwinAudioError", message: "Error configuring global audio session: \(error)", + details: nil)) + } + } else if method == "emitLog" { + guard let message = args["message"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitLog, message cannot be null", + details: nil)) + return + } + globalEvents.onLog(message: message) + } else if method == "emitError" { + guard let code = args["code"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitError, code cannot be null", + details: nil)) + return + } + guard let message = args["message"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitError, message cannot be null", + details: nil)) + return + } + globalEvents.onError(code: code, message: message, details: nil) + } else { + result(FlutterMethodNotImplemented) + return } - private func handleGlobalMethodCall(call: FlutterMethodCall, result: @escaping FlutterResult) { - let method = call.method + // default result (bypass by adding `return` to your branch) + result(1) + } - guard let args = call.arguments as? [String: Any] else { - result(FlutterError(code: "DarwinAudioError", message: "Failed to parse call.arguments from Flutter.", details: nil)) - return - } - - // global handlers (no playerId) - if method == "setAudioContext" { - do { - guard let context = try AudioContext.parse(args: args) else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling setAudioContext, context could not be parsed", details: nil)) - return - } - globalContext = context - - try globalContext.apply() - } catch AudioPlayerError.warning(let warnMsg) { - globalEvents.onLog(message: warnMsg) - } catch { - result(FlutterError(code: "DarwinAudioError", message: "Error configuring global audio session: \(error)", details: nil)) - } - } else if method == "emitLog" { - guard let message = args["message"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitLog, message cannot be null", details: nil)) - return - } - globalEvents.onLog(message: message) - } else if method == "emitError" { - guard let code = args["code"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitError, code cannot be null", details: nil)) - return - } - guard let message = args["message"] as? 
String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitError, message cannot be null", details: nil)) - return - } - globalEvents.onError(code: code, message: message, details: nil) - } else { - result(FlutterMethodNotImplemented) - return - } + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { + let method = call.method - // default result (bypass by adding `return` to your branch) - result(1) + guard let args = call.arguments as? [String: Any] else { + result( + FlutterError( + code: "DarwinAudioError", message: "Failed to parse call.arguments from Flutter.", + details: nil)) + return } - public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { - let method = call.method - - guard let args = call.arguments as? [String: Any] else { - result(FlutterError(code: "DarwinAudioError", message: "Failed to parse call.arguments from Flutter.", details: nil)) - return - } - - // player specific handlers - guard let playerId = args["playerId"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Call missing mandatory parameter playerId.", details: nil)) - return - } + // player specific handlers + guard let playerId = args["playerId"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Call missing mandatory parameter playerId.", + details: nil)) + return + } - if method == "create" { - self.createPlayer(playerId: playerId) - result(1) - return - } + if method == "create" { + self.createPlayer(playerId: playerId) + result(1) + return + } - guard let player = self.getPlayer(playerId: playerId) else { - result(FlutterError(code: "DarwinAudioError", message: "Player has not yet been created or has already been disposed.", details: nil)) - return - } + guard let player = self.getPlayer(playerId: playerId) else { + result( + FlutterError( + code: "DarwinAudioError", + message: "Player has not yet been created or has already been disposed.", details: nil)) + return + } - if method == "pause" { - player.pause() - } else if method == "resume" { - player.resume() - } else if method == "stop" { - player.stop() { - result(1) - } - return - } else if method == "release" { - player.release() { - result(1) - } - return - } else if method == "seek" { - guard let position = args["position"] as? Int else { - result(FlutterError(code: "DarwinAudioError", message: "Null position received on seek", details: nil)) - return - } - let time = toCMTime(millis: position) - player.seek(time: time) { - result(1) - } - return - } else if method == "setSourceUrl" { - let url: String? = args["url"] as? String - let isLocal: Bool = (args["isLocal"] as? Bool) ?? false - - if url == nil { - result(FlutterError(code: "DarwinAudioError", message: "Null URL received on setSourceUrl", details: nil)) - return - } - - player.setSourceUrl(url: url!, isLocal: isLocal, completer: { - player.eventHandler.onPrepared(isPrepared: true) - }, completerError: { - player.eventHandler.onError(code: "DarwinAudioError", message: "AVPlayerItem.Status.failed on setSourceUrl", details: nil) - }) - result(1) - return - } else if method == "setSourceBytes" { - result(FlutterError(code: "DarwinAudioError", message: "setSourceBytes is not currently implemented on iOS", details: nil)) - return - } else if method == "getDuration" { - let duration = player.getDuration() - result(duration) - } else if method == "setVolume" { - guard let volume = args["volume"] as? 
Double else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling setVolume, volume cannot be null", details: nil)) - return - } - - player.setVolume(volume: volume) - } else if method == "setBalance" { - player.eventHandler.onLog(message: "setBalance is not currently implemented on iOS") - result(0) - return - } else if method == "getCurrentPosition" { - let currentPosition = player.getCurrentPosition() - result(currentPosition) - return - } else if method == "setPlaybackRate" { - guard let playbackRate = args["playbackRate"] as? Double else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling setPlaybackRate, playbackRate cannot be null", details: nil)) - return - } - player.setPlaybackRate(playbackRate: playbackRate) - } else if method == "setReleaseMode" { - guard let releaseMode = args["releaseMode"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling setReleaseMode, releaseMode cannot be null", details: nil)) - return - } - // Note: there is no "release" on iOS; hence we only care if it's looping or not - let looping = releaseMode.hasSuffix("loop") - player.looping = looping - } else if method == "setPlayerMode" { - // no-op for darwin; only one player mode - } else if method == "setAudioContext" { - player.eventHandler.onLog(message: "iOS does not allow for player-specific audio contexts; `setAudioContext` will set the global audio context instead (like `global.setAudioContext`).") - do { - guard let context = try AudioContext.parse(args: args) else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling setAudioContext, context could not be parsed", details: nil)) - return - } - globalContext = context - - try globalContext.apply() - } catch AudioPlayerError.warning(let warnMsg) { - globalEvents.onLog(message: warnMsg) - } catch { - result(FlutterError(code: "DarwinAudioError", message: "Error configuring audio session: \(error)", details: nil)) - } - } else if method == "emitLog" { - guard let message = args["message"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitLog, message cannot be null", details: nil)) - return - } - player.eventHandler.onLog(message: message) - } else if method == "emitError" { - guard let code = args["code"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitError, code cannot be null", details: nil)) - return - } - guard let message = args["message"] as? String else { - result(FlutterError(code: "DarwinAudioError", message: "Error calling emitError, message cannot be null", details: nil)) - return - } - player.eventHandler.onError(code: code, message: message, details: nil) - } else if method == "dispose" { - player.dispose() { - self.players[playerId] = nil - result(1) - } - return - } else { - result(FlutterMethodNotImplemented) - return + if method == "pause" { + player.pause() + } else if method == "resume" { + player.resume() + } else if method == "stop" { + player.stop { + result(1) + } + return + } else if method == "release" { + player.release { + result(1) + } + return + } else if method == "seek" { + guard let position = args["position"] as? Int else { + result( + FlutterError( + code: "DarwinAudioError", message: "Null position received on seek", details: nil)) + return + } + let time = toCMTime(millis: position) + player.seek(time: time) { + result(1) + } + return + } else if method == "setSourceUrl" { + let url: String? = args["url"] as? 
String + let isLocal: Bool = (args["isLocal"] as? Bool) ?? false + + if url == nil { + result( + FlutterError( + code: "DarwinAudioError", message: "Null URL received on setSourceUrl", details: nil)) + return + } + + player.setSourceUrl( + url: url!, isLocal: isLocal, + completer: { + player.eventHandler.onPrepared(isPrepared: true) + }, + completerError: { + player.eventHandler.onError( + code: "DarwinAudioError", message: "AVPlayerItem.Status.failed on setSourceUrl", + details: nil) + }) + result(1) + return + } else if method == "setSourceBytes" { + result( + FlutterError( + code: "DarwinAudioError", message: "setSourceBytes is not currently implemented on iOS", + details: nil)) + return + } else if method == "getDuration" { + let duration = player.getDuration() + result(duration) + } else if method == "setVolume" { + guard let volume = args["volume"] as? Double else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling setVolume, volume cannot be null", + details: nil)) + return + } + + player.setVolume(volume: volume) + } else if method == "setBalance" { + player.eventHandler.onLog(message: "setBalance is not currently implemented on iOS") + result(0) + return + } else if method == "getCurrentPosition" { + let currentPosition = player.getCurrentPosition() + result(currentPosition) + return + } else if method == "setPlaybackRate" { + guard let playbackRate = args["playbackRate"] as? Double else { + result( + FlutterError( + code: "DarwinAudioError", + message: "Error calling setPlaybackRate, playbackRate cannot be null", details: nil)) + return + } + player.setPlaybackRate(playbackRate: playbackRate) + } else if method == "setReleaseMode" { + guard let releaseMode = args["releaseMode"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", + message: "Error calling setReleaseMode, releaseMode cannot be null", details: nil)) + return + } + // Note: there is no "release" on iOS; hence we only care if it's looping or not + let looping = releaseMode.hasSuffix("loop") + player.looping = looping + } else if method == "setPlayerMode" { + // no-op for darwin; only one player mode + } else if method == "setAudioContext" { + player.eventHandler.onLog( + message: + "iOS does not allow for player-specific audio contexts; `setAudioContext` will set the global audio context instead (like `global.setAudioContext`)." + ) + do { + guard let context = try AudioContext.parse(args: args) else { + result( + FlutterError( + code: "DarwinAudioError", + message: "Error calling setAudioContext, context could not be parsed", details: nil)) + return } - - // default result (bypass by adding `return` to your branch) + globalContext = context + + try globalContext.apply() + } catch AudioPlayerError.warning(let warnMsg) { + globalEvents.onLog(message: warnMsg) + } catch { + result( + FlutterError( + code: "DarwinAudioError", message: "Error configuring audio session: \(error)", + details: nil)) + } + } else if method == "emitLog" { + guard let message = args["message"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitLog, message cannot be null", + details: nil)) + return + } + player.eventHandler.onLog(message: message) + } else if method == "emitError" { + guard let code = args["code"] as? String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitError, code cannot be null", + details: nil)) + return + } + guard let message = args["message"] as? 
String else { + result( + FlutterError( + code: "DarwinAudioError", message: "Error calling emitError, message cannot be null", + details: nil)) + return + } + player.eventHandler.onError(code: code, message: message, details: nil) + } else if method == "dispose" { + player.dispose { + self.players[playerId] = nil result(1) + } + return + } else { + result(FlutterMethodNotImplemented) + return } - func createPlayer(playerId: String) { - let eventChannel = FlutterEventChannel(name: CHANNEL_NAME + "/events/" + playerId, binaryMessenger: self.binaryMessenger) - - let eventHandler = AudioPlayersStreamHandler() + // default result (bypass by adding `return` to your branch) + result(1) + } - eventChannel.setStreamHandler(eventHandler); + func createPlayer(playerId: String) { + let eventChannel = FlutterEventChannel( + name: channelName + "/events/" + playerId, binaryMessenger: self.binaryMessenger) - let newPlayer = WrappedMediaPlayer( - reference: self, - eventHandler: eventHandler - ) - players[playerId] = newPlayer - } - - func getPlayer(playerId: String) -> WrappedMediaPlayer? { - return players[playerId] - } + let eventHandler = AudioPlayersStreamHandler() - func controlAudioSession() { - let anyIsPlaying = players.values.contains { player in - player.isPlaying - } + eventChannel.setStreamHandler(eventHandler) - do { - try globalContext.activateAudioSession(active: anyIsPlaying) - } catch { - self.globalEvents.onError(code: "DarwinAudioError", message: "Error configuring audio session: \(error)", details: nil) - } - } -} + let newPlayer = WrappedMediaPlayer( + reference: self, + eventHandler: eventHandler + ) + players[playerId] = newPlayer + } -class AudioPlayersStreamHandler: NSObject, FlutterStreamHandler { - var sink: FlutterEventSink? + func getPlayer(playerId: String) -> WrappedMediaPlayer? { + return players[playerId] + } - public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { - self.sink = events - // events(FlutterEndOfEventStream) // when stream is over - return nil + func controlAudioSession() { + let anyIsPlaying = players.values.contains { player in + player.isPlaying } - public func onCancel(withArguments arguments: Any?) -> FlutterError? { - return nil + do { + try globalContext.activateAudioSession(active: anyIsPlaying) + } catch { + self.globalEvents.onError( + code: "DarwinAudioError", message: "Error configuring audio session: \(error)", details: nil + ) } + } +} - func onSeekComplete() { - if let eventSink = self.sink { - eventSink(["event": "audio.onSeekComplete"]) - } +class AudioPlayersStreamHandler: NSObject, FlutterStreamHandler { + var sink: FlutterEventSink? + + public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) + -> FlutterError? + { + self.sink = events + // events(FlutterEndOfEventStream) // when stream is over + return nil + } + + public func onCancel(withArguments arguments: Any?) -> FlutterError? 
{ + return nil + } + + func onSeekComplete() { + if let eventSink = self.sink { + eventSink(["event": "audio.onSeekComplete"]) } + } - func onComplete() { - if let eventSink = self.sink { - eventSink(["event": "audio.onComplete"]) - } + func onComplete() { + if let eventSink = self.sink { + eventSink(["event": "audio.onComplete"]) } + } - func onCurrentPosition(millis: Int) { - if let eventSink = self.sink { - eventSink(["event": "audio.onCurrentPosition", "value": millis] as [String: Any]) - } + func onCurrentPosition(millis: Int) { + if let eventSink = self.sink { + eventSink(["event": "audio.onCurrentPosition", "value": millis] as [String: Any]) } + } - func onDuration(millis: Int) { - if let eventSink = self.sink { - eventSink(["event": "audio.onDuration", "value": millis] as [String: Any]) - } + func onDuration(millis: Int) { + if let eventSink = self.sink { + eventSink(["event": "audio.onDuration", "value": millis] as [String: Any]) } + } - func onPrepared(isPrepared: Bool) { - if let eventSink = self.sink { - eventSink(["event": "audio.onPrepared", "value": isPrepared] as [String: Any]) - } + func onPrepared(isPrepared: Bool) { + if let eventSink = self.sink { + eventSink(["event": "audio.onPrepared", "value": isPrepared] as [String: Any]) } + } - func onLog(message: String) { - if let eventSink = self.sink { - eventSink(["event": "audio.onLog", "value": message]) - } + func onLog(message: String) { + if let eventSink = self.sink { + eventSink(["event": "audio.onLog", "value": message]) } + } - func onError(code: String, message: String, details: Any?) { - if let eventSink = self.sink { - eventSink(FlutterError(code: code, message: message, details: details)) - } + func onError(code: String, message: String, details: Any?) { + if let eventSink = self.sink { + eventSink(FlutterError(code: code, message: message, details: details)) } + } } class GlobalAudioPlayersStreamHandler: NSObject, FlutterStreamHandler { - var sink: FlutterEventSink? - - public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { - self.sink = events - return nil + var sink: FlutterEventSink? + + public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) + -> FlutterError? + { + self.sink = events + return nil + } + + public func onCancel(withArguments arguments: Any?) -> FlutterError? { + return nil + } + + func onLog(message: String) { + if let eventSink = self.sink { + eventSink(["event": "audio.onLog", "value": message]) } + } - public func onCancel(withArguments arguments: Any?) -> FlutterError? { - return nil - } - - func onLog(message: String) { - if let eventSink = self.sink { - eventSink(["event": "audio.onLog", "value": message]) - } - } - - func onError(code: String, message: String, details: Any?) { - if let eventSink = self.sink { - eventSink(FlutterError(code: code, message: message, details: details)) - } + func onError(code: String, message: String, details: Any?) 
{ + if let eventSink = self.sink { + eventSink(FlutterError(code: code, message: message, details: details)) } + } } diff --git a/packages/audioplayers_darwin/darwin/Classes/Utils.swift b/packages/audioplayers_darwin/darwin/Classes/Utils.swift index c0e802187..5d5f819b7 100644 --- a/packages/audioplayers_darwin/darwin/Classes/Utils.swift +++ b/packages/audioplayers_darwin/darwin/Classes/Utils.swift @@ -1,49 +1,49 @@ import AVKit extension String { - func deletingPrefix(_ prefix: String) -> String { - guard self.hasPrefix(prefix) else { - return self - } - return String(self.dropFirst(prefix.count)) + func deletingPrefix(_ prefix: String) -> String { + guard self.hasPrefix(prefix) else { + return self } + return String(self.dropFirst(prefix.count)) + } } func toCMTime(millis: Int) -> CMTime { - return toCMTime(millis: Float(millis)) + return toCMTime(millis: Float(millis)) } func toCMTime(millis: Double) -> CMTime { - return toCMTime(millis: Float(millis)) + return toCMTime(millis: Float(millis)) } func toCMTime(millis: Float) -> CMTime { - return CMTimeMakeWithSeconds(Float64(millis) / 1000, preferredTimescale: Int32(NSEC_PER_SEC)) + return CMTimeMakeWithSeconds(Float64(millis) / 1000, preferredTimescale: Int32(NSEC_PER_SEC)) } func fromCMTime(time: CMTime) -> Int { - guard CMTIME_IS_NUMERIC(time) else { - return 0 - } - let seconds: Float64 = CMTimeGetSeconds(time) - let milliseconds: Int = Int(seconds * 1000) - return milliseconds + guard CMTIME_IS_NUMERIC(time) else { + return 0 + } + let seconds: Float64 = CMTimeGetSeconds(time) + let milliseconds: Int = Int(seconds * 1000) + return milliseconds } class TimeObserver { - let player: AVPlayer - let observer: Any - - init( - player: AVPlayer, - observer: Any - ) { - self.player = player - self.observer = observer - } + let player: AVPlayer + let observer: Any + + init( + player: AVPlayer, + observer: Any + ) { + self.player = player + self.observer = observer + } } enum AudioPlayerError: Error { - case error(String) - case warning(String) + case error(String) + case warning(String) } diff --git a/packages/audioplayers_darwin/darwin/Classes/WrappedMediaPlayer.swift b/packages/audioplayers_darwin/darwin/Classes/WrappedMediaPlayer.swift index cebd2999d..9787a0efd 100644 --- a/packages/audioplayers_darwin/darwin/Classes/WrappedMediaPlayer.swift +++ b/packages/audioplayers_darwin/darwin/Classes/WrappedMediaPlayer.swift @@ -8,258 +8,262 @@ typealias Completer = () -> Void typealias CompleterError = () -> Void class WrappedMediaPlayer { - private(set) var eventHandler: AudioPlayersStreamHandler - private(set) var isPlaying: Bool - var looping: Bool - - private var reference: SwiftAudioplayersDarwinPlugin - private var player: AVPlayer? - private var playbackRate: Double - private var volume: Double - private var url: String? - - private var observers: [TimeObserver] - private var playerItemStatusObservation: NSKeyValueObservation? - - init( - reference: SwiftAudioplayersDarwinPlugin, - eventHandler: AudioPlayersStreamHandler, - player: AVPlayer? = nil, - playbackRate: Double = defaultPlaybackRate, - volume: Double = defaultVolume, - looping: Bool = defaultLooping, - url: String? = nil - ) { - self.reference = reference - self.eventHandler = eventHandler + private(set) var eventHandler: AudioPlayersStreamHandler + private(set) var isPlaying: Bool + var looping: Bool + + private var reference: SwiftAudioplayersDarwinPlugin + private var player: AVPlayer? + private var playbackRate: Double + private var volume: Double + private var url: String? 
+ + private var observers: [TimeObserver] + private var playerItemStatusObservation: NSKeyValueObservation? + + init( + reference: SwiftAudioplayersDarwinPlugin, + eventHandler: AudioPlayersStreamHandler, + player: AVPlayer? = nil, + playbackRate: Double = defaultPlaybackRate, + volume: Double = defaultVolume, + looping: Bool = defaultLooping, + url: String? = nil + ) { + self.reference = reference + self.eventHandler = eventHandler + self.player = player + self.observers = [] + self.playerItemStatusObservation = nil + + self.isPlaying = false + self.playbackRate = playbackRate + self.volume = volume + self.looping = looping + self.url = url + } + + func setSourceUrl( + url: String, + isLocal: Bool, + completer: Completer? = nil, + completerError: CompleterError? = nil + ) { + let playbackStatus = player?.currentItem?.status + + if self.url != url || playbackStatus == .failed || playbackStatus == nil { + let playerItem = createPlayerItem(url, isLocal) + let player: AVPlayer + if let existingPlayer = self.player { + reset() + self.url = url + existingPlayer.replaceCurrentItem(with: playerItem) + player = existingPlayer + } else { + player = AVPlayer.init(playerItem: playerItem) + configParameters(player: player) + self.player = player self.observers = [] - self.playerItemStatusObservation = nil - - self.isPlaying = false - self.playbackRate = playbackRate - self.volume = volume - self.looping = looping self.url = url - } - func setSourceUrl( - url: String, - isLocal: Bool, - completer: Completer? = nil, - completerError: CompleterError? = nil - ) { - let playbackStatus = player?.currentItem?.status - - if self.url != url || playbackStatus == .failed || playbackStatus == nil { - let playerItem = createPlayerItem(url, isLocal) - let player: AVPlayer - if let existingPlayer = self.player { - reset() - self.url = url - existingPlayer.replaceCurrentItem(with: playerItem) - player = existingPlayer - } else { - player = AVPlayer.init(playerItem: playerItem) - configParameters(player: player) - - self.player = player - self.observers = [] - self.url = url - - setUpPositionObserver(player) - } - - setUpSoundCompletedObserver(player, playerItem) - setUpPlayerItemStatusObservation(playerItem, completer, completerError) - } else { - if playbackStatus == .readyToPlay { - completer?() - } - } - } + setUpPositionObserver(player) + } - func getDuration() -> Int? { - guard let duration = getDurationCMTime() else { - return nil - } - return fromCMTime(time: duration) + setUpSoundCompletedObserver(player, playerItem) + setUpPlayerItemStatusObservation(playerItem, completer, completerError) + } else { + if playbackStatus == .readyToPlay { + completer?() + } } + } - func getCurrentPosition() -> Int? { - guard let time = getCurrentCMTime() else { - return nil - } - return fromCMTime(time: time) + func getDuration() -> Int? { + guard let duration = getDurationCMTime() else { + return nil } + return fromCMTime(time: duration) + } - func pause() { - isPlaying = false - player?.pause() + func getCurrentPosition() -> Int? 
{ + guard let time = getCurrentCMTime() else { + return nil } - - func resume() { - isPlaying = true - if let player = self.player { - configParameters(player: player) - if #available(iOS 10.0, macOS 10.12, *) { - player.playImmediately(atRate: Float(playbackRate)) - } else { - player.play() - } - updateDuration() - } - } - - func setVolume(volume: Double) { - self.volume = volume - player?.volume = Float(volume) + return fromCMTime(time: time) + } + + func pause() { + isPlaying = false + player?.pause() + } + + func resume() { + isPlaying = true + if let player = self.player { + configParameters(player: player) + if #available(iOS 10.0, macOS 10.12, *) { + player.playImmediately(atRate: Float(playbackRate)) + } else { + player.play() + } + updateDuration() } - - func setPlaybackRate(playbackRate: Double) { - self.playbackRate = playbackRate - player?.rate = Float(playbackRate) + } + + func setVolume(volume: Double) { + self.volume = volume + player?.volume = Float(volume) + } + + func setPlaybackRate(playbackRate: Double) { + self.playbackRate = playbackRate + player?.rate = Float(playbackRate) + } + + func seek(time: CMTime, completer: Completer? = nil) { + guard let currentItem = player?.currentItem else { + completer?() + return } - - func seek(time: CMTime, completer: Completer? = nil) { - guard let currentItem = player?.currentItem else { - completer?() - return - } - currentItem.seek(to: time) { - finished in - if !self.isPlaying { - self.player?.pause() - } - self.eventHandler.onSeekComplete() - if (finished) { - completer?() - } - } + currentItem.seek(to: time) { + finished in + if !self.isPlaying { + self.player?.pause() + } + self.eventHandler.onSeekComplete() + if finished { + completer?() + } } - - func stop(completer: Completer? = nil) { - pause() - seek(time: toCMTime(millis: 0), completer: completer) + } + + func stop(completer: Completer? = nil) { + pause() + seek(time: toCMTime(millis: 0), completer: completer) + } + + func release(completer: Completer? = nil) { + stop { + self.reset() + self.url = nil + completer?() } + } - func release(completer: Completer? = nil) { - stop { - self.reset() - self.url = nil - completer?() - } + func dispose(completer: Completer? = nil) { + release { + completer?() } - - func dispose(completer: Completer? = nil) { - release { - completer?() - } + } + + private func getDurationCMTime() -> CMTime? { + return player?.currentItem?.asset.duration + } + + private func getCurrentCMTime() -> CMTime? { + return player?.currentItem?.currentTime() + } + + private func createPlayerItem(_ url: String, _ isLocal: Bool) -> AVPlayerItem { + let parsedUrl = + isLocal ? URL.init(fileURLWithPath: url.deletingPrefix("file://")) : URL.init(string: url)! + let playerItem = AVPlayerItem.init(url: parsedUrl) + playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.timeDomain + return playerItem + } + + private func setUpPlayerItemStatusObservation( + _ playerItem: AVPlayerItem, _ completer: Completer?, _ completerError: CompleterError? + ) { + let newplayerItemStatusObservation = playerItem.observe(\AVPlayerItem.status) { + (playerItem, change) in + let status = playerItem.status + self.eventHandler.onLog(message: "player status: \(status), change: \(change)") + + switch playerItem.status { + case .readyToPlay: + self.updateDuration() + completer?() + case .failed: + self.reset() + completerError?() + default: + break + } } - private func getDurationCMTime() -> CMTime? 
{ - return player?.currentItem?.asset.duration - } + playerItemStatusObservation?.invalidate() + playerItemStatusObservation = newplayerItemStatusObservation + } - private func getCurrentCMTime() -> CMTime? { - return player?.currentItem?.currentTime() + private func setUpPositionObserver(_ player: AVPlayer) { + let interval = toCMTime(millis: 200) + let observer = player.addPeriodicTimeObserver(forInterval: interval, queue: nil) { + [weak self] time in + self?.onTimeInterval(time: time) } - - private func createPlayerItem(_ url: String, _ isLocal: Bool) -> AVPlayerItem { - let parsedUrl = isLocal ? URL.init(fileURLWithPath: url.deletingPrefix("file://")) : URL.init(string: url)! - let playerItem = AVPlayerItem.init(url: parsedUrl) - playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.timeDomain - return playerItem + self.observers.append(TimeObserver(player: player, observer: observer)) + } + + private func setUpSoundCompletedObserver(_ player: AVPlayer, _ playerItem: AVPlayerItem) { + let observer = NotificationCenter.default.addObserver( + forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, + object: playerItem, + queue: nil + ) { + [weak self] (notification) in + self?.onSoundComplete() } + self.observers.append(TimeObserver(player: player, observer: observer)) + } - private func setUpPlayerItemStatusObservation(_ playerItem: AVPlayerItem, _ completer: Completer?, _ completerError: CompleterError?) { - let newplayerItemStatusObservation = playerItem.observe(\AVPlayerItem.status) { (playerItem, change) in - let status = playerItem.status - self.eventHandler.onLog(message: "player status: \(status), change: \(change)") - - switch playerItem.status { - case .readyToPlay: - self.updateDuration() - completer?() - case .failed: - self.reset() - completerError?() - default: - break - } - } - - playerItemStatusObservation?.invalidate() - playerItemStatusObservation = newplayerItemStatusObservation + private func configParameters(player: AVPlayer) { + if isPlaying { + player.volume = Float(volume) + player.rate = Float(playbackRate) } + } - private func setUpPositionObserver(_ player: AVPlayer) { - let interval = toCMTime(millis: 200) - let observer = player.addPeriodicTimeObserver(forInterval: interval, queue: nil) { - [weak self] time in - self?.onTimeInterval(time: time) - } - self.observers.append(TimeObserver(player: player, observer: observer)) + private func reset() { + playerItemStatusObservation?.invalidate() + playerItemStatusObservation = nil + for observer in observers { + NotificationCenter.default.removeObserver(observer.observer) } + observers = [] + player?.replaceCurrentItem(with: nil) + } - private func setUpSoundCompletedObserver(_ player: AVPlayer, _ playerItem: AVPlayerItem) { - let observer = NotificationCenter.default.addObserver( - forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, - object: playerItem, - queue: nil - ) { - [weak self] (notification) in - self?.onSoundComplete() - } - self.observers.append(TimeObserver(player: player, observer: observer)) + private func updateDuration() { + guard let duration = player?.currentItem?.asset.duration else { + return } - - private func configParameters(player: AVPlayer) { - if (isPlaying) { - player.volume = Float(volume) - player.rate = Float(playbackRate) - } + if CMTimeGetSeconds(duration) > 0 { + let millis = fromCMTime(time: duration) + eventHandler.onDuration(millis: millis) } + } - private func reset() { - playerItemStatusObservation?.invalidate() - playerItemStatusObservation = nil - for 
observer in observers { - NotificationCenter.default.removeObserver(observer.observer) - } - observers = [] - player?.replaceCurrentItem(with: nil) + private func onSoundComplete() { + if !isPlaying { + return } - private func updateDuration() { - guard let duration = player?.currentItem?.asset.duration else { - return - } - if CMTimeGetSeconds(duration) > 0 { - let millis = fromCMTime(time: duration) - eventHandler.onDuration(millis: millis) - } + seek(time: toCMTime(millis: 0)) { + if self.looping { + self.resume() + } else { + self.isPlaying = false + } } - private func onSoundComplete() { - if !isPlaying { - return - } - - seek(time: toCMTime(millis: 0)) { - if self.looping { - self.resume() - } else { - self.isPlaying = false - } - } - - reference.controlAudioSession() - eventHandler.onComplete() - } + reference.controlAudioSession() + eventHandler.onComplete() + } - private func onTimeInterval(time: CMTime) { - let millis = fromCMTime(time: time) - eventHandler.onCurrentPosition(millis: millis) - } + private func onTimeInterval(time: CMTime) { + let millis = fromCMTime(time: time) + eventHandler.onCurrentPosition(millis: millis) + } } diff --git a/packages/audioplayers_darwin/ios/Classes/AudioContext.swift b/packages/audioplayers_darwin/ios/Classes/AudioContext.swift index 9806b14c0..37f3c5612 100644 --- a/packages/audioplayers_darwin/ios/Classes/AudioContext.swift +++ b/packages/audioplayers_darwin/ios/Classes/AudioContext.swift @@ -1,114 +1,117 @@ import MediaPlayer struct AudioContext { - let category: AVAudioSession.Category - let options: [AVAudioSession.CategoryOptions] + let category: AVAudioSession.Category + let options: [AVAudioSession.CategoryOptions] - init() { - self.category = .playback - self.options = [] - } + init() { + self.category = .playback + self.options = [] + } - init( - category: AVAudioSession.Category, - options: [AVAudioSession.CategoryOptions] - ) { - self.category = category - self.options = options - } + init( + category: AVAudioSession.Category, + options: [AVAudioSession.CategoryOptions] + ) { + self.category = category + self.options = options + } - func activateAudioSession( - active: Bool - ) throws { - let session = AVAudioSession.sharedInstance() - try session.setActive(active) - } + func activateAudioSession( + active: Bool + ) throws { + let session = AVAudioSession.sharedInstance() + try session.setActive(active) + } - func apply() throws { - let session = AVAudioSession.sharedInstance() - let combinedOptions = options.reduce(AVAudioSession.CategoryOptions()) { - [$0, $1] - } - try session.setCategory(category, options: combinedOptions) + func apply() throws { + let session = AVAudioSession.sharedInstance() + let combinedOptions = options.reduce(AVAudioSession.CategoryOptions()) { + [$0, $1] } + try session.setCategory(category, options: combinedOptions) + } - static func parse(args: [String: Any]) throws -> AudioContext? { - guard let categoryString = args["category"] as! String? else { - throw AudioPlayerError.error("Null value received for category") - } - guard let category = try parseCategory(category: categoryString) else { - return nil - } - - guard let optionStrings = args["options"] as! [String]? else { - throw AudioPlayerError.error("Null value received for options") - } - let options = try optionStrings.compactMap { - try parseCategoryOption(option: $0) - } - if (optionStrings.count != options.count) { - return nil - } + static func parse(args: [String: Any]) throws -> AudioContext? 
{ + guard let categoryString = args["category"] as! String? else { + throw AudioPlayerError.error("Null value received for category") + } + guard let category = try parseCategory(category: categoryString) else { + return nil + } - return AudioContext( - category: category, - options: options - ) + guard let optionStrings = args["options"] as! [String]? else { + throw AudioPlayerError.error("Null value received for options") + } + let options = try optionStrings.compactMap { + try parseCategoryOption(option: $0) } + if optionStrings.count != options.count { + return nil + } + + return AudioContext( + category: category, + options: options + ) + } - private static func parseCategory(category: String) throws -> AVAudioSession.Category? { - switch category { - case "ambient": - return .ambient - case "soloAmbient": - return .soloAmbient - case "playback": - return .playback - case "record": - return .record - case "playAndRecord": - return .playAndRecord - case "audioProcessing": - return .audioProcessing - case "multiRoute": - return .multiRoute - default: - throw AudioPlayerError.error("Invalid Category \(category)") - } + private static func parseCategory(category: String) throws -> AVAudioSession.Category? { + switch category { + case "ambient": + return .ambient + case "soloAmbient": + return .soloAmbient + case "playback": + return .playback + case "record": + return .record + case "playAndRecord": + return .playAndRecord + case "audioProcessing": + return .audioProcessing + case "multiRoute": + return .multiRoute + default: + throw AudioPlayerError.error("Invalid Category \(category)") } + } - private static func parseCategoryOption(option: String) throws -> AVAudioSession.CategoryOptions? { - switch option { - case "mixWithOthers": - return .mixWithOthers - case "duckOthers": - return .duckOthers - case "allowBluetooth": - return .allowBluetooth - case "defaultToSpeaker": - return .defaultToSpeaker - case "interruptSpokenAudioAndMixWithOthers": - return .interruptSpokenAudioAndMixWithOthers - case "allowBluetoothA2DP": - if #available(iOS 10.0, *) { - return .allowBluetoothA2DP - } else { - throw AudioPlayerError.warning("Category Option allowBluetoothA2DP is only available on iOS 10+") - } - case "allowAirPlay": - if #available(iOS 10.0, *) { - return .allowAirPlay - } else { - throw AudioPlayerError.warning("Category Option allowAirPlay is only available on iOS 10+") - } - case "overrideMutedMicrophoneInterruption": - if #available(iOS 14.5, *) { - return .overrideMutedMicrophoneInterruption - } else { - throw AudioPlayerError.warning("Category Option overrideMutedMicrophoneInterruption is only available on iOS 14.5+") - } - default: - throw AudioPlayerError.error("Invalid Category Option \(option)") - } + private static func parseCategoryOption(option: String) throws -> AVAudioSession.CategoryOptions? 
+ { + switch option { + case "mixWithOthers": + return .mixWithOthers + case "duckOthers": + return .duckOthers + case "allowBluetooth": + return .allowBluetooth + case "defaultToSpeaker": + return .defaultToSpeaker + case "interruptSpokenAudioAndMixWithOthers": + return .interruptSpokenAudioAndMixWithOthers + case "allowBluetoothA2DP": + if #available(iOS 10.0, *) { + return .allowBluetoothA2DP + } else { + throw AudioPlayerError.warning( + "Category Option allowBluetoothA2DP is only available on iOS 10+") + } + case "allowAirPlay": + if #available(iOS 10.0, *) { + return .allowAirPlay + } else { + throw AudioPlayerError.warning("Category Option allowAirPlay is only available on iOS 10+") + } + case "overrideMutedMicrophoneInterruption": + if #available(iOS 14.5, *) { + return .overrideMutedMicrophoneInterruption + } else { + throw AudioPlayerError.warning( + "Category Option overrideMutedMicrophoneInterruption is only available on iOS 14.5+") + } + default: + throw AudioPlayerError.error("Invalid Category Option \(option)") } + } } diff --git a/packages/audioplayers_darwin/macos/Classes/AudioContext.swift b/packages/audioplayers_darwin/macos/Classes/AudioContext.swift index 1e4489037..7881c2d00 100644 --- a/packages/audioplayers_darwin/macos/Classes/AudioContext.swift +++ b/packages/audioplayers_darwin/macos/Classes/AudioContext.swift @@ -2,14 +2,14 @@ import MediaPlayer // no-op impl of AudioContext for macos struct AudioContext { - func activateAudioSession(active: Bool) throws { - } + func activateAudioSession(active: Bool) throws { + } - func apply() throws { - throw AudioPlayerError.warning("AudioContext configuration is not available on macOS") - } + func apply() throws { + throw AudioPlayerError.warning("AudioContext configuration is not available on macOS") + } - static func parse(args: [String: Any]) throws -> AudioContext? { - return AudioContext() - } + static func parse(args: [String: Any]) throws -> AudioContext? { + return AudioContext() + } }