diff --git a/README.md b/README.md
index 4546a4df72..1527448758 100644
--- a/README.md
+++ b/README.md
@@ -272,6 +272,7 @@ var styles = StyleSheet.create({
 * [minLoadRetryCount](#minLoadRetryCount)
 * [muted](#muted)
 * [paused](#paused)
+* [pictureInPicture](#pictureinpicture)
 * [playInBackground](#playinbackground)
 * [playWhenInactive](#playwheninactive)
 * [poster](#poster)
@@ -301,14 +302,17 @@ var styles = StyleSheet.create({
 * [onFullscreenPlayerDidDismiss](#onfullscreenplayerdiddismiss)
 * [onLoad](#onload)
 * [onLoadStart](#onloadstart)
+* [onPictureInPictureStatusChanged](#onpictureinpicturestatuschanged)
 * [onProgress](#onprogress)
 * [onSeek](#onseek)
+* [onRestoreUserInterfaceForPictureInPictureStop](#onrestoreuserinterfaceforpictureinpicturestop)
 * [onTimedMetadata](#ontimedmetadata)
 
 ### Methods
 * [dismissFullscreenPlayer](#dismissfullscreenplayer)
 * [presentFullscreenPlayer](#presentfullscreenplayer)
 * [save](#save)
+* [restoreUserInterfaceForPictureInPictureStop](#restoreuserinterfaceforpictureinpicturestop)
 * [seek](#seek)
 
 ### Configurable props
@@ -502,6 +506,13 @@ Controls whether the media is paused
 
 Platforms: all
 
+#### pictureInPicture
+Determine whether the media should be played as picture in picture.
+* **false (default)** - Don't play as picture in picture
+* **true** - Play the media as picture in picture
+
+Platforms: iOS
+
 #### playInBackground
 Determine whether the media should continue playing while the app is in the background. This allows customers to continue listening to the audio.
 * **false (default)** - Don't continue playing the media
@@ -942,6 +953,22 @@ Example:
 
 Platforms: all
 
+#### onPictureInPictureStatusChanged
+Callback function that is called when picture in picture becomes active or inactive.
+
+Property | Type | Description
+--- | --- | ---
+isActive | boolean | Boolean indicating whether picture in picture is active
+
+Example:
+```
+{
+  isActive: true
+}
+```
+
+Platforms: iOS
+
 #### onProgress
 Callback function that is called every progressUpdateInterval seconds with info about which position the media is currently playing.
 
@@ -985,6 +1012,13 @@ Both the currentTime & seekTime are reported because the video player may not se
 
 Platforms: Android ExoPlayer, Android MediaPlayer, iOS, Windows UWP
 
+#### onRestoreUserInterfaceForPictureInPictureStop
+Callback function that corresponds to Apple's [`restoreUserInterfaceForPictureInPictureStopWithCompletionHandler`](https://developer.apple.com/documentation/avkit/avpictureinpicturecontrollerdelegate/1614703-pictureinpicturecontroller?language=objc). Call `restoreUserInterfaceForPictureInPictureStopCompleted` inside this function when you are done restoring the user interface.
+
+Payload: none
+
+Platforms: iOS
+
 #### onTimedMetadata
 Callback function that is called when timed metadata becomes available
 
@@ -1073,6 +1107,18 @@ Future:
 
 Platforms: iOS
 
+#### restoreUserInterfaceForPictureInPictureStopCompleted
+`restoreUserInterfaceForPictureInPictureStopCompleted(restored)`
+
+This function corresponds to the completion handler in Apple's [restoreUserInterfaceForPictureInPictureStop](https://developer.apple.com/documentation/avkit/avpictureinpicturecontrollerdelegate/1614703-pictureinpicturecontroller?language=objc). IMPORTANT: This function must be called after `onRestoreUserInterfaceForPictureInPictureStop` is called.
+
+Example:
+```
+this.player.restoreUserInterfaceForPictureInPictureStopCompleted(true);
+```
+
+Platforms: iOS
+
 #### seek()
 `seek(seconds)`
 
diff --git a/Video.js b/Video.js
index 65fda39e06..945fda4375 100644
--- a/Video.js
+++ b/Video.js
@@ -78,6 +78,10 @@ export default class Video extends Component {
     return await NativeModules.VideoManager.save(options, findNodeHandle(this._root));
   }
 
+  restoreUserInterfaceForPictureInPictureStopCompleted = (restored) => {
+    this.setNativeProps({ restoreUserInterfaceForPIPStopCompletionHandler: restored });
+  };
+
   _assignRoot = (component) => {
     this._root = component;
   };
@@ -198,6 +202,18 @@ export default class Video extends Component {
     }
   };
 
+  _onPictureInPictureStatusChanged = (event) => {
+    if (this.props.onPictureInPictureStatusChanged) {
+      this.props.onPictureInPictureStatusChanged(event.nativeEvent);
+    }
+  };
+
+  _onRestoreUserInterfaceForPictureInPictureStop = (event) => {
+    if (this.props.onRestoreUserInterfaceForPictureInPictureStop) {
+      this.props.onRestoreUserInterfaceForPictureInPictureStop();
+    }
+  };
+
   _onAudioFocusChanged = (event) => {
     if (this.props.onAudioFocusChanged) {
       this.props.onAudioFocusChanged(event.nativeEvent);
@@ -282,6 +298,8 @@ export default class Video extends Component {
       onPlaybackRateChange: this._onPlaybackRateChange,
       onAudioFocusChanged: this._onAudioFocusChanged,
       onAudioBecomingNoisy: this._onAudioBecomingNoisy,
+      onPictureInPictureStatusChanged: this._onPictureInPictureStatusChanged,
+      onRestoreUserInterfaceForPictureInPictureStop: this._onRestoreUserInterfaceForPictureInPictureStop,
     });
 
     const posterStyle = {
@@ -405,6 +423,7 @@ Video.propTypes = {
   }),
   stereoPan: PropTypes.number,
   rate: PropTypes.number,
+  pictureInPicture: PropTypes.bool,
   playInBackground: PropTypes.bool,
   playWhenInactive: PropTypes.bool,
   ignoreSilentSwitch: PropTypes.oneOf(['ignore', 'obey']),
@@ -436,6 +455,8 @@ Video.propTypes = {
   onPlaybackRateChange: PropTypes.func,
   onAudioFocusChanged: PropTypes.func,
   onAudioBecomingNoisy: PropTypes.func,
+  onPictureInPictureStatusChanged: PropTypes.func,
+  onRestoreUserInterfaceForPictureInPictureStop: PropTypes.func,
   onExternalPlaybackChange: PropTypes.func,
 
   /* Required by react-native */
diff --git a/ios/Video/RCTVideo.h b/ios/Video/RCTVideo.h
index 05527a57fe..ad98db7ffa 100644
--- a/ios/Video/RCTVideo.h
+++ b/ios/Video/RCTVideo.h
@@ -16,7 +16,7 @@
 #if __has_include(<react-native-video/RCTVideoCache.h>)
 @interface RCTVideo : UIView <RCTVideoPlayerViewControllerDelegate, DVAssetLoaderDelegatesDelegate>
 #else
-@interface RCTVideo : UIView <RCTVideoPlayerViewControllerDelegate>
+@interface RCTVideo : UIView <RCTVideoPlayerViewControllerDelegate, AVPictureInPictureControllerDelegate>
 #endif
 
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoLoadStart;
@@ -38,6 +38,8 @@
 @property (nonatomic, copy) RCTBubblingEventBlock onPlaybackResume;
 @property (nonatomic, copy) RCTBubblingEventBlock onPlaybackRateChange;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoExternalPlaybackChange;
+@property (nonatomic, copy) RCTBubblingEventBlock onPictureInPictureStatusChanged;
+@property (nonatomic, copy) RCTBubblingEventBlock onRestoreUserInterfaceForPictureInPictureStop;
 
 - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher NS_DESIGNATED_INITIALIZER;
 
diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m
index c4a055d25d..3b6d4e96e9 100644
--- a/ios/Video/RCTVideo.m
+++ b/ios/Video/RCTVideo.m
@@ -27,6 +27,8 @@ @implementation RCTVideo
   AVPlayer *_player;
   AVPlayerItem *_playerItem;
   NSDictionary *_source;
+  AVPictureInPictureController *_pipController;
+  void (^__strong _Nonnull _restoreUserInterfaceForPIPStopCompletionHandler)(BOOL);
   BOOL _playerItemObserversSet;
   BOOL _playerBufferEmpty;
   AVPlayerLayer *_playerLayer;
@@ -64,6 +66,7 @@ @implementation RCTVideo
   BOOL _playbackStalled;
   BOOL _playInBackground;
   BOOL _playWhenInactive;
+  BOOL _pictureInPicture;
   NSString * _ignoreSilentSwitch;
   NSString * _resizeMode;
   BOOL _fullscreen;
@@ -100,7 +103,9 @@ - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
     _playInBackground = false;
     _allowsExternalPlayback = YES;
     _playWhenInactive = false;
+    _pictureInPicture = false;
     _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey
+    _restoreUserInterfaceForPIPStopCompletionHandler = NULL;
 #if __has_include(<react-native-video/RCTVideoCache.h>)
     _videoCache = [RCTVideoCache sharedInstance];
 #endif
@@ -786,6 +791,40 @@ - (void)setPlayWhenInactive:(BOOL)playWhenInactive
   _playWhenInactive = playWhenInactive;
 }
 
+- (void)setPictureInPicture:(BOOL)pictureInPicture
+{
+  if (_pictureInPicture == pictureInPicture) {
+    return;
+  }
+
+  _pictureInPicture = pictureInPicture;
+  if (_pipController && _pictureInPicture && ![_pipController isPictureInPictureActive]) {
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [_pipController startPictureInPicture];
+    });
+  } else if (_pipController && !_pictureInPicture && [_pipController isPictureInPictureActive]) {
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [_pipController stopPictureInPicture];
+    });
+  }
+}
+
+- (void)setRestoreUserInterfaceForPIPStopCompletionHandler:(BOOL)restore
+{
+  if (_restoreUserInterfaceForPIPStopCompletionHandler != NULL) {
+    _restoreUserInterfaceForPIPStopCompletionHandler(restore);
+    _restoreUserInterfaceForPIPStopCompletionHandler = NULL;
+  }
+}
+
+- (void)setupPipController {
+  if (!_pipController && _playerLayer && [AVPictureInPictureController isPictureInPictureSupported]) {
+    // Create new controller passing reference to the AVPlayerLayer
+    _pipController = [[AVPictureInPictureController alloc] initWithPlayerLayer:_playerLayer];
+    _pipController.delegate = self;
+  }
+}
+
 - (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch
 {
   _ignoreSilentSwitch = ignoreSilentSwitch;
@@ -1240,6 +1279,8 @@ - (void)usePlayerLayer
 
     [self.layer addSublayer:_playerLayer];
     self.layer.needsDisplayOnBoundsChange = YES;
+
+    [self setupPipController];
   }
 }
 
@@ -1496,4 +1537,42 @@ - (NSString *)cacheDirectoryPath {
   return array[0];
 }
 
+#pragma mark - Picture in Picture
+
+- (void)pictureInPictureControllerDidStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
+  if (self.onPictureInPictureStatusChanged) {
+    self.onPictureInPictureStatusChanged(@{
+      @"isActive": [NSNumber numberWithBool:false]
+    });
+  }
+}
+
+- (void)pictureInPictureControllerDidStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
+  if (self.onPictureInPictureStatusChanged) {
+    self.onPictureInPictureStatusChanged(@{
+      @"isActive": [NSNumber numberWithBool:true]
+    });
+  }
+}
+
+- (void)pictureInPictureControllerWillStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
+
+}
+
+- (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
+
+}
+
+- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController failedToStartPictureInPictureWithError:(NSError *)error {
+
+}
+
+- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:(void (^)(BOOL))completionHandler {
+  NSAssert(_restoreUserInterfaceForPIPStopCompletionHandler == NULL, @"restoreUserInterfaceForPIPStopCompletionHandler was not called after picture in picture was exited.");
@"restoreUserInterfaceForPIPStopCompletionHandler was not called after picture in picture was exited."); + if (self.onRestoreUserInterfaceForPictureInPictureStop) { + self.onRestoreUserInterfaceForPictureInPictureStop(@{}); + } + _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler; +} + @end diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m index 1ca1b5b402..a608f32e6b 100644 --- a/ios/Video/RCTVideoManager.m +++ b/ios/Video/RCTVideoManager.m @@ -32,6 +32,7 @@ - (dispatch_queue_t)methodQueue RCT_EXPORT_VIEW_PROPERTY(volume, float); RCT_EXPORT_VIEW_PROPERTY(playInBackground, BOOL); RCT_EXPORT_VIEW_PROPERTY(playWhenInactive, BOOL); +RCT_EXPORT_VIEW_PROPERTY(pictureInPicture, BOOL); RCT_EXPORT_VIEW_PROPERTY(ignoreSilentSwitch, NSString); RCT_EXPORT_VIEW_PROPERTY(rate, float); RCT_EXPORT_VIEW_PROPERTY(seek, NSDictionary); @@ -42,6 +43,7 @@ - (dispatch_queue_t)methodQueue RCT_EXPORT_VIEW_PROPERTY(filter, NSString); RCT_EXPORT_VIEW_PROPERTY(filterEnabled, BOOL); RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float); +RCT_EXPORT_VIEW_PROPERTY(restoreUserInterfaceForPIPStopCompletionHandler, BOOL); /* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */ RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock); RCT_EXPORT_VIEW_PROPERTY(onVideoLoad, RCTBubblingEventBlock); @@ -77,6 +79,8 @@ - (dispatch_queue_t)methodQueue } }]; } +RCT_EXPORT_VIEW_PROPERTY(onPictureInPictureStatusChanged, RCTBubblingEventBlock); +RCT_EXPORT_VIEW_PROPERTY(onRestoreUserInterfaceForPictureInPictureStop, RCTBubblingEventBlock); - (NSDictionary *)constantsToExport {