Skip to content

Commit

Permalink
Merge pull request #13 from tlenclos/brettpappas-patch-1
Browse files Browse the repository at this point in the history
Update ReactNativeAudioStreaming.m
  • Loading branch information
brettpappas committed Sep 1, 2016
2 parents fa4ad8d + 3cb1243 commit 0c28dec
Showing 1 changed file with 109 additions and 22 deletions.
131 changes: 109 additions & 22 deletions ios/ReactNativeAudioStreaming.m
Original file line number Diff line number Diff line change
Expand Up @@ -22,40 +22,44 @@ - (ReactNativeAudioStreaming *)init
{
self = [super init];
if (self) {
[self setSharedAudioSessionCategory];
self.audioPlayer = [[STKAudioPlayer alloc] initWithOptions:(STKAudioPlayerOptions){ .flushQueueOnSeek = YES }];
[self.audioPlayer setDelegate:self];
[self setSharedAudioSessionCategory];
[self registerAudioInterruptionNotifications];
[self registerRemoteControlEvents];
[self setNowPlayingInfo:true];
self.lastUrlString = @"";

[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(tick:) userInfo:nil repeats:YES];

NSLog(@"AudioPlayer initialized");
}

return self;
}


// Timer callback (scheduled every 0.5s in -init): pushes playback progress
// to JS over the RCT event dispatcher while something is actively playing.
// NOTE(review): this span comes from a scraped diff and contains BOTH the
// pre-merge and post-merge variants of some lines — the two `if` conditions
// below and the two event-body dictionaries. Reconcile manually: the intended
// post-merge code keeps the second `if` (state check) and the second body
// (which adds the "url" key).
-(void) tick:(NSTimer*)timer
{
// Messaging nil would be a safe no-op, but bail out explicitly.
if (!self.audioPlayer) {
return;
}

if (self.audioPlayer.currentlyPlayingQueueItemId != nil) {
if (self.audioPlayer.currentlyPlayingQueueItemId != nil && self.audioPlayer.state == STKAudioPlayerStatePlaying) {
NSNumber *progress = [NSNumber numberWithFloat:self.audioPlayer.progress];
NSNumber *duration = [NSNumber numberWithFloat:self.audioPlayer.duration];
// currentlyPlayingQueueItemId holds the stream URL string queued in -play.
NSString *url = [NSString stringWithString:self.audioPlayer.currentlyPlayingQueueItemId];

[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent" body:@{
@"status": @"STREAMING",
@"progress": progress,
@"duration": duration
}];
@"status": @"STREAMING",
@"progress": progress,
@"duration": duration,
@"url": url,
}];
}
}


- (void)dealloc
{
[self unregisterAudioInterruptionNotifications];
Expand All @@ -71,6 +75,9 @@ - (void)dealloc
if (!self.audioPlayer) {
return;
}

[self activate];

if (self.audioPlayer.state == STKAudioPlayerStatePaused && [self.lastUrlString isEqualToString:streamUrl]) {
[self.audioPlayer resume];
} else {
Expand All @@ -81,13 +88,56 @@ - (void)dealloc
[self setNowPlayingInfo:true];
}

// Exported to JS: seek the current stream to an absolute position in seconds.
// Silently does nothing when no player has been created yet.
RCT_EXPORT_METHOD(seekToTime:(double) seconds)
{
if (self.audioPlayer) {
[self.audioPlayer seekToTime:seconds];
}
}

// Exported to JS: skip forward by `seconds` from the current position.
// Stops playback (and clears the now-playing info) when the jump would pass
// the end of a finite stream.
// Fix: the original compared `duration < newtime` unconditionally, so for a
// stream whose duration is reported as 0 (presumably a live/unknown-length
// stream — confirm against STKAudioPlayer) any forward skip stopped playback.
// Only treat the jump as "past the end" when a positive duration is known.
RCT_EXPORT_METHOD(goForward:(double) seconds)
{
if (!self.audioPlayer) {
return;
}

double newtime = self.audioPlayer.progress + seconds;

if (self.audioPlayer.duration > 0 && self.audioPlayer.duration < newtime) {
[self.audioPlayer stop];
[self setNowPlayingInfo:false];
}
else {
[self.audioPlayer seekToTime:newtime];
}
}

// Exported to JS: skip backward by `seconds`, clamping at the start of the
// stream so we never seek to a negative position.
RCT_EXPORT_METHOD(goBack:(double) seconds)
{
if (!self.audioPlayer) {
return;
}

double target = self.audioPlayer.progress - seconds;
[self.audioPlayer seekToTime:(target < 0 ? 0.0 : target)];
}

// Exported to JS: pause playback, clear the now-playing flag, and release
// the shared audio session so other apps may resume their audio.
RCT_EXPORT_METHOD(pause)
{
if (self.audioPlayer) {
[self.audioPlayer pause];
[self setNowPlayingInfo:false];
[self deactivate];
}
}

Expand All @@ -96,6 +146,7 @@ - (void)dealloc
if (!self.audioPlayer) {
return;
} else {
[self activate];
[self.audioPlayer resume];
[self setNowPlayingInfo:true];
}
Expand All @@ -108,23 +159,32 @@ - (void)dealloc
} else {
[self.audioPlayer stop];
[self setNowPlayingInfo:false];
[self deactivate];
}
}

// Exported to JS: report player status (plus progress/duration/url in the
// post-merge version) through the RCT callback.
// NOTE(review): this span comes from a scraped diff and contains BOTH the
// pre-merge per-branch `callback(...)` calls and the post-merge code that
// accumulates a `status` string and invokes the callback once at the end.
// Reconcile manually: the intended post-merge code keeps the `status = ...`
// assignments and the final callback line only.
RCT_EXPORT_METHOD(getStatus: (RCTResponseSenderBlock) callback)
{
NSString *status = @"STOPPED";
// NOTE(review): these read self.audioPlayer before the nil check below;
// messaging nil yields 0, so both report 0.0 when no player exists.
NSNumber *duration = [NSNumber numberWithFloat:self.audioPlayer.duration];
NSNumber *progress = [NSNumber numberWithFloat:self.audioPlayer.progress];

if (!self.audioPlayer) {
callback(@[[NSNull null], @{@"status": @"ERROR"}]);
} else if ([self.audioPlayer state] == STKAudioPlayerStatePlaying) {
callback(@[[NSNull null], @{@"status": @"PLAYING"}]);
} else if ([self.audioPlayer state] == STKAudioPlayerStateBuffering) {
callback(@[[NSNull null], @{@"status": @"BUFFERING"}]);
} else {
status = @"ERROR";
}
else if ([self.audioPlayer state] == STKAudioPlayerStatePlaying) {
status = @"PLAYING";
}
else if ([self.audioPlayer state] == STKAudioPlayerStatePaused) {
status = @"PAUSED";
}
else if ([self.audioPlayer state] == STKAudioPlayerStateBuffering) {
status = @"BUFFERING";
}

callback(@[[NSNull null], @{@"status": status, @"progress": progress, @"duration": duration, @"url": self.lastUrlString}]);
}


#pragma mark - StreamingKit Audio Player


Expand Down Expand Up @@ -161,20 +221,23 @@ - (void)audioPlayer:(STKAudioPlayer *)audioPlayer didReadStreamMetadata:(NSDicti

- (void)audioPlayer:(STKAudioPlayer *)player stateChanged:(STKAudioPlayerState)state previousState:(STKAudioPlayerState)previousState
{
NSNumber *duration = [NSNumber numberWithFloat:self.audioPlayer.duration];
NSNumber *progress = [NSNumber numberWithFloat:self.audioPlayer.progress];

switch (state) {
case STKAudioPlayerStatePlaying:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"PLAYING"}];
body:@{@"status": @"PLAYING", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStatePaused:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"PAUSED"}];
body:@{@"status": @"PAUSED", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStateStopped:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"STOPPED"}];
body:@{@"status": @"STOPPED", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStateBuffering:
Expand All @@ -195,19 +258,43 @@ - (void)audioPlayer:(STKAudioPlayer *)player stateChanged:(STKAudioPlayerState)s

#pragma mark - Audio Session


- (void)setSharedAudioSessionCategory
// Activates the shared AVAudioSession and selects the Playback category
// (required for background audio). Called before starting/resuming playback.
// Fix: the original reused one NSError across both calls and inspected the
// pointer once at the end; per Cocoa convention the error out-param may be
// left untouched on success, so check each call's BOOL return instead.
- (void)activate
{
NSError *error = nil;

if (![[AVAudioSession sharedInstance] setActive:YES error:&error]) {
NSLog(@"Error activating audio session! %@", [error description]);
}

error = nil;
if (![[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&error]) {
NSLog(@"Error setting category! %@", [error description]);
}
}

// Releases the shared AVAudioSession (on pause/stop) so other apps can
// resume their audio.
// Fixes: check the BOOL return rather than the error pointer (which may be
// untouched on success), and correct the misleading log message — the
// original said "Error setting category!" although no category is set here.
- (void)deactivate
{
NSError *error = nil;

if (![[AVAudioSession sharedInstance] setActive:NO error:&error]) {
NSLog(@"Error deactivating audio session! %@", [error description]);
}
}

// Configures the shared audio session at init time: records whether another
// app is already playing audio, then deactivates the session and selects the
// Ambient category (mixes with other audio). The Playback category for
// background audio is applied later by -activate.
// Fix: the original reused one NSError across both calls and inspected the
// pointer once at the end; per Cocoa convention the error out-param may be
// left untouched on success, so check each call's BOOL return instead.
- (void)setSharedAudioSessionCategory
{
NSError *error = nil;
self.isPlayingWithOthers = [[AVAudioSession sharedInstance] isOtherAudioPlaying];

if (![[AVAudioSession sharedInstance] setActive:NO error:&error]) {
NSLog(@"Error deactivating audio session! %@", [error description]);
}

error = nil;
if (![[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:&error]) {
NSLog(@"Error setting category! %@", [error description]);
}
}

- (void)registerAudioInterruptionNotifications
{
// Register for audio interrupt notifications
Expand Down

0 comments on commit 0c28dec

Please sign in to comment.