Skip to content
This repository was archived by the owner on Feb 22, 2023. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
2971c85
Pause/resume video recording for Android
huulbaek Mar 21, 2019
3e87cac
Specify type
bparrishMines Aug 1, 2019
d99f9d0
Merge branch 'master' of github.com:flutter/plugins into huulbaek/pau…
bparrishMines Aug 1, 2019
474604d
Merge branch 'master' of github.com:flutter/plugins into huulbaek/pau…
bparrishMines Aug 5, 2019
423adcd
Add pausing and resuming to example app
bparrishMines Aug 6, 2019
0214093
iOS side of pausing/resuming
bparrishMines Aug 6, 2019
7216d4a
Merge branch 'master' of github.com:flutter/plugins into huulbaek/pau…
bparrishMines Aug 7, 2019
113cd55
More documentation
bparrishMines Aug 7, 2019
5664547
Version bump
bparrishMines Aug 7, 2019
9210c26
Merge branch 'master' of github.com:flutter/plugins into huulbaek/pau…
bparrishMines Aug 12, 2019
3f47c60
Add video pausing and resuming
bparrishMines Aug 14, 2019
5baee78
get pausing and recording to work for no audio
bparrishMines Aug 15, 2019
da26df1
It works
bparrishMines Aug 16, 2019
b56d1fb
Merge branch 'pauseresume' of github.com:huulbaek/plugins into huulba…
bparrishMines Aug 16, 2019
542ee3d
Formatting
bparrishMines Aug 16, 2019
5023773
Add test for pausing and resuming
bparrishMines Aug 19, 2019
6dbbf8d
Merge branch 'master' into pauseresume
bparrishMines Aug 19, 2019
ca47f38
Call success outside try catch block
bparrishMines Aug 19, 2019
c758b26
formatting
bparrishMines Aug 19, 2019
70e1b13
Merge branch 'pauseresume' of github.com:huulbaek/plugins into huulba…
bparrishMines Aug 19, 2019
8ff306f
Disable audio in test and call result on iOS
bparrishMines Aug 19, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions packages/camera/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
## 0.5.4

* Add feature to pause and resume video recording.

## 0.5.3+1

* Fix too large request code for FragmentActivity users.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -388,6 +388,38 @@ public void stopVideoRecording(@NonNull final Result result) {
}
}

/**
 * Pauses an in-progress video recording.
 *
 * <p>Completes {@code result} with success when no recording is active (a no-op) or when the
 * recorder paused cleanly; reports a "videoRecordingFailed" error otherwise. Success is
 * deliberately reported outside the try block.
 *
 * <p>NOTE(review): {@code MediaRecorder#pause()} requires API 24 (Android N) — confirm a
 * minSdk or runtime version guard exists before this is reachable.
 *
 * @param result channel result used to report the outcome to the Dart side
 */
public void pauseVideoRecording(@NonNull final Result result) {
  if (recordingVideo) {
    try {
      mediaRecorder.pause();
    } catch (IllegalStateException e) {
      result.error("videoRecordingFailed", e.getMessage(), null);
      return;
    }
  }
  result.success(null);
}

/**
 * Resumes a previously paused video recording.
 *
 * <p>Completes {@code result} with success when no recording is active (a no-op) or when the
 * recorder resumed cleanly; reports a "videoRecordingFailed" error otherwise. Success is
 * deliberately reported outside the try block.
 *
 * <p>NOTE(review): {@code MediaRecorder#resume()} requires API 24 (Android N) — confirm a
 * minSdk or runtime version guard exists before this is reachable.
 *
 * @param result channel result used to report the outcome to the Dart side
 */
public void resumeVideoRecording(@NonNull final Result result) {
  if (recordingVideo) {
    try {
      mediaRecorder.resume();
    } catch (IllegalStateException e) {
      result.error("videoRecordingFailed", e.getMessage(), null);
      return;
    }
  }
  result.success(null);
}

public void startPreview() throws CameraAccessException {
createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,16 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result)
camera.stopVideoRecording(result);
break;
}
case "pauseVideoRecording":
{
camera.pauseVideoRecording(result);
break;
}
case "resumeVideoRecording":
{
camera.resumeVideoRecording(result);
break;
}
case "startImageStream":
{
try {
Expand Down
53 changes: 53 additions & 0 deletions packages/camera/example/lib/main.dart
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,19 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
? onVideoRecordButtonPressed
: null,
),
IconButton(
icon: controller != null && controller.value.isRecordingPaused
? Icon(Icons.play_arrow)
: Icon(Icons.pause),
color: Colors.blue,
onPressed: controller != null &&
controller.value.isInitialized &&
controller.value.isRecordingVideo
? (controller != null && controller.value.isRecordingPaused
? onResumeButtonPressed
: onPauseButtonPressed)
: null,
),
IconButton(
icon: const Icon(Icons.stop),
color: Colors.red,
Expand Down Expand Up @@ -316,6 +329,20 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
});
}

/// Handles a tap on the pause button: pauses recording, refreshes the UI,
/// and confirms the action in a snack bar.
void onPauseButtonPressed() async {
  await pauseVideoRecording();
  if (mounted) setState(() {});
  showInSnackBar('Video recording paused');
}

/// Handles a tap on the resume button: resumes recording, refreshes the UI,
/// and confirms the action in a snack bar.
void onResumeButtonPressed() async {
  await resumeVideoRecording();
  if (mounted) setState(() {});
  showInSnackBar('Video recording resumed');
}

Future<String> startVideoRecording() async {
if (!controller.value.isInitialized) {
showInSnackBar('Error: select a camera first.');
Expand Down Expand Up @@ -357,6 +384,32 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
await _startVideoPlayer();
}

/// Asks the camera controller to pause the current video recording.
///
/// Does nothing when no recording is in progress. A [CameraException] from
/// the controller is shown to the user rather than rethrown.
Future<void> pauseVideoRecording() async {
  if (!controller.value.isRecordingVideo) {
    return;
  }

  try {
    await controller.pauseVideoRecording();
  } on CameraException catch (e) {
    _showCameraException(e);
  }
}

/// Asks the camera controller to resume a paused video recording.
///
/// Does nothing when no recording is in progress. A [CameraException] from
/// the controller is shown to the user rather than rethrown.
Future<void> resumeVideoRecording() async {
  if (!controller.value.isRecordingVideo) {
    return;
  }

  try {
    await controller.resumeVideoRecording();
  } on CameraException catch (e) {
    _showCameraException(e);
  }
}

Future<void> _startVideoPlayer() async {
final VideoPlayerController vcontroller =
VideoPlayerController.file(File(videoPath));
Expand Down
56 changes: 56 additions & 0 deletions packages/camera/example/test_driver/camera.dart
Original file line number Diff line number Diff line change
Expand Up @@ -143,4 +143,60 @@ void main() {
}
}
});

// Integration test: records a clip with two pause/resume cycles and checks
// that the time spent paused is excluded from the resulting video's duration.
test('Pause and resume video recording', () async {
// Skip gracefully on devices without cameras (e.g. some emulators/CI).
final List<CameraDescription> cameras = await availableCameras();
if (cameras.isEmpty) {
return;
}

// Audio is disabled so the test does not need microphone permission.
final CameraController controller = CameraController(
cameras[0],
ResolutionPreset.low,
enableAudio: false,
);

await controller.initialize();
await controller.prepareForVideoRecording();

final String filePath =
'${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.mp4';

int startPause;
// Accumulated wall-clock milliseconds spent paused across both cycles.
int timePaused = 0;

await controller.startVideoRecording(filePath);
final int recordingStart = DateTime.now().millisecondsSinceEpoch;
// NOTE(review): sleep() blocks the Dart isolate; presumably intentional so
// the platform side keeps capturing while the test waits — confirm this does
// not stall platform-channel callbacks on either platform.
sleep(const Duration(milliseconds: 500));

// First pause/resume cycle; measure how long recording stayed paused.
await controller.pauseVideoRecording();
startPause = DateTime.now().millisecondsSinceEpoch;
sleep(const Duration(milliseconds: 500));
await controller.resumeVideoRecording();
timePaused += DateTime.now().millisecondsSinceEpoch - startPause;

sleep(const Duration(milliseconds: 500));

// Second pause/resume cycle.
await controller.pauseVideoRecording();
startPause = DateTime.now().millisecondsSinceEpoch;
sleep(const Duration(milliseconds: 500));
await controller.resumeVideoRecording();
timePaused += DateTime.now().millisecondsSinceEpoch - startPause;

sleep(const Duration(milliseconds: 500));

await controller.stopVideoRecording();
// Total wall-clock duration of the recording session, pauses included.
final int recordingTime =
DateTime.now().millisecondsSinceEpoch - recordingStart;

// Read the actual duration of the written file via the video_player plugin.
final File videoFile = File(filePath);
final VideoPlayerController videoController = VideoPlayerController.file(
videoFile,
);
await videoController.initialize();
final int duration = videoController.value.duration.inMilliseconds;
await videoController.dispose();

// If pausing worked, the file's duration is strictly shorter than the
// wall-clock session time minus the measured paused time.
expect(duration, lessThan(recordingTime - timePaused));
});
}
110 changes: 105 additions & 5 deletions packages/camera/ios/Classes/CameraPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -180,10 +180,18 @@ @interface FLTCam : NSObject <FlutterTexture,
@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
@property(assign, nonatomic) BOOL isRecording;
@property(assign, nonatomic) BOOL isRecordingPaused;
@property(assign, nonatomic) BOOL videoIsDisconnected;
@property(assign, nonatomic) BOOL audioIsDisconnected;
@property(assign, nonatomic) BOOL isAudioSetup;
@property(assign, nonatomic) BOOL isStreamingImages;
@property(assign, nonatomic) ResolutionPreset resolutionPreset;
@property(assign, nonatomic) CMTime lastVideoSampleTime;
@property(assign, nonatomic) CMTime lastAudioSampleTime;
@property(assign, nonatomic) CMTime videoTimeOffset;
@property(assign, nonatomic) CMTime audioTimeOffset;
@property(nonatomic) CMMotionManager *motionManager;
@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
Expand Down Expand Up @@ -417,25 +425,89 @@ - (void)captureOutput:(AVCaptureOutput *)output
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
if (_isRecording) {
if (_isRecording && !_isRecordingPaused) {
if (_videoWriter.status == AVAssetWriterStatusFailed) {
_eventSink(@{
@"event" : @"error",
@"errorDescription" : [NSString stringWithFormat:@"%@", _videoWriter.error]
});
return;
}
CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

CFRetain(sampleBuffer);
CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

if (_videoWriter.status != AVAssetWriterStatusWriting) {
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:lastSampleTime];
[_videoWriter startSessionAtSourceTime:currentSampleTime];
}

if (output == _captureVideoOutput) {
[self newVideoSample:sampleBuffer];
} else if (output == _audioOutput) {
if (_videoIsDisconnected) {
_videoIsDisconnected = NO;

if (_videoTimeOffset.value == 0) {
_videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
} else {
CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
_videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
}

return;
}

_lastVideoSampleTime = currentSampleTime;

CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
[_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
} else {
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);

if (dur.value > 0) {
currentSampleTime = CMTimeAdd(currentSampleTime, dur);
}

if (_audioIsDisconnected) {
_audioIsDisconnected = NO;

if (_audioTimeOffset.value == 0) {
_audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
} else {
CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
_audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
}

return;
}

_lastAudioSampleTime = currentSampleTime;

if (_audioTimeOffset.value != 0) {
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
}

[self newAudioSample:sampleBuffer];
}

CFRelease(sampleBuffer);
}
}

// Returns a copy of `sample` with every timing entry (presentation and decode
// timestamps) shifted earlier by `offset`, so audio written after a pause
// lines up with the writer's gap-free timeline.
// NOTE: CMSampleBufferCreateCopyWithNewTiming follows the CF Create rule —
// the returned buffer is +1 retained and the caller must CFRelease it.
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
CMItemCount count;
// First call with a NULL array only queries the number of timing entries.
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
// Second call fills pInfo with the actual timing entries.
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++) {
// Shift both timestamps; subtracting from an invalid CMTime stays invalid.
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
Expand Down Expand Up @@ -526,6 +598,11 @@ - (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result
return;
}
_isRecording = YES;
_isRecordingPaused = NO;
_videoTimeOffset = CMTimeMake(0, 1);
_audioTimeOffset = CMTimeMake(0, 1);
_videoIsDisconnected = NO;
_audioIsDisconnected = NO;
result(nil);
} else {
_eventSink(@{@"event" : @"error", @"errorDescription" : @"Video is already recording!"});
Expand Down Expand Up @@ -556,6 +633,16 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
}
}

// Suspends writing of captured samples. Flagging both streams as disconnected
// makes the capture callback recompute its time offsets when samples flow
// again after a resume, so the paused interval is dropped from the output.
- (void)pauseVideoRecording {
  self.isRecordingPaused = YES;
  self.videoIsDisconnected = YES;
  self.audioIsDisconnected = YES;
}

// Resumes writing of captured samples. The disconnected flags set by
// -pauseVideoRecording are intentionally left as-is; the capture callback
// clears them when it processes the next sample and updates the offsets.
- (void)resumeVideoRecording {
  self.isRecordingPaused = NO;
}

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
if (!_isStreamingImages) {
FlutterEventChannel *eventChannel =
Expand Down Expand Up @@ -608,6 +695,13 @@ - (BOOL)setupWriterForPath:(NSString *)path {
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];

_videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
sourcePixelBufferAttributes:@{
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)
}];

NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;

Expand Down Expand Up @@ -777,6 +871,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
} else if ([@"stopImageStream" isEqualToString:call.method]) {
[_camera stopImageStream];
result(nil);
} else if ([@"pauseVideoRecording" isEqualToString:call.method]) {
[_camera pauseVideoRecording];
result(nil);
} else if ([@"resumeVideoRecording" isEqualToString:call.method]) {
[_camera resumeVideoRecording];
result(nil);
} else {
NSDictionary *argsMap = call.arguments;
NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;
Expand Down
Loading