4 changes: 4 additions & 0 deletions packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
## 0.10.1

* Implements option to also stream when recording a video.

## 0.10.0+4

* Removes usage of `_ambiguate` method in example.
32 changes: 19 additions & 13 deletions packages/camera/camera/lib/src/camera_controller.dart
@@ -459,12 +459,6 @@ class CameraController extends ValueNotifier<CameraValue> {
assert(defaultTargetPlatform == TargetPlatform.android ||
defaultTargetPlatform == TargetPlatform.iOS);
_throwIfNotInitialized('stopImageStream');
if (value.isRecordingVideo) {
throw CameraException(
'A video recording is already started.',
'stopImageStream was called while a video is being recorded.',
);
}
if (!value.isStreamingImages) {
throw CameraException(
'No camera is streaming images',
@@ -483,28 +477,35 @@

/// Start a video recording.
///
/// You may optionally pass an [onAvailable] callback to also have the
/// video frames streamed to this callback.
///
/// The video is returned as a [XFile] after calling [stopVideoRecording].
/// Throws a [CameraException] if the capture fails.
Future<void> startVideoRecording() async {
Future<void> startVideoRecording(
{onLatestImageAvailable? onAvailable}) async {
_throwIfNotInitialized('startVideoRecording');
if (value.isRecordingVideo) {
throw CameraException(
'A video recording is already started.',
'startVideoRecording was called when a recording is already started.',
);
}
if (value.isStreamingImages) {
throw CameraException(
'A camera has started streaming images.',
'startVideoRecording was called while a camera was streaming images.',
);

Function(CameraImageData image)? streamCallback;
if (onAvailable != null) {
streamCallback = (CameraImageData imageData) {
onAvailable(CameraImage.fromPlatformInterface(imageData));
};
}

try {
await CameraPlatform.instance.startVideoRecording(_cameraId);
await CameraPlatform.instance.startVideoCapturing(
VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
value = value.copyWith(
isRecordingVideo: true,
isRecordingPaused: false,
isStreamingImages: onAvailable != null,
recordingOrientation: Optional<DeviceOrientation>.of(
value.lockedCaptureOrientation ?? value.deviceOrientation));
} on PlatformException catch (e) {
@@ -523,6 +524,11 @@ class CameraController extends ValueNotifier<CameraValue> {
'stopVideoRecording was called when no video is recording.',
);
}

if (value.isStreamingImages) {
stopImageStream();
}

try {
final XFile file =
await CameraPlatform.instance.stopVideoRecording(_cameraId);
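For readers skimming the diff, here is a hedged usage sketch of the controller API change above. It is not part of the PR: the `recordWithStream` helper, the frame counter, and the five-second delay are illustrative assumptions, and it presumes the caller has already created and initialized a `CameraController`. Per the changes to `startVideoRecording` and `stopVideoRecording`, passing `onAvailable` also starts an image stream, and stopping the recording stops that stream again.

```dart
import 'package:camera/camera.dart';

/// Records a short clip while also receiving streamed frames.
/// Assumes [controller] was created and initialized by the caller.
Future<void> recordWithStream(CameraController controller) async {
  int frameCount = 0;

  // New in camera 0.10.1: the optional onAvailable callback also starts
  // an image stream for the duration of the recording.
  await controller.startVideoRecording(
    onAvailable: (CameraImage image) {
      frameCount++;
    },
  );

  await Future<void>.delayed(const Duration(seconds: 5));

  // stopVideoRecording() also stops the image stream started above.
  final XFile video = await controller.stopVideoRecording();
  print('Saved ${video.path} after receiving $frameCount frames.');
}
```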
10 changes: 5 additions & 5 deletions packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
Dart.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.10.0+4
version: 0.10.1

environment:
sdk: ">=2.14.0 <3.0.0"
@@ -21,10 +21,10 @@ flutter:
default_package: camera_web

dependencies:
camera_android: ^0.10.0
camera_avfoundation: ^0.9.7+1
camera_platform_interface: ^2.2.0
camera_web: ^0.3.0
camera_android: ^0.10.1
camera_avfoundation: ^0.10.0
camera_platform_interface: ^2.3.1
camera_web: ^0.3.1
flutter:
sdk: flutter
flutter_plugin_android_lifecycle: ^2.0.2
63 changes: 44 additions & 19 deletions packages/camera/camera/test/camera_image_stream_test.dart
@@ -130,7 +130,7 @@ void main() {
);
});

test('stopImageStream() throws $CameraException when recording videos',
test('stopImageStream() throws $CameraException when not streaming images',
() async {
final CameraController cameraController = CameraController(
const CameraDescription(
@@ -140,50 +140,61 @@
ResolutionPreset.max);
await cameraController.initialize();

await cameraController.startImageStream((CameraImage image) => null);
cameraController.value =
cameraController.value.copyWith(isRecordingVideo: true);
expect(
cameraController.stopImageStream,
throwsA(isA<CameraException>().having(
(CameraException error) => error.description,
'A video recording is already started.',
'stopImageStream was called while a video is being recorded.',
'No camera is streaming images',
'stopImageStream was called when no camera is streaming images.',
)));
});

test('stopImageStream() throws $CameraException when not streaming images',
() async {
test('stopImageStream() intended behaviour', () async {
final CameraController cameraController = CameraController(
const CameraDescription(
name: 'cam',
lensDirection: CameraLensDirection.back,
sensorOrientation: 90),
ResolutionPreset.max);
await cameraController.initialize();
await cameraController.startImageStream((CameraImage image) => null);
await cameraController.stopImageStream();

expect(
cameraController.stopImageStream,
throwsA(isA<CameraException>().having(
(CameraException error) => error.description,
'No camera is streaming images',
'stopImageStream was called when no camera is streaming images.',
)));
expect(mockPlatform.streamCallLog,
<String>['onStreamedFrameAvailable', 'listen', 'cancel']);
});

test('stopImageStream() intended behaviour', () async {
test('startVideoRecording() can stream images', () async {
final CameraController cameraController = CameraController(
const CameraDescription(
name: 'cam',
lensDirection: CameraLensDirection.back,
sensorOrientation: 90),
ResolutionPreset.max);

await cameraController.initialize();

cameraController.startVideoRecording(
onAvailable: (CameraImage image) => null);

expect(mockPlatform.streamCallLog,
<String>['startVideoRecording with stream']);
});

test('startVideoRecording() by default does not stream', () async {
final CameraController cameraController = CameraController(
const CameraDescription(
name: 'cam',
lensDirection: CameraLensDirection.back,
sensorOrientation: 90),
ResolutionPreset.max);

await cameraController.initialize();
await cameraController.startImageStream((CameraImage image) => null);
await cameraController.stopImageStream();

cameraController.startVideoRecording();

expect(mockPlatform.streamCallLog,
<String>['onStreamedFrameAvailable', 'listen', 'cancel']);
<String>['startVideoRecording without stream']);
});
}

@@ -203,6 +214,20 @@ class MockStreamingCameraPlatform extends MockCameraPlatform {
return _streamController!.stream;
}

@override
Future<XFile> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) {
streamCallLog.add('startVideoRecording');
return super
.startVideoRecording(cameraId, maxVideoDuration: maxVideoDuration);
}

@override
Future<void> startVideoCapturing(VideoCaptureOptions options) {
if (options.streamCallback == null) {
streamCallLog.add('startVideoRecording without stream');
} else {
streamCallLog.add('startVideoRecording with stream');
}
return super.startVideoCapturing(options);
}

void _onFrameStreamListen() {
streamCallLog.add('listen');
}
3 changes: 2 additions & 1 deletion packages/camera/camera/test/camera_preview_test.dart
@@ -98,7 +98,8 @@ class FakeController extends ValueNotifier<CameraValue>
Future<void> startImageStream(onLatestImageAvailable onAvailable) async {}

@override
Future<void> startVideoRecording() async {}
Future<void> startVideoRecording(
{onLatestImageAvailable? onAvailable}) async {}

@override
Future<void> stopImageStream() async {}
30 changes: 6 additions & 24 deletions packages/camera/camera/test/camera_test.dart
@@ -336,30 +336,6 @@ void main() {
)));
});

test(
'startVideoRecording() throws $CameraException when already streaming images',
() async {
final CameraController cameraController = CameraController(
const CameraDescription(
name: 'cam',
lensDirection: CameraLensDirection.back,
sensorOrientation: 90),
ResolutionPreset.max);

await cameraController.initialize();

cameraController.value =
cameraController.value.copyWith(isStreamingImages: true);

expect(
cameraController.startVideoRecording(),
throwsA(isA<CameraException>().having(
(CameraException error) => error.description,
'A camera has started streaming images.',
'startVideoRecording was called while a camera was streaming images.',
)));
});

test('getMaxZoomLevel() throws $CameraException when uninitialized',
() async {
final CameraController cameraController = CameraController(
@@ -1459,6 +1435,12 @@ class MockCameraPlatform extends Mock
{Duration? maxVideoDuration}) =>
Future<XFile>.value(mockVideoRecordingXFile);

@override
Future<void> startVideoCapturing(VideoCaptureOptions options) {
return startVideoRecording(options.cameraId,
maxVideoDuration: options.maxDuration);
}

@override
Future<void> lockCaptureOrientation(
int? cameraId, DeviceOrientation? orientation) async =>
4 changes: 4 additions & 0 deletions packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,7 @@
## 0.10.1

* Implements option to also stream when recording a video.

## 0.10.0+4

* Upgrades `androidx.annotation` version to 1.5.0.
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -522,6 +522,21 @@ private void refreshPreviewCaptureSession(
}
}

private void startCapture(boolean record, boolean stream) throws CameraAccessException {
List<Surface> surfaces = new ArrayList<>();
Runnable successCallback = null;
if (record) {
surfaces.add(mediaRecorder.getSurface());
successCallback = () -> mediaRecorder.start();
}
if (stream) {
surfaces.add(imageStreamReader.getSurface());
}

createCaptureSession(
CameraDevice.TEMPLATE_RECORD, successCallback, surfaces.toArray(new Surface[0]));
}

public void takePicture(@NonNull final Result result) {
// Only take one picture at a time.
if (cameraCaptureCallback.getCameraState() != CameraState.STATE_PREVIEW) {
@@ -731,29 +746,17 @@ private void unlockAutoFocus() {
dartMessenger.error(flutterResult, errorCode, errorMessage, null));
}

public void startVideoRecording(@NonNull Result result) {
final File outputDir = applicationContext.getCacheDir();
try {
captureFile = File.createTempFile("REC", ".mp4", outputDir);
} catch (IOException | SecurityException e) {
result.error("cannotCreateFile", e.getMessage(), null);
return;
}
try {
prepareMediaRecorder(captureFile.getAbsolutePath());
} catch (IOException e) {
recordingVideo = false;
captureFile = null;
result.error("videoRecordingFailed", e.getMessage(), null);
return;
public void startVideoRecording(
@NonNull Result result, @Nullable EventChannel imageStreamChannel) {
prepareRecording(result);

if (imageStreamChannel != null) {
setStreamHandler(imageStreamChannel);
}
// Re-create autofocus feature so it's using video focus mode now.
cameraFeatures.setAutoFocus(
cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));

recordingVideo = true;
try {
createCaptureSession(
CameraDevice.TEMPLATE_RECORD, () -> mediaRecorder.start(), mediaRecorder.getSurface());
startCapture(true, imageStreamChannel != null);
result.success(null);
} catch (CameraAccessException e) {
recordingVideo = false;
@@ -1073,21 +1076,10 @@ public void startPreview() throws CameraAccessException {

public void startPreviewWithImageStream(EventChannel imageStreamChannel)
throws CameraAccessException {
createCaptureSession(CameraDevice.TEMPLATE_RECORD, imageStreamReader.getSurface());
Log.i(TAG, "startPreviewWithImageStream");

imageStreamChannel.setStreamHandler(
new EventChannel.StreamHandler() {
@Override
public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
setImageStreamImageAvailableListener(imageStreamSink);
}
setStreamHandler(imageStreamChannel);

@Override
public void onCancel(Object o) {
imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
}
});
startCapture(false, true);
Log.i(TAG, "startPreviewWithImageStream");
}

/**
@@ -1117,6 +1109,42 @@ public void onError(String errorCode, String errorMessage) {
cameraCaptureCallback.setCameraState(CameraState.STATE_PREVIEW);
}

private void prepareRecording(@NonNull Result result) {
final File outputDir = applicationContext.getCacheDir();
try {
captureFile = File.createTempFile("REC", ".mp4", outputDir);
} catch (IOException | SecurityException e) {
result.error("cannotCreateFile", e.getMessage(), null);
return;
}
try {
prepareMediaRecorder(captureFile.getAbsolutePath());
} catch (IOException e) {
recordingVideo = false;
captureFile = null;
result.error("videoRecordingFailed", e.getMessage(), null);
return;
}
// Re-create autofocus feature so it's using video focus mode now.
cameraFeatures.setAutoFocus(
cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
}

private void setStreamHandler(EventChannel imageStreamChannel) {
imageStreamChannel.setStreamHandler(
new EventChannel.StreamHandler() {
@Override
public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
setImageStreamImageAvailableListener(imageStreamSink);
}

@Override
public void onCancel(Object o) {
imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
}
});
}

private void setImageStreamImageAvailableListener(final EventChannel.EventSink imageStreamSink) {
imageStreamReader.setOnImageAvailableListener(
reader -> {