7 changes: 7 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,10 @@
## 0.9.20+4

* Migrates `setVideoFormat`, `stopVideoRecording`, and `stopImageStream` methods to Swift.
* Migrates stopping accelerometer updates to Swift.
* Migrates `setDescriptionWhileRecording` method to Swift.
* Adds `createConnection` method implementation to Swift.

## 0.9.20+3

* Migrates `setZoomLevel` and `setFlashMode` methods to Swift.
@@ -10,6 +10,14 @@ import CoreMotion
#endif

final class DefaultCamera: FLTCam, Camera {
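  // Keeps the capture video output's pixel format in sync whenever `videoFormat` changes.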
  override var videoFormat: FourCharCode {
    didSet {
      captureVideoOutput.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: videoFormat)
      ]
    }
  }

  override var deviceOrientation: UIDeviceOrientation {
    get { super.deviceOrientation }
    set {
@@ -52,6 +60,34 @@ final class DefaultCamera: FLTCam, Camera {
details: error.domain)
}

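  // Creates the capture input, video data output, and connection for the given device.
  // Used by `setDescriptionWhileRecording` below when switching cameras mid-recording.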
  private static func createConnection(
    captureDevice: FLTCaptureDevice,
    videoFormat: FourCharCode,
    captureDeviceInputFactory: FLTCaptureDeviceInputFactory
  ) throws -> (FLTCaptureInput, FLTCaptureVideoDataOutput, AVCaptureConnection) {
    // Setup video capture input.
    let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice)

    // Setup video capture output.
    let captureVideoOutput = FLTDefaultCaptureVideoDataOutput(
      captureVideoOutput: AVCaptureVideoDataOutput())
    captureVideoOutput.videoSettings = [
      kCVPixelBufferPixelFormatTypeKey as String: videoFormat as Any
    ]
    captureVideoOutput.alwaysDiscardsLateVideoFrames = true

    // Setup video capture connection.
    let connection = AVCaptureConnection(
      inputPorts: captureVideoInput.ports,
      output: captureVideoOutput.avOutput)

    if captureDevice.position == .front {
      connection.isVideoMirrored = true
    }

    return (captureVideoInput, captureVideoOutput, connection)
  }

  func reportInitializationState() {
    // Get all the state on the current thread, not the main thread.
    let state = FCPPlatformCameraState.make(
@@ -96,6 +132,37 @@ final class DefaultCamera: FLTCam, Camera {
isRecordingPaused = false
}

  func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) {
    if isRecording {
      isRecording = false

      // When `isRecording` is true `startWriting` was already called so `videoWriter.status`
      // is always either `.writing` or `.failed` and `finishWriting` does not throw exceptions so
      // there is no need to check `videoWriter.status`
      videoWriter?.finishWriting {
        if self.videoWriter?.status == .completed {
          self.updateOrientation()
          completion(self.videoRecordingPath, nil)
          self.videoRecordingPath = nil
        } else {
          completion(
            nil,
            FlutterError(
              code: "IOError",
              message: "AVAssetWriter could not finish writing!",
              details: nil))
        }
      }
    } else {
      let error = NSError(
        domain: NSCocoaErrorDomain,
        code: URLError.resourceUnavailable.rawValue,
        userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"]
      )
      completion(nil, DefaultCamera.flutterErrorFromNSError(error))
    }
  }

  func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) {
    let orientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation)
    if lockedCaptureOrientation != orientation {
@@ -341,6 +408,94 @@ final class DefaultCamera: FLTCam, Camera {
isPreviewPaused = false
}

  func setDescriptionWhileRecording(
    _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void
  ) {
    guard isRecording else {
      completion(
        FlutterError(
          code: "setDescriptionWhileRecordingFailed",
          message: "Device was not recording",
          details: nil))
      return
    }

    captureDevice = captureDeviceFactory(cameraName)

    let oldConnection = captureVideoOutput.connection(withMediaType: .video)

    // Stop video capture from the old output.
    captureVideoOutput.setSampleBufferDelegate(nil, queue: nil)

    // Remove the old video capture connections.
    videoCaptureSession.beginConfiguration()
    videoCaptureSession.removeInput(captureVideoInput)
    videoCaptureSession.removeOutput(captureVideoOutput.avOutput)

    let newConnection: AVCaptureConnection

    do {
      (captureVideoInput, captureVideoOutput, newConnection) = try DefaultCamera.createConnection(
        captureDevice: captureDevice,
        videoFormat: videoFormat,
        captureDeviceInputFactory: captureDeviceInputFactory)

      captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
    } catch {
      completion(
        FlutterError(
          code: "VideoError",
          message: "Unable to create video connection",
          details: nil))
      return
    }

    // Keep the same orientation the old connections had.
    if let oldConnection = oldConnection, newConnection.isVideoOrientationSupported {
      newConnection.videoOrientation = oldConnection.videoOrientation
    }

    // Add the new connections to the session.
    if !videoCaptureSession.canAddInput(captureVideoInput) {
      completion(
        FlutterError(
          code: "VideoError",
          message: "Unable to switch video input",
          details: nil))
    }
    videoCaptureSession.addInputWithNoConnections(captureVideoInput)

    if !videoCaptureSession.canAddOutput(captureVideoOutput.avOutput) {
      completion(
        FlutterError(
          code: "VideoError",
          message: "Unable to switch video output",
          details: nil))
    }
    videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput)

    if !videoCaptureSession.canAddConnection(newConnection) {
      completion(
        FlutterError(
          code: "VideoError",
          message: "Unable to switch video connection",
          details: nil))
    }
    videoCaptureSession.addConnection(newConnection)
    videoCaptureSession.commitConfiguration()

    completion(nil)
  }

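  // Stops the image stream by clearing the handler; reports an error if no stream is active.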
  func stopImageStream() {
    if isStreamingImages {
      isStreamingImages = false
      imageStreamHandler = nil
    } else {
      reportErrorMessage("Images from camera are not streaming!")
    }
  }

  func captureOutput(
    _ output: AVCaptureOutput,
    didOutput sampleBuffer: CMSampleBuffer,
@@ -591,4 +746,8 @@ final class DefaultCamera: FLTCam, Camera {
}
}
}

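  // Stop accelerometer updates on deallocation, mirroring the Objective-C dealloc removed below.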
  deinit {
    motionManager.stopAccelerometerUpdates()
  }
}
@@ -5,7 +5,6 @@
#import "./include/camera_avfoundation/FLTCam.h"
#import "./include/camera_avfoundation/FLTCam_Test.h"

@import CoreMotion;
@import Flutter;
#import <libkern/OSAtomic.h>

@@ -33,28 +32,21 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;

@property(readonly, nonatomic) NSObject<FLTCaptureInput> *captureVideoInput;
@property(readonly, nonatomic) CGSize captureSize;
@property(strong, nonatomic)
NSObject<FLTAssetWriterInputPixelBufferAdaptor> *assetWriterPixelBufferAdaptor;
@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
@property(strong, nonatomic) NSString *videoRecordingPath;
@property(assign, nonatomic) BOOL isAudioSetup;

@property(nonatomic) CMMotionManager *motionManager;
/// All FLTCam's state access and capture session related operations should be run on this queue.
@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
/// The queue on which captured photos (not videos) are written to disk.
/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
@property(strong, nonatomic) dispatch_queue_t photoIOQueue;
/// A wrapper for CMVideoFormatDescriptionGetDimensions.
/// Allows for alternate implementations in tests.
@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;
/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory;
@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;
@property(readonly, nonatomic) NSObject<FLTCaptureDeviceInputFactory> *captureDeviceInputFactory;
@property(nonatomic, copy) AssetWriterFactory assetWriterFactory;
@property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory;
/// Reports the given error message to the Dart side of the plugin.
@@ -193,12 +185,6 @@ - (AVCaptureConnection *)createConnection:(NSError **)error {
return connection;
}

- (void)setVideoFormat:(OSType)videoFormat {
_videoFormat = videoFormat;
_captureVideoOutput.videoSettings =
@{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
}

- (void)updateOrientation {
if (_isRecording) {
return;
@@ -426,10 +412,6 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
return bestFormat;
}

- (void)dealloc {
[_motionManager stopAccelerometerUpdates];
}

/// Main logic to setup the video recording.
- (void)setUpVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion {
NSError *error;
@@ -481,90 +463,6 @@ - (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))com
}
}

- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
FlutterError *_Nullable))completion {
if (_isRecording) {
_isRecording = NO;

// when _isRecording is YES startWriting was already called so _videoWriter.status
// is always either AVAssetWriterStatusWriting or AVAssetWriterStatusFailed and
// finishWritingWithCompletionHandler does not throw exception so there is no need
// to check _videoWriter.status
[_videoWriter finishWritingWithCompletionHandler:^{
if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
[self updateOrientation];
completion(self->_videoRecordingPath, nil);
self->_videoRecordingPath = nil;
} else {
completion(nil, [FlutterError errorWithCode:@"IOError"
message:@"AVAssetWriter could not finish writing!"
details:nil]);
}
}];
} else {
NSError *error =
[NSError errorWithDomain:NSCocoaErrorDomain
code:NSURLErrorResourceUnavailable
userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
completion(nil, FlutterErrorFromNSError(error));
}
}

- (void)setDescriptionWhileRecording:(NSString *)cameraName
withCompletion:(void (^)(FlutterError *_Nullable))completion {
if (!_isRecording) {
completion([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed"
message:@"Device was not recording"
details:nil]);
return;
}

_captureDevice = self.captureDeviceFactory(cameraName);

NSObject<FLTCaptureConnection> *oldConnection =
[_captureVideoOutput connectionWithMediaType:AVMediaTypeVideo];

// Stop video capture from the old output.
[_captureVideoOutput setSampleBufferDelegate:nil queue:nil];

// Remove the old video capture connections.
[_videoCaptureSession beginConfiguration];
[_videoCaptureSession removeInput:_captureVideoInput];
[_videoCaptureSession removeOutput:_captureVideoOutput.avOutput];

NSError *error = nil;
AVCaptureConnection *newConnection = [self createConnection:&error];
if (error) {
completion(FlutterErrorFromNSError(error));
return;
}

// Keep the same orientation the old connections had.
if (oldConnection && newConnection.isVideoOrientationSupported) {
newConnection.videoOrientation = oldConnection.videoOrientation;
}

// Add the new connections to the session.
if (![_videoCaptureSession canAddInput:_captureVideoInput])
completion([FlutterError errorWithCode:@"VideoError"
message:@"Unable switch video input"
details:nil]);
[_videoCaptureSession addInputWithNoConnections:_captureVideoInput];
if (![_videoCaptureSession canAddOutput:_captureVideoOutput.avOutput])
completion([FlutterError errorWithCode:@"VideoError"
message:@"Unable switch video output"
details:nil]);
[_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput.avOutput];
if (![_videoCaptureSession canAddConnection:newConnection])
completion([FlutterError errorWithCode:@"VideoError"
message:@"Unable switch video connection"
details:nil]);
[_videoCaptureSession addConnection:newConnection];
[_videoCaptureSession commitConfiguration];

completion(nil);
}

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
completion:(void (^)(FlutterError *))completion {
[self startImageStreamWithMessenger:messenger
@@ -612,15 +510,6 @@ - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messen
}
}

- (void)stopImageStream {
if (_isStreamingImages) {
_isStreamingImages = NO;
_imageStreamHandler = nil;
} else {
[self reportErrorMessage:@"Images from camera are not streaming!"];
}
}

- (BOOL)setupWriterForPath:(NSString *)path {
NSError *error = nil;
NSURL *outputURL;