From d4592e671efaae1bac9e506708bf9c0af6984d4b Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Wed, 11 Dec 2024 12:10:57 +0100
Subject: [PATCH 1/8] feat: Add `depth` to `Frame` types

---
 package/src/types/Frame.ts | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/package/src/types/Frame.ts b/package/src/types/Frame.ts
index c9e6bb2310..9c9876c141 100644
--- a/package/src/types/Frame.ts
+++ b/package/src/types/Frame.ts
@@ -64,6 +64,15 @@ export interface Frame {
    */
   readonly pixelFormat: PixelFormat

+  /**
+   * Represents the depth data of this Frame, if the Camera is configured to stream depth data.
+   */
+  readonly depth?: {
+    readonly width: number
+    readonly height: number
+    toArrayBuffer(): ArrayBuffer
+  }
+
   /**
    * Get the underlying data of the Frame as a uint8 array buffer.
    *

From ebcce9ff2d7a5c02df0004e319516251bf8e6f4f Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Wed, 11 Dec 2024 12:12:08 +0100
Subject: [PATCH 2/8] Update Podfile.lock

---
 example/ios/Podfile.lock | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index 53ecd57cf4..9a08834c6b 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -1768,16 +1768,16 @@ PODS:
     - ReactCommon/turbomodule/core
     - Yoga
   - SocketRocket (0.7.0)
-  - VisionCamera (4.6.1):
-    - VisionCamera/Core (= 4.6.1)
-    - VisionCamera/FrameProcessors (= 4.6.1)
-    - VisionCamera/React (= 4.6.1)
-  - VisionCamera/Core (4.6.1)
-  - VisionCamera/FrameProcessors (4.6.1):
+  - VisionCamera (4.6.3):
+    - VisionCamera/Core (= 4.6.3)
+    - VisionCamera/FrameProcessors (= 4.6.3)
+    - VisionCamera/React (= 4.6.3)
+  - VisionCamera/Core (4.6.3)
+  - VisionCamera/FrameProcessors (4.6.3):
     - React
     - React-callinvoker
     - react-native-worklets-core
-  - VisionCamera/React (4.6.1):
+  - VisionCamera/React (4.6.3):
     - React-Core
     - VisionCamera/FrameProcessors
   - Yoga (0.0.0)
@@ -2097,9 +2097,9 @@ SPEC CHECKSUMS:
   RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8
   RNVectorIcons: 6382277afab3c54658e9d555ee0faa7a37827136
   SocketRocket: abac6f5de4d4d62d24e11868d7a2f427e0ef940d
-  VisionCamera: ec141897a88c2e95e8b83cf97b8e4db801e02fd6
-  Yoga: 055f92ad73f8c8600a93f0e25ac0b2344c3b07e6
+  VisionCamera: 88df4dae7196c93ecd331f105f0e5d7d95702cb3
+  Yoga: aa3df615739504eebb91925fc9c58b4922ea9a08

 PODFILE CHECKSUM: 2ad84241179871ca890f7c65c855d117862f1a68

-COCOAPODS: 1.16.2
+COCOAPODS: 1.15.2

From 7d2b43b20fb6d1c8bf7d906e0803c9f07113e934 Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Thu, 12 Dec 2024 13:56:40 +0100
Subject: [PATCH 3/8] feat: Enable depth and synchronize if `enableDepth` is true

---
 package/ios/Core/CameraConfiguration.swift  |  1 +
 .../Core/CameraSession+Configuration.swift  | 27 ++++++++++++++++---
 package/ios/Core/CameraSession.swift        | 10 ++++++-
 package/ios/React/CameraView.swift          |  3 ++-
 4 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/package/ios/Core/CameraConfiguration.swift b/package/ios/Core/CameraConfiguration.swift
index a4b94dd377..6dea95508f 100644
--- a/package/ios/Core/CameraConfiguration.swift
+++ b/package/ios/Core/CameraConfiguration.swift
@@ -182,6 +182,7 @@ final class CameraConfiguration {
     var enableBufferCompression = false
     var enableHdr = false
     var enableFrameProcessor = false
+    var enableDepth = false
   }

   /**
diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift
index ecd2a94daf..1abad0ba50 100644
--- a/package/ios/Core/CameraSession+Configuration.swift
+++ b/package/ios/Core/CameraSession+Configuration.swift
@@ -54,7 +54,7 @@ extension CameraSession {
   // pragma MARK: Outputs

   /**
-   Configures all outputs (`photo` + `video` + `codeScanner`)
+   Configures all outputs (`photo` + `video` + `depth` + `codeScanner`)
    */
   func configureOutputs(configuration: CameraConfiguration) throws {
     VisionLogger.log(level: .info, message: "Configuring Outputs...")
@@ -65,7 +65,9 @@ extension CameraSession {
     }
     photoOutput = nil
     videoOutput = nil
+    depthOutput = nil
     codeScannerOutput = nil
+    outputSynchronizer = nil

     // Photo Output
     if case let .enabled(photo) = configuration.photo {
@@ -97,7 +99,7 @@ extension CameraSession {
     }

     // Video Output + Frame Processor
-    if case .enabled = configuration.video {
+    if case let .enabled(video) = configuration.video {
       VisionLogger.log(level: .info, message: "Adding Video Data output...")

       // 1. Add
@@ -107,8 +109,7 @@ extension CameraSession {
       }
       captureSession.addOutput(videoOutput)

-      // 2. Configure
-      videoOutput.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
+      // 2. Configure Video
       videoOutput.alwaysDiscardsLateVideoFrames = true
       if configuration.isMirrored {
         // 2.1. If mirroring is enabled, mirror all connections along the vertical axis
@@ -119,6 +120,24 @@ extension CameraSession {
           VisionLogger.log(level: .info, message: "AVCaptureVideoDataOutput will rotate Frames to \(videoOutput.orientation)...")
         }
       }
+
+      // 3. Configure Depth
+      if video.enableDepth {
+        // Video is synchronized with depth data - use a joined delegate!
+        // 3.1. Create depth output
+        let depthOutput = AVCaptureDepthDataOutput()
+        depthOutput.alwaysDiscardsLateDepthData = true
+        depthOutput.isFilteringEnabled = false
+        // 3.2. Set up a synchronizer between video and depth data
+        outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [depthOutput, videoOutput])
+        outputSynchronizer!.setDelegate(self, queue: CameraQueues.videoQueue)
+        // 3.3. Add depth output to session
+        captureSession.addOutput(depthOutput)
+        self.depthOutput = depthOutput
+      } else {
+        // Video is the only output - use its own delegate
+        videoOutput.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
+      }

       self.videoOutput = videoOutput
     }
diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift
index 10b0f3399c..f27409d45c 100644
--- a/package/ios/Core/CameraSession.swift
+++ b/package/ios/Core/CameraSession.swift
@@ -13,7 +13,7 @@ import Foundation
  A fully-featured Camera Session supporting preview, video, photo, frame processing, and code scanning outputs.
  All changes to the session have to be controlled via the `configure` function.
  */
-final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
+final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureDataOutputSynchronizerDelegate {
   // Configuration
   private var isInitialized = false
   var configuration: CameraConfiguration?
@@ -27,7 +27,9 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
   var photoOutput: AVCapturePhotoOutput?
   var videoOutput: AVCaptureVideoDataOutput?
   var audioOutput: AVCaptureAudioDataOutput?
+  var depthOutput: AVCaptureDepthDataOutput?
   var codeScannerOutput: AVCaptureMetadataOutput?
+  var outputSynchronizer: AVCaptureDataOutputSynchronizer?
   // State
   var metadataProvider = MetadataProvider()
   var recordingSession: RecordingSession?
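Note on patch 3: an AVCaptureDepthDataOutput only delivers data if the device's active format actually supports depth. This series picks a depth-capable format on the JS side (see patch 4); on the native side, selecting a depth format could look roughly like the sketch below — `device` is assumed to be the already-locked AVCaptureDevice, and none of this is part of the patches:

    // Prefer 32-bit float depth; pick the largest depth resolution the
    // active video format offers. Call between lockForConfiguration() /
    // unlockForConfiguration().
    let float32Formats = device.activeFormat.supportedDepthDataFormats.filter {
      CMFormatDescriptionGetMediaSubType($0.formatDescription) == kCVPixelFormatType_DepthFloat32
    }
    if let best = float32Formats.max(by: {
      CMVideoFormatDescriptionGetDimensions($0.formatDescription).width <
        CMVideoFormatDescriptionGetDimensions($1.formatDescription).width
    }) {
      device.activeDepthDataFormat = best
    }

Without an activeDepthDataFormat, the synchronizer configured above would simply never receive depth data.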
@@ -275,6 +277,12 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
       break
     }
   }
+
+  func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
+    for frame in synchronizedDataCollection {
+      // TODO: Cast to CMSampleBuffer!
+    }
+  }

   private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) {
     if let recordingSession {
diff --git a/package/ios/React/CameraView.swift b/package/ios/React/CameraView.swift
index c773975353..ce051f0afd 100644
--- a/package/ios/React/CameraView.swift
+++ b/package/ios/React/CameraView.swift
@@ -211,7 +211,8 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat
       config.video = .enabled(config: CameraConfiguration.Video(pixelFormat: getPixelFormat(),
                                                                 enableBufferCompression: enableBufferCompression,
                                                                 enableHdr: videoHdr,
-                                                                enableFrameProcessor: enableFrameProcessor))
+                                                                enableFrameProcessor: enableFrameProcessor,
+                                                                enableDepth: enableDepthData))
     } else {
       config.video = .disabled
     }

From 66d0a62c9a0516e3e998376354c0f78fe4161edb Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Dec 2024 14:09:40 +0100
Subject: [PATCH 4/8] fix: Set up depth stream properly

---
 package/ios/Core/CameraSession+Configuration.swift |  6 +++---
 package/src/devices/getCameraFormat.ts             | 10 ++++++++++
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift
index 1abad0ba50..de9db1e2d7 100644
--- a/package/ios/Core/CameraSession+Configuration.swift
+++ b/package/ios/Core/CameraSession+Configuration.swift
@@ -128,11 +128,11 @@ extension CameraSession {
         let depthOutput = AVCaptureDepthDataOutput()
         depthOutput.alwaysDiscardsLateDepthData = true
         depthOutput.isFilteringEnabled = false
-        // 3.2. Set up a synchronizer between video and depth data
+        // 3.2. Add depth output to session
+        captureSession.addOutput(depthOutput)
+        // 3.3. Set up a synchronizer between video and depth data
         outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [depthOutput, videoOutput])
         outputSynchronizer!.setDelegate(self, queue: CameraQueues.videoQueue)
-        // 3.3. Add depth output to session
-        captureSession.addOutput(depthOutput)
         self.depthOutput = depthOutput
       } else {
         // Video is the only output - use its own delegate
diff --git a/package/src/devices/getCameraFormat.ts b/package/src/devices/getCameraFormat.ts
index ec48e951b8..d6e72316f0 100644
--- a/package/src/devices/getCameraFormat.ts
+++ b/package/src/devices/getCameraFormat.ts
@@ -74,6 +74,10 @@ export interface FormatFilter {
    * you might want to choose a different auto-focus system.
    */
   autoFocusSystem?: AutoFocusSystem
+  /**
+   * Specifies whether to prefer formats that support depth data capture.
+   */
+  depth?: boolean
 }

 type FilterWithPriority = {
@@ -236,6 +240,12 @@ export function getCameraFormat(device: CameraDevice, filters: FormatFilter[]):
       if (format.autoFocusSystem === filter.autoFocusSystem.target) rightPoints += filter.autoFocusSystem.priority
     }

+    // Find depth data
+    if (filter.depth != null) {
+      if (bestFormat.supportsDepthCapture) leftPoints += filter.depth.priority
+      if (format.supportsDepthCapture) rightPoints += filter.depth.priority
+    }
+
     if (rightPoints > leftPoints) bestFormat = format
   }

From 230b49de49d3ed745f1f735d0f96f925d8c5bd26 Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Dec 2024 14:21:57 +0100
Subject: [PATCH 5/8] feat: Support `frame.depth`

---
 package/ios/Core/CameraSessionDelegate.swift   |  2 +-
 package/ios/FrameProcessors/Frame.h            |  3 ++-
 package/ios/FrameProcessors/Frame.m            |  8 +++++++-
 package/ios/FrameProcessors/FrameHostObject.mm | 10 ++++++++++
 package/ios/React/CameraView.swift             |  2 +-
 package/src/types/Frame.ts                     |  1 -
 6 files changed, 21 insertions(+), 5 deletions(-)

diff --git a/package/ios/Core/CameraSessionDelegate.swift b/package/ios/Core/CameraSessionDelegate.swift
index 07ec0a7410..d73830fed2 100644
--- a/package/ios/Core/CameraSessionDelegate.swift
+++ b/package/ios/Core/CameraSessionDelegate.swift
@@ -44,7 +44,7 @@ protocol CameraSessionDelegate: AnyObject {
   /**
    Called for every frame (if video or frameProcessor is enabled)
    */
-  func onFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool)
+  func onFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool, depthBuffer: CMSampleBuffer?)
   /**
    Called whenever a QR/Barcode has been scanned. Only if the CodeScanner Output is enabled
    */
diff --git a/package/ios/FrameProcessors/Frame.h b/package/ios/FrameProcessors/Frame.h
index a925b8813a..3fbe6d6b86 100644
--- a/package/ios/FrameProcessors/Frame.h
+++ b/package/ios/FrameProcessors/Frame.h
@@ -16,13 +16,14 @@ NS_ASSUME_NONNULL_BEGIN

 @interface Frame : NSObject

-- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored;
+- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored depthData:(nullable CMSampleBufferRef)depth;
 - (instancetype)init NS_UNAVAILABLE;

 - (void)incrementRefCount;
 - (void)decrementRefCount;

 @property(nonatomic, readonly) CMSampleBufferRef buffer;
+@property(nonatomic, readonly, nullable) CMSampleBufferRef depth;
 @property(nonatomic, readonly) UIImageOrientation orientation;
 @property(nonatomic, readonly) NSString* pixelFormat;
diff --git a/package/ios/FrameProcessors/Frame.m b/package/ios/FrameProcessors/Frame.m
index 663e19f4f7..edb2fc0b89 100644
--- a/package/ios/FrameProcessors/Frame.m
+++ b/package/ios/FrameProcessors/Frame.m
@@ -14,14 +14,16 @@ @implementation Frame {
   CMSampleBufferRef _Nonnull _buffer;
   UIImageOrientation _orientation;
   BOOL _isMirrored;
+  CMSampleBufferRef _Nullable _depth;
 }

-- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored {
+- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored depthData:(nullable CMSampleBufferRef)depth {
   self = [super init];
   if (self) {
     _buffer = buffer;
     _orientation = orientation;
     _isMirrored = isMirrored;
+    _depth = depth;
   }
   return self;
 }
@@ -47,6 +49,10 @@ - (CMSampleBufferRef)buffer {
   return _buffer;
 }

+- (nullable CMSampleBufferRef)depth {
+  return _depth;
+}
+
 - (BOOL)isValid {
   return _buffer != nil && CFGetRetainCount(_buffer) > 0 && CMSampleBufferIsValid(_buffer);
 }
diff --git a/package/ios/FrameProcessors/FrameHostObject.mm b/package/ios/FrameProcessors/FrameHostObject.mm
index 67bbfe51df..4b8cdd0068 100644
--- a/package/ios/FrameProcessors/FrameHostObject.mm
+++ b/package/ios/FrameProcessors/FrameHostObject.mm
@@ -74,6 +74,16 @@
   if (name == "planesCount") {
     return jsi::Value((double)_frame.planesCount);
   }
+  if (name == "depth") {
+    if (_frame.depth == nil) {
+      return jsi::Value::undefined();
+    }
+    jsi::Object object(runtime);
+    CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(_frame.depth);
+    object.setProperty(runtime, "width", jsi::Value(static_cast<double>(CVPixelBufferGetWidth(imageBuffer))));
+    object.setProperty(runtime, "height", jsi::Value(static_cast<double>(CVPixelBufferGetHeight(imageBuffer))));
+    return object;
+  }

   // Internal methods
   if (name == "incrementRefCount") {
diff --git a/package/ios/React/CameraView.swift b/package/ios/React/CameraView.swift
index ce051f0afd..fa869eefb3 100644
--- a/package/ios/React/CameraView.swift
+++ b/package/ios/React/CameraView.swift
@@ -363,7 +363,7 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat
     ])
   }

-  func onFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) {
+  func onFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool, depthBuffer: CMSampleBuffer?) {
     // Update latest frame that can be used for snapshot capture
     latestVideoFrame = Snapshot(imageBuffer: sampleBuffer, orientation: orientation)

diff --git a/package/src/types/Frame.ts b/package/src/types/Frame.ts
index 9c9876c141..f5aba3ca8e 100644
--- a/package/src/types/Frame.ts
+++ b/package/src/types/Frame.ts
@@ -70,7 +70,6 @@ export interface Frame {
   readonly depth?: {
     readonly width: number
     readonly height: number
-    toArrayBuffer(): ArrayBuffer
   }

   /**

From 9a6f2471bf886d1dacc1bc078db18ab0389b2107 Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Dec 2024 14:22:11 +0100
Subject: [PATCH 6/8] fix: Fix protocol call

---
 package/ios/React/CameraView.swift | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/package/ios/React/CameraView.swift b/package/ios/React/CameraView.swift
index fa869eefb3..8f885c4e9f 100644
--- a/package/ios/React/CameraView.swift
+++ b/package/ios/React/CameraView.swift
@@ -375,7 +375,8 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat
       // Call Frame Processor
       let frame = Frame(buffer: sampleBuffer,
                         orientation: orientation.imageOrientation,
-                        isMirrored: isMirrored)
+                        isMirrored: isMirrored,
+                        depthData: depthBuffer)
       frameProcessor.call(frame)
     }
     #endif

From d17988db7d0fd0701476b0d3c74aacf15e5dac3c Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Dec 2024 14:22:22 +0100
Subject: [PATCH 7/8] fix: Actually set depth data

---
 package/ios/Core/CameraSession.swift | 22 ++++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)

diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift
index f27409d45c..d35f71fabd 100644
--- a/package/ios/Core/CameraSession.swift
+++ b/package/ios/Core/CameraSession.swift
@@ -279,12 +279,23 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
   }

   func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
-    for frame in synchronizedDataCollection {
-      // TODO: Cast to CMSampleBuffer!
+    guard let videoOutput else { return }
+    guard let videoData = synchronizedDataCollection.synchronizedData(for: videoOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
+
+    if let depthOutput {
+      // We have depth data as well
+      guard let depthData = synchronizedDataCollection.synchronizedData(for: depthOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
+      onVideoFrame(sampleBuffer: videoData.sampleBuffer,
+                   orientation: videoOutput.orientation,
+                   isMirrored: videoOutput.isMirrored,
+                   depthData: depthData.sampleBuffer)
+    } else {
+      // We only have video data
+      onVideoFrame(sampleBuffer: videoData.sampleBuffer, orientation: videoOutput.orientation, isMirrored: videoOutput.isMirrored)
     }
   }

-  private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) {
+  private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool, depthData: CMSampleBuffer? = nil) {
     if let recordingSession {
       do {
         // Write the Video Buffer to the .mov/.mp4 file
@@ -298,7 +309,10 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat

     if let delegate {
       // Call Frame Processor (delegate) for every Video Frame
-      delegate.onFrame(sampleBuffer: sampleBuffer, orientation: orientation, isMirrored: isMirrored)
+      delegate.onFrame(sampleBuffer: sampleBuffer,
+                       orientation: orientation,
+                       isMirrored: isMirrored,
+                       depthBuffer: depthData)
     }
   }

From 0635e3197bb6ec63d916581291f2d66e3662f7ae Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Dec 2024 14:27:15 +0100
Subject: [PATCH 8/8] Format

---
 package/ios/Core/CameraSession+Configuration.swift |  2 +-
 package/ios/Core/CameraSession.swift               | 12 ++++++++----
 package/ios/FrameProcessors/Frame.h                |  5 ++++-
 package/ios/FrameProcessors/Frame.m                |  5 ++++-
 4 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift
index de9db1e2d7..33dcc5dce4 100644
--- a/package/ios/Core/CameraSession+Configuration.swift
+++ b/package/ios/Core/CameraSession+Configuration.swift
@@ -120,7 +120,7 @@ extension CameraSession {
           VisionLogger.log(level: .info, message: "AVCaptureVideoDataOutput will rotate Frames to \(videoOutput.orientation)...")
         }
       }
-      
+
       // 3. Configure Depth
       if video.enableDepth {
         // Video is synchronized with depth data - use a joined delegate!
diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift
index d35f71fabd..bfea30b623 100644
--- a/package/ios/Core/CameraSession.swift
+++ b/package/ios/Core/CameraSession.swift
@@ -13,7 +13,11 @@ import Foundation
  A fully-featured Camera Session supporting preview, video, photo, frame processing, and code scanning outputs.
  All changes to the session have to be controlled via the `configure` function.
  */
-final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureDataOutputSynchronizerDelegate {
+final class CameraSession:
+  NSObject,
+  AVCaptureVideoDataOutputSampleBufferDelegate,
+  AVCaptureAudioDataOutputSampleBufferDelegate,
+  AVCaptureDataOutputSynchronizerDelegate {
   // Configuration
   private var isInitialized = false
   var configuration: CameraConfiguration?
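Note on patch 7: an AVCaptureDataOutputSynchronizer wraps data from a depth output as AVCaptureSynchronizedDepthData (carrying an AVDepthData), not as AVCaptureSynchronizedSampleBufferData, so the depth cast in dataOutputSynchronizer(_:didOutput:) above would likely fail at runtime. A sketch of the extraction follows — this is not part of the series, and converting the resulting AVDepthData into the CMSampleBuffer the Frame initializer expects is still left open here:

    if let depthOutput,
       let synced = synchronizedDataCollection.synchronizedData(for: depthOutput) as? AVCaptureSynchronizedDepthData,
       !synced.depthDataWasDropped {
      // Normalize to 32-bit float and grab the raw depth pixel buffer.
      let depth = synced.depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
      let depthPixelBuffer: CVPixelBuffer = depth.depthDataMap
      // ... pass the depth map on to onVideoFrame / Frame
    }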
@@ -277,11 +281,11 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
       break
     }
   }
-  
-  func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
+
+  func dataOutputSynchronizer(_: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
     guard let videoOutput else { return }
     guard let videoData = synchronizedDataCollection.synchronizedData(for: videoOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
-    
+
     if let depthOutput {
       // We have depth data as well
       guard let depthData = synchronizedDataCollection.synchronizedData(for: depthOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
diff --git a/package/ios/FrameProcessors/Frame.h b/package/ios/FrameProcessors/Frame.h
index 3fbe6d6b86..2cacdbd9e3 100644
--- a/package/ios/FrameProcessors/Frame.h
+++ b/package/ios/FrameProcessors/Frame.h
@@ -16,7 +16,10 @@ NS_ASSUME_NONNULL_BEGIN

 @interface Frame : NSObject

-- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored depthData:(nullable CMSampleBufferRef)depth;
+- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer
+                   orientation:(UIImageOrientation)orientation
+                    isMirrored:(BOOL)isMirrored
+                     depthData:(nullable CMSampleBufferRef)depth;
 - (instancetype)init NS_UNAVAILABLE;

 - (void)incrementRefCount;
diff --git a/package/ios/FrameProcessors/Frame.m b/package/ios/FrameProcessors/Frame.m
index edb2fc0b89..08e828d54c 100644
--- a/package/ios/FrameProcessors/Frame.m
+++ b/package/ios/FrameProcessors/Frame.m
@@ -17,7 +17,10 @@ @implementation Frame {
   CMSampleBufferRef _Nullable _depth;
 }

-- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation isMirrored:(BOOL)isMirrored depthData:(nullable CMSampleBufferRef)depth {
+- (instancetype)initWithBuffer:(CMSampleBufferRef)buffer
+                   orientation:(UIImageOrientation)orientation
+                    isMirrored:(BOOL)isMirrored
+                     depthData:(nullable CMSampleBufferRef)depth {
   self = [super init];
   if (self) {
     _buffer = buffer;
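Note on patch 5: the `width`/`height` that the new `depth` host object exposes come straight from the depth CVPixelBuffer. For reference, reading actual depth values out of such a buffer (assuming kCVPixelFormatType_DepthFloat32 as sketched earlier — not part of the patches) looks roughly like:

    CVPixelBufferLockBaseAddress(depthPixelBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(depthPixelBuffer, .readOnly) }
    let width = CVPixelBufferGetWidth(depthPixelBuffer)
    let height = CVPixelBufferGetHeight(depthPixelBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(depthPixelBuffer)
    // Base address is non-nil while the buffer is locked.
    let base = CVPixelBufferGetBaseAddress(depthPixelBuffer)!
    // Depth (in meters) of the center pixel:
    let row = base.advanced(by: (height / 2) * bytesPerRow).assumingMemoryBound(to: Float32.self)
    let centerDepth = row[width / 2]

A `toArrayBuffer()` on `frame.depth` (removed from the TS types in patch 5) would presumably copy this same locked buffer out to JS, analogous to the existing Frame.toArrayBuffer().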