-
-
Notifications
You must be signed in to change notification settings - Fork 1.2k
/
Copy pathCameraView.swift
393 lines (335 loc) · 11.5 KB
/
CameraView.swift
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
//
// CameraView.swift
// mrousavy
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
// TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity
//
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
//
// CameraView+TakePhoto
// TODO: Photo HDR
// MARK: - CameraView
/// The native host view for a camera.
///
/// All `@objc` stored properties below are React props set from JS; React batches
/// prop writes and then calls `didSetProps(_:)` once, which applies the whole set
/// to the underlying `CameraSession` in a single `configure { }` transaction.
/// Events flow back to JS through the `RCTDirectEventBlock` callbacks.
public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegate, FpsSampleCollectorDelegate {
  // pragma MARK: React Properties

  // props that require reconfiguring the session
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enablePortraitEffectsMatteDelivery = false
  @objc var enableBufferCompression = false
  @objc var isMirrored = false

  // use cases (which outputs to attach to the session)
  @objc var photo = false
  @objc var video = false
  @objc var audio = false
  @objc var enableFrameProcessor = false
  @objc var codeScannerOptions: NSDictionary?
  @objc var pixelFormat: NSString?
  @objc var enableLocation = false
  /// Whether a native preview view is shown. Toggling this immediately
  /// creates or destroys the `PreviewView` (see `updatePreview()`).
  @objc var preview = true {
    didSet {
      updatePreview()
    }
  }

  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var minFps: NSNumber?
  @objc var maxFps: NSNumber?
  @objc var videoHdr = false
  @objc var photoHdr = false
  @objc var photoQualityBalance: NSString?
  @objc var lowLightBoost = false
  @objc var outputOrientation: NSString?

  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 1.0 // in "factor"
  @objc var exposure: NSNumber = 0.0
  @objc var videoStabilizationMode: NSString?
  /// How the preview layer scales its content ("cover"/"contain").
  /// Applied to the live `PreviewView` via `updatePreview()`.
  @objc var resizeMode: NSString = "cover" {
    didSet {
      updatePreview()
    }
  }

  // events (JS callbacks; nil when the JS side did not subscribe)
  @objc var onInitializedEvent: RCTDirectEventBlock?
  @objc var onErrorEvent: RCTDirectEventBlock?
  @objc var onStartedEvent: RCTDirectEventBlock?
  @objc var onStoppedEvent: RCTDirectEventBlock?
  @objc var onPreviewStartedEvent: RCTDirectEventBlock?
  @objc var onPreviewStoppedEvent: RCTDirectEventBlock?
  @objc var onShutterEvent: RCTDirectEventBlock?
  @objc var onPreviewOrientationChangedEvent: RCTDirectEventBlock?
  @objc var onOutputOrientationChangedEvent: RCTDirectEventBlock?
  @objc var onViewReadyEvent: RCTDirectEventBlock?
  @objc var onAverageFpsChangedEvent: RCTDirectEventBlock?
  @objc var onCodeScannedEvent: RCTDirectEventBlock?

  // zoom
  /// Enables the native pinch-to-zoom gesture; installs/removes the
  /// recognizer eagerly so the prop takes effect without a reconfigure.
  @objc var enableZoomGesture = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }

  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
    /// The JS Frame Processor to call for every video frame (only compiled in
    /// when Frame Processors are enabled via the build flag).
    @objc public var frameProcessor: FrameProcessor?
  #endif

  // pragma MARK: Internal Properties
  var cameraSession = CameraSession()
  var previewView: PreviewView?
  var isMounted = false
  /// Timestamp of the most recent `didSetProps` call; used to drop stale,
  /// superseded `configure { }` transactions (see `didSetProps`).
  private var currentConfigureCall: DispatchTime?
  private let fpsSampleCollector = FpsSampleCollector()

  // CameraView+Zoom
  var pinchGestureRecognizer: UIPinchGestureRecognizer?
  /// Zoom factor at the start of the current pinch gesture, so the gesture
  /// scales relative to the last committed zoom.
  var pinchScaleOffset: CGFloat = 1.0

  // CameraView+TakeSnapshot
  /// The most recent video frame, kept so a snapshot can be served instantly.
  var latestVideoFrame: Snapshot?

  // pragma MARK: Setup

  /// Wires up delegates and creates the initial preview view.
  override public init(frame: CGRect) {
    super.init(frame: frame)
    cameraSession.delegate = self
    fpsSampleCollector.delegate = self
    updatePreview()
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  /// Starts/stops FPS collection as the view enters/leaves the hierarchy and
  /// fires `onViewReady` exactly once on first mount.
  override public func willMove(toSuperview newSuperview: UIView?) {
    super.willMove(toSuperview: newSuperview)
    if newSuperview != nil {
      fpsSampleCollector.start()
      if !isMounted {
        isMounted = true
        onViewReadyEvent?(nil)
      }
    } else {
      fpsSampleCollector.stop()
    }
  }

  /// Keeps the preview view sized to this view.
  /// NOTE(review): `super.layoutSubviews()` is not called here — Apple's UIKit docs
  /// say overrides should call super; confirm the omission is intentional.
  /// NOTE(review): `previewView.frame = frame` assigns this view's frame, which is
  /// expressed in the *superview's* coordinate space; a subview filling its parent
  /// is conventionally given `bounds`. Presumably this works because React Native
  /// positions the wrapper — verify before changing.
  override public func layoutSubviews() {
    if let previewView {
      previewView.frame = frame
      previewView.bounds = bounds
    }
  }

  /// Parses the `pixelFormat` JS prop into a `PixelFormat`.
  /// Reports parse failures through `onError` and falls back to `.yuv`.
  func getPixelFormat() -> PixelFormat {
    // TODO: Use ObjC RCT enum parser for this
    if let pixelFormat = pixelFormat as? String {
      do {
        return try PixelFormat(jsValue: pixelFormat)
      } catch {
        if let error = error as? CameraError {
          onError(error)
        } else {
          onError(.unknown(message: error.localizedDescription, cause: error as NSError))
        }
      }
    }
    return .yuv
  }

  /// Parses the `torch` JS prop into a `Torch`, silently falling back to `.off`
  /// on invalid values (unlike `getPixelFormat`, no error event is emitted).
  func getTorch() -> Torch {
    // TODO: Use ObjC RCT enum parser for this
    if let torch = try? Torch(jsValue: torch) {
      return torch
    }
    return .off
  }

  /// Parses the `photoQualityBalance` JS prop, defaulting to `.balanced`.
  func getPhotoQualityBalance() -> QualityBalance {
    if let photoQualityBalance = photoQualityBalance as? String,
       let balance = try? QualityBalance(jsValue: photoQualityBalance) {
      return balance
    }
    return .balanced
  }

  // pragma MARK: Props updating

  /// Called by React after a batch of prop writes; applies all props to the
  /// `CameraSession` in one atomic `configure { }` transaction.
  ///
  /// Stale-call handling: each call records its own timestamp in
  /// `currentConfigureCall`; if a newer call arrives while this one is still
  /// waiting for the session lock, this transaction aborts itself.
  override public final func didSetProps(_ changedProps: [String]!) {
    VisionLogger.log(level: .info, message: "Updating \(changedProps.count) props: [\(changedProps.joined(separator: ", "))]")

    let now = DispatchTime.now()
    currentConfigureCall = now

    cameraSession.configure { [self] config in
      // Check if we're still the latest call to configure { ... }
      guard currentConfigureCall == now else {
        // configure waits for a lock, and if a new call to update() happens in the meantime we can drop this one.
        // this works similar to how React implemented concurrent rendering, the newer call to update() has higher priority.
        VisionLogger.log(level: .info, message: "A new configure { ... } call arrived, aborting this one...")
        throw CameraConfiguration.AbortThrow.abort
      }

      // Input Camera Device
      config.cameraId = cameraId as? String
      config.isMirrored = isMirrored

      // Photo
      if photo {
        config.photo = .enabled(config: CameraConfiguration.Photo(qualityBalance: getPhotoQualityBalance(),
                                                                  enableDepthData: enableDepthData,
                                                                  enablePortraitEffectsMatte: enablePortraitEffectsMatteDelivery))
      } else {
        config.photo = .disabled
      }

      // Video/Frame Processor (a Frame Processor implies the video output even if `video` is false)
      if video || enableFrameProcessor {
        config.video = .enabled(config: CameraConfiguration.Video(pixelFormat: getPixelFormat(),
                                                                  enableBufferCompression: enableBufferCompression,
                                                                  enableHdr: videoHdr,
                                                                  enableFrameProcessor: enableFrameProcessor))
      } else {
        config.video = .disabled
      }

      // Audio
      if audio {
        config.audio = .enabled(config: CameraConfiguration.Audio())
      } else {
        config.audio = .disabled
      }

      // Code Scanner
      if let codeScannerOptions {
        let options = try CodeScannerOptions(fromJsValue: codeScannerOptions)
        config.codeScanner = .enabled(config: CameraConfiguration.CodeScanner(options: options))
      } else {
        config.codeScanner = .disabled
      }

      // Location tagging (only while the camera is active)
      config.enableLocation = enableLocation && isActive

      // Video Stabilization
      if let jsVideoStabilizationMode = videoStabilizationMode as? String {
        let videoStabilizationMode = try VideoStabilizationMode(jsValue: jsVideoStabilizationMode)
        config.videoStabilizationMode = videoStabilizationMode
      } else {
        config.videoStabilizationMode = .off
      }

      // Orientation
      if let jsOrientation = outputOrientation as? String {
        let outputOrientation = try OutputOrientation(jsValue: jsOrientation)
        config.outputOrientation = outputOrientation
      } else {
        config.outputOrientation = .device
      }

      // Format
      if let jsFormat = format {
        let format = try CameraDeviceFormat(jsValue: jsFormat)
        config.format = format
      } else {
        config.format = nil
      }

      // Side-Props
      config.minFps = minFps?.int32Value
      config.maxFps = maxFps?.int32Value
      config.enableLowLightBoost = lowLightBoost
      // NOTE(review): parses `torch` with `try` so an invalid value aborts the
      // transaction, whereas `getTorch()` above falls back to `.off` — confirm
      // the stricter behavior here is intended.
      config.torch = try Torch(jsValue: torch)

      // Zoom
      config.zoom = zoom.doubleValue

      // Exposure
      config.exposure = exposure.floatValue

      // isActive
      config.isActive = isActive
    }

    // Store `zoom` offset for native pinch-gesture
    if changedProps.contains("zoom") {
      pinchScaleOffset = zoom.doubleValue
    }

    // Prevent phone from going to sleep while the camera is active
    UIApplication.shared.isIdleTimerDisabled = isActive
  }

  /// Creates/destroys the `PreviewView` according to the `preview` prop and
  /// pushes the current `resizeMode` to it.
  func updatePreview() {
    if preview && previewView == nil {
      // Create PreviewView and add it
      previewView = cameraSession.createPreviewView(frame: frame)
      previewView!.delegate = self
      addSubview(previewView!)
    } else if !preview && previewView != nil {
      // Remove PreviewView and destroy it
      previewView?.removeFromSuperview()
      previewView = nil
    }

    if let previewView {
      // Update resizeMode from React; invalid values fall back to .cover
      let parsed = try? ResizeMode(jsValue: resizeMode as String)
      previewView.resizeMode = parsed ?? .cover
    }
  }

  // pragma MARK: Event Invokers

  /// Forwards a `CameraError` to JS, flattening an `.unknown` error's
  /// underlying `NSError` into a serializable "cause" dictionary.
  func onError(_ error: CameraError) {
    VisionLogger.log(level: .error, message: "Invoking onError(): \(error.message)")

    var causeDictionary: [String: Any]?
    if case let .unknown(_, cause) = error,
       let cause = cause {
      causeDictionary = [
        "code": cause.code,
        "domain": cause.domain,
        "message": cause.description,
        "details": cause.userInfo,
      ]
    }
    onErrorEvent?([
      "code": error.code,
      "message": error.message,
      "cause": causeDictionary ?? NSNull(),
    ])
  }

  func onSessionInitialized() {
    onInitializedEvent?([:])
  }

  func onCameraStarted() {
    onStartedEvent?([:])
  }

  func onCameraStopped() {
    onStoppedEvent?([:])
  }

  func onPreviewStarted() {
    onPreviewStartedEvent?([:])
  }

  func onPreviewStopped() {
    onPreviewStoppedEvent?([:])
  }

  func onCaptureShutter(shutterType: ShutterType) {
    onShutterEvent?([
      "type": shutterType.jsValue,
    ])
  }

  func onOutputOrientationChanged(_ outputOrientation: Orientation) {
    onOutputOrientationChangedEvent?([
      "outputOrientation": outputOrientation.jsValue,
    ])
  }

  func onPreviewOrientationChanged(_ previewOrientation: Orientation) {
    onPreviewOrientationChangedEvent?([
      "previewOrientation": previewOrientation.jsValue,
    ])
  }

  /// Per-frame hook from the video output: caches the frame for snapshots,
  /// ticks the FPS collector, and (when compiled in) invokes the Frame Processor.
  func onFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) {
    // Update latest frame that can be used for snapshot capture
    latestVideoFrame = Snapshot(imageBuffer: sampleBuffer, orientation: orientation)

    // Notify FPS Collector that we just had a Frame
    fpsSampleCollector.onTick()

    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
      if let frameProcessor = frameProcessor {
        // Call Frame Processor
        let frame = Frame(buffer: sampleBuffer,
                          orientation: orientation.imageOrientation,
                          isMirrored: isMirrored)
        frameProcessor.call(frame)
      }
    #endif
  }

  func onCodeScanned(codes: [CameraSession.Code], scannerFrame: CameraSession.CodeScannerFrame) {
    onCodeScannedEvent?([
      "codes": codes.map { $0.toJSValue() },
      "frame": scannerFrame.toJSValue(),
    ])
  }

  func onAverageFpsChanged(averageFps: Double) {
    onAverageFpsChangedEvent?([
      "averageFps": averageFps,
    ])
  }
}