//
// ImageSnap.m
// ImageSnap
//
// Created by Robert Harder on 9/10/09.
//
#import "ImageSnap.h"
NSString *const VERSION = @"0.2.16";
@interface ImageSnap()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic, strong) AVCaptureStillImageOutput *captureStillImageOutput;
@property (nonatomic, assign) CVImageBufferRef currentImageBuffer;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) NSDateFormatter *dateFormatter;
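// When libdispatch objects are Objective-C objects (OS_OBJECT_HAVE_OBJC_SUPPORT,
// the usual case under ARC on modern SDKs), they participate in ARC and can be
// held with `strong`; otherwise they are plain C pointers and must be `assign`.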
#if OS_OBJECT_HAVE_OBJC_SUPPORT == 1
@property (nonatomic, strong) dispatch_queue_t imageQueue;
@property (nonatomic, strong) dispatch_semaphore_t semaphore;
#else
@property (nonatomic, assign) dispatch_queue_t imageQueue;
@property (nonatomic, assign) dispatch_semaphore_t semaphore;
#endif
@end
@implementation ImageSnap
#pragma mark - Object Lifecycle
- (instancetype)init {
    self = [super init];
    if (self) {
        _dateFormatter = [NSDateFormatter new];
        _dateFormatter.dateFormat = @"yyyy-MM-dd_HH-mm-ss.SSS";
        _imageQueue = dispatch_queue_create("Image Queue", NULL);
        _semaphore = dispatch_semaphore_create(0);
    }
    return self;
}
- (void)dealloc {
    [self.captureSession stopRunning];
    // CVBufferRelease, unlike CFRelease, is NULL-safe, so this is fine
    // even if no frame was ever captured.
    CVBufferRelease(self.currentImageBuffer);
}
#pragma mark - Public Interface
+ (void)setVerbose:(BOOL)verbose {
    g_verbose = verbose;
}

+ (void)setQuiet:(BOOL)quiet {
    g_quiet = quiet;
}
/**
* Returns all attached AVCaptureDevice objects that have video.
* This includes video-only devices (AVMediaTypeVideo) and
* audio/video devices (AVMediaTypeMuxed).
*
* @return array of video devices
*/
+ (NSArray *)videoDevices {
    NSMutableArray *results = [NSMutableArray new];
    [results addObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]];
    [results addObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]];
    return results;
}
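
// Example - listing every attached camera by name (a sketch; device names
// vary by machine):
//
//     for (AVCaptureDevice *device in [ImageSnap videoDevices]) {
//         printf("%s\n", device.localizedName.UTF8String);
//     }
//
// Note: +devicesWithMediaType: is deprecated as of macOS 10.15;
// AVCaptureDeviceDiscoverySession is the modern replacement.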
// Returns the default video device or nil if none found.
+ (AVCaptureDevice *)defaultVideoDevice {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device == nil) {
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed];
    }
    return device;
}
// Returns the named capture device or nil if not found.
+ (AVCaptureDevice *)deviceNamed:(NSString *)name {
    AVCaptureDevice *result = nil;
    NSArray *devices = [ImageSnap videoDevices];

    // First check for an exact name match
    for (AVCaptureDevice *device in devices) {
        if ([name isEqualToString:device.localizedName]) {
            result = device;
            break;
        }
    }

    // If there is no exact match, then try for a substring match
    if (result == nil) {
        for (AVCaptureDevice *device in devices) {
            if ([device.localizedName containsString:name]) {
                result = device;
                break;
            }
        }
    }

    return result;
}
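
// Example: [ImageSnap deviceNamed:@"FaceTime"] would match a device whose
// localized name is "FaceTime HD Camera" via the substring fallback.
// (Device names are machine-specific; this one is illustrative.)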
- (void)saveSingleSnapshotFrom:(AVCaptureDevice *)device
                        toFile:(NSString *)path
                    withWarmup:(NSNumber *)warmup
                 withTimelapse:(NSNumber *)timelapse
                     withLimit:(NSNumber *)limit {
    double interval = timelapse == nil ? -1 : timelapse.doubleValue;
    unsigned long long timelapseCount = limit == nil ? ULLONG_MAX : limit.unsignedLongLongValue;

    verbose("Starting device...");
    verbose("Device started.\n");

    if (warmup == nil) {
        // Skip warmup
        verbose("Skipping warmup period.\n");
    } else {
        double delay = warmup.doubleValue;
        verbose("Delaying %.2lf seconds for warmup...", delay);
        NSDate *now = [[NSDate alloc] init];
        [[NSRunLoop currentRunLoop] runUntilDate:[now dateByAddingTimeInterval:delay]];
        verbose("Warmup complete.\n");
    }

    if (interval > 0) {
        verbose("Time lapse: snapping every %.2lf seconds to current directory.\n", interval);

        // Loop indefinitely taking pictures.
        // If the filename exists, skip to the next number.
        // Mostly the purpose of this is to support interrupted captures:
        // if you already took 100 pictures and have to restart the program,
        // this ensures that you pick up at 101.
        NSString *fileNameWithSeq;
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if (path == nil) {
            path = @"./";
        }
        path = [path stringByStandardizingPath];
        [fileManager createDirectoryAtPath:path
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
        console("Saving images to %s\n", [path UTF8String]);

        for (unsigned long long seq = 1; seq < ULLONG_MAX; seq++) { // 64-bit counter - a lot of pictures
            if (seq > timelapseCount) {
                dispatch_semaphore_signal(self->_semaphore); // hacky, ensures we have a semaphore to wait for below
                break;
            }
            fileNameWithSeq = [path stringByAppendingPathComponent:[self fileNameWithSequenceNumber:seq]];
            if (![fileManager fileExistsAtPath:fileNameWithSeq]) {
                // Capture a frame and write it asynchronously
                [self takeSnapshotWithFilename:fileNameWithSeq];
                // Sleep until the next capture is due
                [[NSRunLoop currentRunLoop] runUntilDate:[[NSDate date] dateByAddingTimeInterval:interval]];
            } // end if: file does not already exist
            else {
                verbose("Skipping %s\n", [fileNameWithSeq UTF8String]);
            }
        } // end for: loop indefinitely
    } else {
        [self takeSnapshotWithFilename:path]; // Capture a frame
    }

    dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_FOREVER);
    [self stopSession];
}
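
// Typical call sequence (a sketch - this method does not create the capture
// session itself, so -setUpSessionWithDevice: must be called first):
//
//     ImageSnap *snap = [ImageSnap new];
//     AVCaptureDevice *device = [ImageSnap defaultVideoDevice];
//     [snap setUpSessionWithDevice:device];
//     [snap saveSingleSnapshotFrom:device
//                           toFile:@"snapshot.jpg"
//                       withWarmup:@1.0
//                    withTimelapse:nil
//                        withLimit:nil];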
- (void)setUpSessionWithDevice:(AVCaptureDevice *)device {
    NSError *error;

    // Create the capture session
    self.captureSession = [AVCaptureSession new];
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
        self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
    }

    // Create input object from the device
    self.captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!error && [self.captureSession canAddInput:self.captureDeviceInput]) {
        [self.captureSession addInput:self.captureDeviceInput];
    }

    self.captureStillImageOutput = [AVCaptureStillImageOutput new];
    // self.captureStillImageOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG }; // Deprecated
    if ([self.captureSession canAddOutput:self.captureStillImageOutput]) {
        [self.captureSession addOutput:self.captureStillImageOutput];
    }

    // Find the video connection on the still-image output
    for (AVCaptureConnection *connection in self.captureStillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([port.mediaType isEqual:AVMediaTypeVideo]) {
                self.videoConnection = connection;
                break;
            }
        }
        if (self.videoConnection) { break; }
    }

    [self.captureSession startRunning];
}
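
// Note: AVCaptureStillImageOutput is deprecated as of macOS 10.15;
// AVCapturePhotoOutput and -capturePhotoWithSettings:delegate: are the
// modern replacement for new code.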
#pragma mark - Internal Methods
/**
 * Asynchronously captures a still image from the running session
 * and writes it as JPEG data to the given file path.
 * Requires the capture session to be running.
 */
- (void)takeSnapshotWithFilename:(NSString *)filename {
    [self.captureStillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoConnection
                                                              completionHandler:
     ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
         if (imageDataSampleBuffer == NULL || error != nil) {
             // A failed capture still signals the semaphore so the caller
             // waiting in -saveSingleSnapshotFrom:... is not blocked forever.
             dispatch_semaphore_signal(self->_semaphore);
             return;
         }
         NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
         dispatch_async(self.imageQueue, ^{
             // Write off the calling thread; signal once the data is on disk
             [imageData writeToFile:filename atomically:YES];
             dispatch_semaphore_signal(self->_semaphore);
         });
         console("%s\n", [filename UTF8String]);
     }];
}
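
// Semaphore protocol: each capture signals `semaphore` exactly once - after
// the image data has been written, or immediately on a failed capture - and
// -saveSingleSnapshotFrom:... performs a single matching wait before
// stopping the session.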
/**
* Blocks until session is stopped.
*/
- (void)stopSession {
    verbose("Stopping session...\n");

    // Make sure we've stopped
    while (self.captureSession != nil) {
        verbose("\tCaptureSession != nil\n");
        verbose("\tStopping CaptureSession...");
        [self.captureSession stopRunning];
        verbose("Done.\n");

        if ([self.captureSession isRunning]) {
            verbose("[captureSession isRunning]");
            [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
        } else {
            verbose("\tShutting down 'stopSession(..)'");
            self.captureSession = nil;
            self.captureDeviceInput = nil;
            self.captureStillImageOutput = nil;
        }
    }
}
- (NSString *)fileNameWithSequenceNumber:(unsigned long)sequenceNumber {
    // NSDate *now = [NSDate date];
    // NSString *nowstr = [self.dateFormatter stringFromDate:now];
    // return [NSString stringWithFormat:@"snapshot-%05lu-%s.jpg", sequenceNumber, nowstr.UTF8String];
    return [NSString stringWithFormat:@"snapshot-%05lu.jpg", sequenceNumber];
}
@end