-
Notifications
You must be signed in to change notification settings - Fork 0
/
VideoCompressor.m
133 lines (129 loc) · 7.06 KB
/
VideoCompressor.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
//
// VideoCompressor.m
// teemoApp
//
// Created by Muhammad Zeeshan on 12/05/2017.
// Copyright © 2017 Logicon. All rights reserved.
//
#import "VideoCompressor.h"
#import <stdlib.h>
#import "teemoApp-Swift.h"
@implementation VideoCompressor

/// Re-encodes the video at `inputUrl` into a QuickTime movie written to
/// `outputUrl`, compressing the video track with the receiver's
/// `_videoSettings` (declared in the header) and passing the audio track
/// through unchanged. Runs asynchronously; `completionBlock` is invoked with
/// YES on success and NO on any failure. Progress is reported to
/// `[Utility updateProgressWithValue:]` during the video pass.
///
/// @param outputUrl       Destination file path (overwritten target must not exist).
/// @param inputUrl        Source movie file path.
/// @param completionBlock Called once with the overall result; may be nil.
- (void)exportAsynchronouslyWithCompletionHandler:(NSString *)outputUrl
                                         inputurl:(NSString *)inputUrl
                                       completion:(void (^)(BOOL success))completionBlock {
    // Invoke the caller's completion block safely — it may legitimately be nil.
    void (^finish)(BOOL) = ^(BOOL success) {
        if (completionBlock) {
            completionBlock(success);
        }
    };

    NSError *writerError = nil;
    AVAssetWriter *videoWriter = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:outputUrl]
                                                          fileType:AVFileTypeQuickTimeMovie
                                                             error:&writerError];
    if (!videoWriter) {
        NSLog(@"VideoCompressor: could not create writer: %@", writerError);
        finish(NO);
        return;
    }

    AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:inputUrl] options:nil];

    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:_videoSettings];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    NSError *readerError = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&readerError];
    // firstObject is nil-safe; objectAtIndex:0 would throw on an asset
    // without a video track.
    AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!reader || !videoTrack) {
        NSLog(@"VideoCompressor: no readable video track: %@", readerError);
        finish(NO);
        return;
    }
    // Preserve the source orientation (e.g. portrait phone footage).
    videoWriterInput.transform = videoTrack.preferredTransform;

    NSDictionary *videoOptions =
        @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
    AVAssetReaderTrackOutput *videoReaderOutput =
        [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
    [reader addOutput:videoReaderOutput];

    // Audio is optional: only build the pass-through pipeline when a track
    // exists (the previous objectAtIndex:0 crashed on silent assets).
    AVAssetTrack *audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    AVAssetWriterInput *audioWriterInput = nil;
    AVAssetReader *audioReader = nil;
    AVAssetReaderOutput *audioReaderOutput = nil;
    if (audioTrack) {
        NSError *audioError = nil;
        audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                              outputSettings:nil];
        audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:&audioError];
        audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                                                       outputSettings:nil];
        [audioReader addOutput:audioReaderOutput];
        NSParameterAssert(audioWriterInput);
        NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [videoWriter addInput:audioWriterInput];
    }

    if (![videoWriter startWriting]) {
        NSLog(@"VideoCompressor: startWriting failed: %@", videoWriter.error);
        finish(NO);
        return;
    }
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];

    // Hoisted: the track duration is loop-invariant.
    double duration = CMTimeGetSeconds(videoTrack.timeRange.duration);

    dispatch_queue_t processingQueue = dispatch_queue_create("assetVideoWriterQueue", NULL);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
        while ([videoWriterInput isReadyForMoreMediaData]) {
            CMSampleBufferRef sampleBuffer = NULL;
            if ([reader status] == AVAssetReaderStatusReading &&
                (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                double progress = duration > 0 ? CMTimeGetSeconds(pts) / duration : 0.0;
                [Utility updateProgressWithValue:progress];
                BOOL appended = [videoWriterInput appendSampleBuffer:sampleBuffer];
                CFRelease(sampleBuffer);
                if (!appended) {
                    [reader cancelReading];
                    break;
                }
            } else {
                [videoWriterInput markAsFinished];
                if ([reader status] != AVAssetReaderStatusCompleted) {
                    [videoWriter cancelWriting];
                    finish(NO);
                    break;
                }
                // Video pass done. No audio track → finish immediately.
                if (!audioTrack) {
                    [videoWriter finishWritingWithCompletionHandler:^{
                        NSLog(@"completed");
                        finish(YES);
                    }];
                    break;
                }
                // NOTE: a second startSessionAtSourceTime: used to be issued
                // here; AVAssetWriter raises on repeated calls, so it is gone.
                [audioReader startReading];
                dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
                    while (audioWriterInput.readyForMoreMediaData) {
                        CMSampleBufferRef nextBuffer = NULL;
                        if ([audioReader status] == AVAssetReaderStatusReading &&
                            (nextBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                            [audioWriterInput appendSampleBuffer:nextBuffer];
                            // copyNextSampleBuffer transfers ownership; this
                            // buffer was leaked in the original implementation.
                            CFRelease(nextBuffer);
                        } else {
                            [audioWriterInput markAsFinished];
                            if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                [videoWriter finishWritingWithCompletionHandler:^{
                                    NSLog(@"completed");
                                    finish(YES);
                                }];
                            } else {
                                [videoWriter cancelWriting];
                                finish(NO);
                            }
                            // Exit the drain loop so markAsFinished/completion
                            // cannot be re-entered (missing in the original).
                            break;
                        }
                    }
                }];
                break;
            }
        }
    }];
}

@end