Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion packages/camera/camera_avfoundation/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
## NEXT
## 0.9.13+3

* Ignore audio samples until the first video sample arrives.
* Fixes unawaited_futures violations.

## 0.9.13+2
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,8 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi
/// @return a test sample buffer.
extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);

/// Creates a test audio sample buffer.
/// @return a test audio sample buffer.
extern CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void);

NS_ASSUME_NONNULL_END
Original file line number Diff line number Diff line change
Expand Up @@ -47,3 +47,22 @@ CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
CFRelease(formatDescription);
return sampleBuffer;
}

/// Creates a dummy audio sample buffer (a 100-byte block described as 8-bit,
/// mono, 44.1 kHz linear PCM) for exercising audio capture callbacks in tests.
/// The caller owns the returned buffer and must CFRelease it.
CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void) {
  CMBlockBufferRef dataBuffer;
  CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, 100, kCFAllocatorDefault, NULL, 0,
                                     100, kCMBlockBufferAssureMemoryNowFlag, &dataBuffer);

  // {sampleRate, formatID, formatFlags, bytesPerPacket, framesPerPacket,
  //  bytesPerFrame, channelsPerFrame, bitsPerChannel}
  AudioStreamBasicDescription streamDescription = {44100, kAudioFormatLinearPCM, 0, 1, 1, 1, 1, 8};
  CMFormatDescriptionRef audioFormat;
  CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &streamDescription, 0, NULL, 0, NULL, NULL,
                                 &audioFormat);

  CMSampleBufferRef sampleBuffer;
  CMAudioSampleBufferCreateReadyWithPacketDescriptions(
      kCFAllocatorDefault, dataBuffer, audioFormat, 1, kCMTimeZero, NULL, &sampleBuffer);

  // The sample buffer retains what it needs; balance our create calls.
  CFRelease(dataBuffer);
  CFRelease(audioFormat);
  return sampleBuffer;
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,31 +11,98 @@

/// Includes test cases related to sample buffer handling for FLTCam class.
@interface FLTCamSampleBufferTests : XCTestCase

/// Serial queue used to create the camera; sample buffer callbacks are expected on it.
@property(readonly, nonatomic) dispatch_queue_t captureSessionQueue;
/// Camera under test, created fresh for each test in -setUp.
@property(readonly, nonatomic) FLTCam *camera;
/// Shared test video sample buffer; created in -setUp, released in -tearDown.
@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;
@end

@implementation FLTCamSampleBufferTests

/// Creates fresh camera and sample-buffer fixtures before each test.
- (void)setUp {
  // Lifecycle overrides must call through to super.
  [super setUp];
  _captureSessionQueue = dispatch_queue_create("testing", NULL);
  _camera = FLTCreateCamWithCaptureSessionQueue(_captureSessionQueue);
  _sampleBuffer = FLTCreateTestSampleBuffer();
}

/// Releases the sample buffer created in -setUp after each test.
- (void)tearDown {
  CFRelease(_sampleBuffer);
  // Call super last so our own cleanup runs first.
  [super tearDown];
}

/// The video output's sample buffer callback queue must be the same queue the
/// camera was created with, so frames are handled on the capture session queue.
- (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue {
  XCTAssertEqual(_captureSessionQueue, _camera.captureVideoOutput.sampleBufferCallbackQueue,
                 @"Sample buffer callback queue must be the capture session queue.");
}

/// Verifies that -copyPixelBuffer hands out the pixel buffer of the most
/// recently captured video sample.
- (void)testCopyPixelBuffer {
  CVPixelBufferRef capturedPixelBuffer = CMSampleBufferGetImageBuffer(_sampleBuffer);
  // Mimic the sample buffer callback that fires when a new video sample is captured.
  [_camera captureOutput:_camera.captureVideoOutput
      didOutputSampleBuffer:_sampleBuffer
             fromConnection:OCMClassMock([AVCaptureConnection class])];
  CVPixelBufferRef deliveredPixelBuffer = [_camera copyPixelBuffer];
  XCTAssertEqual(deliveredPixelBuffer, capturedPixelBuffer,
                 @"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API.");
  // Per the copy/create rule implied by the API name, the caller owns the
  // returned buffer and must release it.
  CFRelease(deliveredPixelBuffer);
}

/// Audio samples that arrive before the first video sample must be dropped once
/// recording starts; feeding the interleaved pattern "aaavava" must append only
/// "vava" (the three leading audio samples are ignored).
- (void)testFirstAppendedSampleShouldBeVideo {
  id connectionMock = OCMClassMock([AVCaptureConnection class]);

  // Stub the asset writer so no real file I/O happens, and track its status so
  // the camera observes the Unknown -> Writing transition from startWriting.
  id writerMock = OCMClassMock([AVAssetWriter class]);
  OCMStub([writerMock alloc]).andReturn(writerMock);
  OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]])
      .andReturn(writerMock);
  __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown;
  OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) {
    status = AVAssetWriterStatusWriting;
  });
  OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) {
    [invocation setReturnValue:&status];
  });

  // Records the order in which samples are appended: "v" for video, "a" for audio.
  __block NSString *writtenSamples = @"";

  id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]);
  OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY
                                                sourcePixelBufferAttributes:OCMOCK_ANY])
      .andReturn(videoMock);
  OCMStub([videoMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        writtenSamples = [writtenSamples stringByAppendingString:@"v"];
      });

  id audioMock = OCMClassMock([AVAssetWriterInput class]);
  OCMStub([audioMock assetWriterInputWithMediaType:[OCMArg isEqual:AVMediaTypeAudio]
                                    outputSettings:OCMOCK_ANY])
      .andReturn(audioMock);
  OCMStub([audioMock isReadyForMoreMediaData]).andReturn(YES);
  OCMStub([audioMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) {
    writtenSamples = [writtenSamples stringByAppendingString:@"a"];
  });

  FLTThreadSafeFlutterResult *result =
      [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id result){
      }];
  [_camera startVideoRecordingWithResult:result];

  // Interleaved capture pattern: audio first, then alternating video/audio.
  // String literals are immutable, so bind through a const pointer, and hoist
  // the strlen out of the loop condition.
  static const char *const kSamples = "aaavava";
  const size_t sampleCount = strlen(kSamples);

  CMSampleBufferRef audioSampleBuffer = FLTCreateTestAudioSampleBuffer();
  for (size_t i = 0; i < sampleCount; i++) {
    if (kSamples[i] == 'v') {
      [_camera captureOutput:_camera.captureVideoOutput
          didOutputSampleBuffer:_sampleBuffer
                 fromConnection:connectionMock];
    } else {
      // A nil output marks this sample as non-video (audio) to the camera.
      [_camera captureOutput:nil
          didOutputSampleBuffer:audioSampleBuffer
                 fromConnection:connectionMock];
    }
  }
  CFRelease(audioSampleBuffer);

  XCTAssertEqualObjects(writtenSamples, @"vava", @"First appended sample must be video.");
}

@end
5 changes: 5 additions & 0 deletions packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,11 @@ - (void)captureOutput:(AVCaptureOutput *)output
return;
}

// ignore audio samples until the first video sample arrives to avoid black frames
if (_videoWriter.status != AVAssetWriterStatusWriting && output != _captureVideoOutput) {
return;
}

CFRetain(sampleBuffer);
CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

Expand Down
2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.13+2
version: 0.9.13+3

environment:
sdk: ">=2.18.0 <4.0.0"
Expand Down