Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
## 0.9.13+5

* Ignores audio samples until the first video sample arrives.

## 0.9.13+4

* Adds pub topics to package metadata.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,8 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi
/// @return a test sample buffer.
extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);

/// Creates a test audio sample buffer.
/// @return a test audio sample buffer.
extern CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void);

NS_ASSUME_NONNULL_END
Original file line number Diff line number Diff line change
Expand Up @@ -47,3 +47,22 @@ CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
CFRelease(formatDescription);
return sampleBuffer;
}

/// Builds a minimal, ready-to-use audio sample buffer for tests.
///
/// The buffer wraps 100 bytes of (uninitialized) memory and describes it as
/// one second's worth of 8-bit mono linear PCM packets at 44.1 kHz starting at
/// time zero. The caller owns the returned buffer and must CFRelease it.
CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void) {
  // Backing storage: 100 bytes allocated eagerly by CoreMedia.
  CMBlockBufferRef dataBuffer = NULL;
  CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, 100, kCFAllocatorDefault, NULL, 0,
                                     100, kCMBlockBufferAssureMemoryNowFlag, &dataBuffer);

  // 8-bit mono linear PCM, one byte per frame and one frame per packet.
  // Designated initializers make the field meanings explicit; all fields not
  // listed (mReserved) are zero, matching the positional form.
  AudioStreamBasicDescription streamDescription = {
      .mSampleRate = 44100,
      .mFormatID = kAudioFormatLinearPCM,
      .mFormatFlags = 0,
      .mBytesPerPacket = 1,
      .mFramesPerPacket = 1,
      .mBytesPerFrame = 1,
      .mChannelsPerFrame = 1,
      .mBitsPerChannel = 8,
  };
  CMFormatDescriptionRef audioFormat = NULL;
  CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &streamDescription, 0, NULL, 0, NULL, NULL,
                                 &audioFormat);

  CMSampleBufferRef sampleBuffer = NULL;
  CMAudioSampleBufferCreateReadyWithPacketDescriptions(
      kCFAllocatorDefault, dataBuffer, audioFormat, 1, kCMTimeZero, NULL, &sampleBuffer);

  // The sample buffer retains what it needs; release our local references.
  CFRelease(dataBuffer);
  CFRelease(audioFormat);
  return sampleBuffer;
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,63 @@ - (void)testCopyPixelBuffer {
CFRelease(deliveriedPixelBuffer);
}

/// Verifies that audio sample buffers delivered before the first video sample
/// are dropped while recording, so the recording does not open with audio-only
/// (black) frames. See https://github.com/flutter/flutter/issues/57831.
- (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples {
  FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL));
  CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer();
  CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer();

  id connectionMock = OCMClassMock([AVCaptureConnection class]);

  // Stub the asset writer so its status only becomes "writing" after
  // startWriting is invoked; the code under test gates appends on this status.
  id writerMock = OCMClassMock([AVAssetWriter class]);
  OCMStub([writerMock alloc]).andReturn(writerMock);
  OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]])
      .andReturn(writerMock);
  __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown;
  OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) {
    status = AVAssetWriterStatusWriting;
  });
  OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) {
    [invocation setReturnValue:&status];
  });

  // Accumulates "video"/"audio" markers in the order samples reach the writer
  // inputs, so the append order can be asserted at the end.
  __block NSArray *writtenSamples = @[];

  // Route video appends through a mocked pixel-buffer adaptor and record them.
  id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]);
  OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY
                                                sourcePixelBufferAttributes:OCMOCK_ANY])
      .andReturn(videoMock);
  OCMStub([videoMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        writtenSamples = [writtenSamples arrayByAddingObject:@"video"];
      });

  // Route audio appends through a mocked writer input and record them.
  id audioMock = OCMClassMock([AVAssetWriterInput class]);
  OCMStub([audioMock assetWriterInputWithMediaType:[OCMArg isEqual:AVMediaTypeAudio]
                                    outputSettings:OCMOCK_ANY])
      .andReturn(audioMock);
  OCMStub([audioMock isReadyForMoreMediaData]).andReturn(YES);
  OCMStub([audioMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) {
    writtenSamples = [writtenSamples arrayByAddingObject:@"audio"];
  });

  FLTThreadSafeFlutterResult *result =
      [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id result){
      }];
  [cam startVideoRecordingWithResult:result];

  // Deliver two audio samples before any video: both must be ignored. Only the
  // audio sample arriving after the first video sample should be appended.
  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
  [cam captureOutput:cam.captureVideoOutput
      didOutputSampleBuffer:videoSample
             fromConnection:connectionMock];
  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];

  NSArray *expectedSamples = @[ @"video", @"audio" ];
  XCTAssertEqualObjects(writtenSamples, expectedSamples, @"First appended sample must be video.");

  CFRelease(videoSample);
  CFRelease(audioSample);
}

@end
6 changes: 6 additions & 0 deletions packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,12 @@ - (void)captureOutput:(AVCaptureOutput *)output
return;
}

  // Ignore audio samples until the first video sample arrives, to avoid black frames at the
  // start of the recording. https://github.com/flutter/flutter/issues/57831
if (_videoWriter.status != AVAssetWriterStatusWriting && output != _captureVideoOutput) {
return;
}

CFRetain(sampleBuffer);
CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

Expand Down
2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.13+4
version: 0.9.13+5

environment:
sdk: ">=2.19.0 <4.0.0"
Expand Down