[camera_avfoundation] ignore audio samples until first video sample (#4587)

Samples from the iOS video and audio inputs should be regarded as arriving from independent streams because they in fact come from independent sources: video comes from an `AVCaptureVideoDataOutput` instance while audio comes from an `AVCaptureAudioDataOutput` instance. It is therefore possible for one of them to be initialized sooner than the other, so the first few samples may be audio-only; the video file would then start without image data and be black until the first video sample arrives. This patch addresses that by ignoring audio samples until the first video sample arrives (alternatively, it could wait until both video and audio samples are actively arriving).

Fix https://github.com/flutter/flutter/issues/57831
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index e8bd622..011d9c5 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.13+5
+
+* Ignores audio samples until the first video sample arrives.
+
 ## 0.9.13+4
 
 * Adds pub topics to package metadata.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
index f2d4611..0c7e62f 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
@@ -15,4 +15,8 @@
 /// @return a test sample buffer.
 extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);
 
+/// Creates a test audio sample buffer.
+/// @return a test audio sample buffer.
+extern CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void);
+
 NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
index b42aa34..bb98f7c 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -47,3 +47,22 @@
   CFRelease(formatDescription);
   return sampleBuffer;
 }
+
+CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void) {
+  CMBlockBufferRef blockBuffer;
+  CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, 100, kCFAllocatorDefault, NULL, 0,
+                                     100, kCMBlockBufferAssureMemoryNowFlag, &blockBuffer);
+
+  CMFormatDescriptionRef formatDescription;
+  AudioStreamBasicDescription basicDescription = {44100, kAudioFormatLinearPCM, 0, 1, 1, 1, 1, 8};
+  CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &basicDescription, 0, NULL, 0, NULL, NULL,
+                                 &formatDescription);
+
+  CMSampleBufferRef sampleBuffer;
+  CMAudioSampleBufferCreateReadyWithPacketDescriptions(
+      kCFAllocatorDefault, blockBuffer, formatDescription, 1, kCMTimeZero, NULL, &sampleBuffer);
+
+  CFRelease(blockBuffer);
+  CFRelease(formatDescription);
+  return sampleBuffer;
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
index 94426ab..6f0a4ed 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
@@ -38,4 +38,63 @@
   CFRelease(deliveriedPixelBuffer);
 }
 
+- (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples {
+  FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL));
+  CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer();
+  CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer();
+
+  id connectionMock = OCMClassMock([AVCaptureConnection class]);
+
+  id writerMock = OCMClassMock([AVAssetWriter class]);
+  OCMStub([writerMock alloc]).andReturn(writerMock);
+  OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]])
+      .andReturn(writerMock);
+  __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown;
+  OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) {
+    status = AVAssetWriterStatusWriting;
+  });
+  OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) {
+    [invocation setReturnValue:&status];
+  });
+
+  __block NSArray *writtenSamples = @[];
+
+  id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]);
+  OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY
+                                                sourcePixelBufferAttributes:OCMOCK_ANY])
+      .andReturn(videoMock);
+  OCMStub([videoMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero])
+      .ignoringNonObjectArgs()
+      .andDo(^(NSInvocation *invocation) {
+        writtenSamples = [writtenSamples arrayByAddingObject:@"video"];
+      });
+
+  id audioMock = OCMClassMock([AVAssetWriterInput class]);
+  OCMStub([audioMock assetWriterInputWithMediaType:[OCMArg isEqual:AVMediaTypeAudio]
+                                    outputSettings:OCMOCK_ANY])
+      .andReturn(audioMock);
+  OCMStub([audioMock isReadyForMoreMediaData]).andReturn(YES);
+  OCMStub([audioMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) {
+    writtenSamples = [writtenSamples arrayByAddingObject:@"audio"];
+  });
+
+  FLTThreadSafeFlutterResult *result =
+      [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id result){
+      }];
+  [cam startVideoRecordingWithResult:result];
+
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+  [cam captureOutput:cam.captureVideoOutput
+      didOutputSampleBuffer:videoSample
+             fromConnection:connectionMock];
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+
+  NSArray *expectedSamples = @[ @"video", @"audio" ];
+  XCTAssertEqualObjects(writtenSamples, expectedSamples, @"First appended sample must be video.");
+
+  CFRelease(videoSample);
+  CFRelease(audioSample);
+}
+
 @end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index 31bffc9..e0f0300 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -500,6 +500,12 @@
       return;
     }
 
+    // Ignore audio samples until the first video sample arrives to avoid black frames.
+    // https://github.com/flutter/flutter/issues/57831
+    if (_videoWriter.status != AVAssetWriterStatusWriting && output != _captureVideoOutput) {
+      return;
+    }
+
     CFRetain(sampleBuffer);
     CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
 
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index d986235..e13e957 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.13+4
+version: 0.9.13+5
 
 environment:
   sdk: ">=2.19.0 <4.0.0"