[camera] Fixed a crash when streaming on iOS (#4520)

diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index d19b500..03b9293 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.4+18
+
+* Fixes a crash on iOS when streaming on low-performance devices.
+
 ## 0.9.4+17
 
 * Removes obsolete information from README, and adds OS support table.
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
index 6c17150..37f56d0 100644
--- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
@@ -15,6 +15,7 @@
 		25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; };
 		334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; };
 		3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
+		788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; };
 		978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
 		97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
 		97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
@@ -70,6 +71,7 @@
 		1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
 		3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
 		59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
+		788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = "<group>"; };
 		7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
 		7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
 		7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
@@ -142,6 +144,7 @@
 				F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
 				E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */,
 				E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
+				788A065927B0E02900533D74 /* StreamingTest.m */,
 			);
 			path = RunnerTests;
 			sourceTree = "<group>";
@@ -416,6 +419,7 @@
 				E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
 				334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
 				E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
+				788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
 				E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
 				E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
 				E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */,
diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m
new file mode 100644
index 0000000..1843cce
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import camera.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+@interface StreamingTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;
+@end
+
+@implementation StreamingTests
+
+- (void)setUp {
+  dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
+  _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+  _sampleBuffer = FLTCreateTestSampleBuffer();
+}
+
+- (void)tearDown {
+  CFRelease(_sampleBuffer);
+}
+
+- (void)testExceedMaxStreamingPendingFramesCount {
+  XCTestExpectation *streamingExpectation = [self
+      expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];
+
+  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+    [streamingExpectation fulfill];
+  });
+
+  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+                                                                       object:_camera
+                                                                expectedValue:@YES];
+  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+  XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+  streamingExpectation.expectedFulfillmentCount = 4;
+  for (int i = 0; i < 10; i++) {
+    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+  }
+
+  [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+- (void)testReceivedImageStreamData {
+  XCTestExpectation *streamingExpectation =
+      [self expectationWithDescription:
+                @"Must be able to call the handler again when receivedImageStreamData is called"];
+
+  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+    [streamingExpectation fulfill];
+  });
+
+  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+                                                                       object:_camera
+                                                                expectedValue:@YES];
+  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+  XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+  streamingExpectation.expectedFulfillmentCount = 5;
+  for (int i = 0; i < 10; i++) {
+    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+  }
+
+  [_camera receivedImageStreamData];
+  [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+
+  [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m
index 634aa69..c0a3833 100644
--- a/packages/camera/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.m
@@ -162,6 +162,9 @@
   } else if ([@"stopImageStream" isEqualToString:call.method]) {
     [_camera stopImageStream];
     [result sendSuccess];
+  } else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
+    [_camera receivedImageStreamData];
+    [result sendSuccess];
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h
index 0cd135e..8a5dafa 100644
--- a/packages/camera/camera/ios/Classes/FLTCam.h
+++ b/packages/camera/camera/ios/Classes/FLTCam.h
@@ -62,6 +62,14 @@
 - (void)applyFocusMode;
 
 /**
+ * Acknowledges the receipt of one image stream frame.
+ *
+ * This should be called each time a frame is received. Failing to call it may
+ * cause later frames to be dropped instead of streamed.
+ */
+- (void)receivedImageStreamData;
+
+/**
  * Applies FocusMode on the AVCaptureDevice.
  *
  * If the @c focusMode is set to FocusModeAuto the AVCaptureDevice is configured to use
diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m
index 30c177b..7af505b 100644
--- a/packages/camera/camera/ios/Classes/FLTCam.m
+++ b/packages/camera/camera/ios/Classes/FLTCam.m
@@ -10,14 +10,6 @@
 @import CoreMotion;
 #import <libkern/OSAtomic.h>
 
-@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
-// The queue on which `eventSink` property should be accessed
-@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
-// `eventSink` property should be accessed on `captureSessionQueue`.
-// The block itself should be invoked on the main queue.
-@property FlutterEventSink eventSink;
-@end
-
 @implementation FLTImageStreamHandler
 
 - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
@@ -68,7 +60,13 @@
 @property(assign, nonatomic) BOOL videoIsDisconnected;
 @property(assign, nonatomic) BOOL audioIsDisconnected;
 @property(assign, nonatomic) BOOL isAudioSetup;
-@property(assign, nonatomic) BOOL isStreamingImages;
+
+/// Number of frames currently pending processing.
+@property(assign, nonatomic) int streamingPendingFramesCount;
+
+/// Maximum number of frames pending processing.
+@property(assign, nonatomic) int maxStreamingPendingFramesCount;
+
 @property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
 @property(assign, nonatomic) CMTime lastVideoSampleTime;
 @property(assign, nonatomic) CMTime lastAudioSampleTime;
@@ -135,6 +133,11 @@
   _videoFormat = kCVPixelFormatType_32BGRA;
   _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
 
+  // To limit memory consumption, limit the number of frames pending processing.
+  // After some testing, 4 was determined to be the best maximum value.
+  // https://github.com/flutter/plugins/pull/4520#discussion_r766335637
+  _maxStreamingPendingFramesCount = 4;
+
   NSError *localError = nil;
   _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
                                                              error:&localError];
@@ -401,7 +404,8 @@
   }
   if (_isStreamingImages) {
     FlutterEventSink eventSink = _imageStreamHandler.eventSink;
-    if (eventSink) {
+    if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) {
+      self.streamingPendingFramesCount++;
       CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
       // Must lock base address before accessing the pixel data
       CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
@@ -898,6 +902,13 @@
 }
 
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+  [self startImageStreamWithMessenger:messenger
+                   imageStreamHandler:[[FLTImageStreamHandler alloc]
+                                          initWithCaptureSessionQueue:_captureSessionQueue]];
+}
+
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+                   imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
   if (!_isStreamingImages) {
     FlutterEventChannel *eventChannel =
         [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
@@ -905,12 +916,12 @@
     FLTThreadSafeEventChannel *threadSafeEventChannel =
         [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
 
-    _imageStreamHandler =
-        [[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
+    _imageStreamHandler = imageStreamHandler;
     [threadSafeEventChannel setStreamHandler:_imageStreamHandler
                                   completion:^{
                                     dispatch_async(self->_captureSessionQueue, ^{
                                       self.isStreamingImages = YES;
+                                      self.streamingPendingFramesCount = 0;
                                     });
                                   }];
   } else {
@@ -928,6 +939,10 @@
   }
 }
 
+- (void)receivedImageStreamData {
+  self.streamingPendingFramesCount--;
+}
+
 - (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
   CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
 
diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h
index a1f9f2b..19e2842 100644
--- a/packages/camera/camera/ios/Classes/FLTCam_Test.h
+++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h
@@ -5,6 +5,19 @@
 #import "FLTCam.h"
 #import "FLTSavePhotoDelegate.h"
 
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+
+/// The queue on which `eventSink` property should be accessed.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+
+/// The event sink to stream camera events to Dart.
+///
+/// The property should only be accessed on `captureSessionQueue`.
+/// The block itself should be invoked on the main queue.
+@property FlutterEventSink eventSink;
+
+@end
+
 // APIs exposed for unit testing.
 @interface FLTCam ()
 
@@ -14,6 +27,9 @@
 /// The output for photo capturing. Exposed setter for unit tests.
 @property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
 
+/// True when images from the camera are being streamed.
+@property(assign, nonatomic) BOOL isStreamingImages;
+
 /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
 /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
 /// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
@@ -38,4 +54,8 @@
                captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                              error:(NSError **)error;
 
+/// Start streaming images.
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+                   imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;
+
 @end
diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart
index 30e6221..1492ca1 100644
--- a/packages/camera/camera/lib/src/camera_controller.dart
+++ b/packages/camera/camera/lib/src/camera_controller.dart
@@ -448,6 +448,13 @@
     _imageStreamSubscription =
         cameraEventChannel.receiveBroadcastStream().listen(
       (dynamic imageData) {
+        if (defaultTargetPlatform == TargetPlatform.iOS) {
+          try {
+            _channel.invokeMethod<void>('receivedImageStreamData');
+          } on PlatformException catch (e) {
+            throw CameraException(e.code, e.message);
+          }
+        }
         onAvailable(
             CameraImage.fromPlatformData(imageData as Map<dynamic, dynamic>));
       },
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index 0e684a7..064eb91 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@
   Dart.
 repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.4+17
+version: 0.9.4+18
 
 environment:
   sdk: ">=2.14.0 <3.0.0"