[camera] Fix ImageFormatGroup being ignored by startImageStream on iOS (#4519)
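
On iOS, the `imageFormatGroup` passed from Dart was stored in a file-scope `videoFormat` variable that the capture output only read when the camera was first configured, so the value supplied to `initialize` never reached `AVCaptureVideoDataOutput`'s `videoSettings` and streamed frames stayed in the default 32BGRA format. This change moves the format onto the camera instance, reapplies it through a `setVideoFormat:` setter, and adds an integration test covering the default, `yuv420`, and `bgra8888`. The snippet below is a minimal consumer-side sketch of the behavior the fix enables; the function name, permission setup, and logging are illustrative assumptions, not part of this PR:

```dart
// Minimal sketch: request yuv420 frames from the camera plugin.
// Assumes a Flutter app with the camera plugin configured and
// camera permission already granted.
import 'dart:async';

import 'package:camera/camera.dart';

Future<void> streamYuv420Frames() async {
  final List<CameraDescription> cameras = await availableCameras();
  if (cameras.isEmpty) {
    return;
  }

  final CameraController controller = CameraController(
    cameras.first,
    ResolutionPreset.low,
    enableAudio: false,
    // With this fix, the requested format is forwarded to the iOS
    // capture output instead of being silently dropped.
    imageFormatGroup: ImageFormatGroup.yuv420,
  );
  await controller.initialize();

  await controller.startImageStream((CameraImage image) {
    // On iOS, image.format.group now matches the requested yuv420
    // (two planes: Y and interleaved CbCr); before the fix it stayed
    // bgra8888 regardless of the requested format.
    print('format: ${image.format.group}, planes: ${image.planes.length}');
  });
}
```

On the native side, `setVideoFormat:` updates both the stored `_videoFormat` and `_captureVideoOutput.videoSettings`, so the format chosen at `initialize` time is also the one used for the video writer's pixel buffer adaptor.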

diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index cb560f5..1ec2957 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.4+8
+
+* Fixes a bug where `ImageFormatGroup` was ignored in `startImageStream` on iOS.
+
 ## 0.9.4+7
 
 * Fixes a crash in iOS when passing null queue pointer into AVFoundation API due to race condition.  
diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart
index d09300a..3af291a 100644
--- a/packages/camera/camera/example/integration_test/camera_test.dart
+++ b/packages/camera/camera/example/integration_test/camera_test.dart
@@ -240,4 +240,56 @@
     },
     skip: !Platform.isAndroid,
   );
+
+  /// Starts an image stream, specifying the [ImageFormatGroup] to request.
+  Future<CameraImage> startStreaming(List<CameraDescription> cameras,
+      ImageFormatGroup? imageFormatGroup) async {
+    final CameraController controller = CameraController(
+      cameras.first,
+      ResolutionPreset.low,
+      enableAudio: false,
+      imageFormatGroup: imageFormatGroup,
+    );
+
+    await controller.initialize();
+    final Completer<CameraImage> completer = Completer<CameraImage>();
+
+    await controller.startImageStream((CameraImage image) {
+      if (!completer.isCompleted) {
+        Future<void>(() async {
+          await controller.stopImageStream();
+          await controller.dispose();
+        }).then((void value) {
+          completer.complete(image);
+        });
+      }
+    });
+    return completer.future;
+  }
+
+  testWidgets(
+    'iOS image streaming with imageFormatGroup',
+    (WidgetTester tester) async {
+      final List<CameraDescription> cameras = await availableCameras();
+      if (cameras.isEmpty) {
+        return;
+      }
+
+      CameraImage image = await startStreaming(cameras, null);
+      expect(image, isNotNull);
+      expect(image.format.group, ImageFormatGroup.bgra8888);
+      expect(image.planes.length, 1);
+
+      image = await startStreaming(cameras, ImageFormatGroup.yuv420);
+      expect(image, isNotNull);
+      expect(image.format.group, ImageFormatGroup.yuv420);
+      expect(image.planes.length, 2);
+
+      image = await startStreaming(cameras, ImageFormatGroup.bgra8888);
+      expect(image, isNotNull);
+      expect(image.format.group, ImageFormatGroup.bgra8888);
+      expect(image.planes.length, 1);
+    },
+    skip: !Platform.isIOS,
+  );
 }
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m
index 5d70652..e79a783 100644
--- a/packages/camera/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.m
@@ -355,6 +355,8 @@
 @property(assign, nonatomic) CMTime lastAudioSampleTime;
 @property(assign, nonatomic) CMTime videoTimeOffset;
 @property(assign, nonatomic) CMTime audioTimeOffset;
+// Format used for video and image streaming.
+@property(assign, nonatomic) FourCharCode videoFormat;
 @property(nonatomic) CMMotionManager *motionManager;
 @property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
 @end
@@ -365,8 +367,6 @@
   dispatch_queue_t _captureSessionQueue;
   UIDeviceOrientation _deviceOrientation;
 }
-// Format used for video and image streaming.
-FourCharCode videoFormat = kCVPixelFormatType_32BGRA;
 NSString *const errorMethod = @"error";
 
 - (instancetype)initWithCameraName:(NSString *)cameraName
@@ -391,6 +391,7 @@
   _focusMode = FocusModeAuto;
   _lockedCaptureOrientation = UIDeviceOrientationUnknown;
   _deviceOrientation = orientation;
+  _videoFormat = kCVPixelFormatType_32BGRA;
 
   NSError *localError = nil;
   _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
@@ -403,7 +404,7 @@
 
   _captureVideoOutput = [AVCaptureVideoDataOutput new];
   _captureVideoOutput.videoSettings =
-      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)};
   [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
   [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
 
@@ -441,6 +442,12 @@
   [_captureSession stopRunning];
 }
 
+- (void)setVideoFormat:(OSType)videoFormat {
+  _videoFormat = videoFormat;
+  _captureVideoOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
+}
+
 - (void)setDeviceOrientation:(UIDeviceOrientation)orientation {
   if (_deviceOrientation == orientation) {
     return;
@@ -680,7 +687,7 @@
       NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
       imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
       imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
-      imageBuffer[@"format"] = @(videoFormat);
+      imageBuffer[@"format"] = @(_videoFormat);
       imageBuffer[@"planes"] = planes;
       imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]];
       Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]);
@@ -1246,7 +1253,7 @@
   _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
       assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
                                  sourcePixelBufferAttributes:@{
-                                   (NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)
+                                   (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)
                                  }];
 
   NSParameterAssert(_videoWriterInput);
@@ -1464,7 +1471,7 @@
     NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
     if ([@"initialize" isEqualToString:call.method]) {
       NSString *videoFormatValue = ((NSString *)argsMap[@"imageFormatGroup"]);
-      videoFormat = getVideoFormatFromString(videoFormatValue);
+      [_camera setVideoFormat:getVideoFormatFromString(videoFormatValue)];
 
       __weak CameraPlugin *weakSelf = self;
       _camera.onFrameAvailable = ^{
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index e5a5f76..0c670b4 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@
   Dart.
 repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.4+7
+version: 0.9.4+8
 
 environment:
   sdk: ">=2.14.0 <3.0.0"