[camera] Finish converting iOS to Pigeon (#6601)

Converts all remaining Dart->host communication in the iOS implementation to use Pigeon. Given the boilerplate nature of many of the changes, it seemed easiest to just do the remaining calls all at once now that the structure is in place.

Some high-level notes:
- Many methods used to send the `cameraId` without it ever being used on the native side, so the Pigeon versions do not send them.
- `ThreadSafeTextureRegistry` is removed because I discovered that it was masking a bug, so was more trouble than it was worth (see inline comments in PR).
- A number of enums have been removed in favor of using the Pigeon-generated enums to pass data from the plugin class to `FLTCam`.
- In many cases where the completion callback (previously `result`) was being passed to `FLTCam` in a call, only to have it always just call `result(nil)`, that's now done in the plugin class since it's easier to reason about completions being called when they aren't passed around. (Long term we should consider moving almost all of the rest out, and using `FlutterError*` out params that the plugin class passes to `completion`, but that is more surgery than I wanted to do in this PR.)

Completes the iOS portion of https://github.com/flutter/flutter/issues/117905
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index 584c95f..5012f7d 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.9.16
+
+* Converts Dart-to-host communication to Pigeon.
+* Fixes a race condition in camera disposal.
+
 ## 0.9.15+4
 
 * Converts host-to-Dart communcation to Pigeon.
@@ -121,11 +126,11 @@
 
 ## 0.9.8+5
 
-* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set. 
+* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set.
 
 ## 0.9.8+4
 
-* Fixes a crash due to sending orientation change events when the engine is torn down. 
+* Fixes a crash due to sending orientation change events when the engine is torn down.
 
 ## 0.9.8+3
 
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
index dc00e49..2ed7645 100644
--- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
@@ -29,7 +29,6 @@
 		E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; };
 		E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; };
 		E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; };
-		E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */; };
 		E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; };
 		E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; };
 		E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; };
@@ -95,7 +94,6 @@
 		E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = "<group>"; };
 		E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = "<group>"; };
 		E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = "<group>"; };
-		E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeTextureRegistryTests.m; sourceTree = "<group>"; };
 		E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = "<group>"; };
 		E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = "<group>"; };
 		E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = "<group>"; };
@@ -132,7 +130,6 @@
 				03BB766C2665316900CE5A93 /* Info.plist */,
 				033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */,
 				E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */,
-				E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */,
 				E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */,
 				E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */,
 				E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */,
@@ -449,7 +446,6 @@
 				E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
 				788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
 				E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
-				E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
 				E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */,
 				E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */,
 			);
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
index bc3713b..226d6bf 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
@@ -18,22 +18,25 @@
       [self expectationWithDescription:@"dispose's result block must be called"];
   XCTestExpectation *createExpectation =
       [self expectationWithDescription:@"create's result block must be called"];
-  FlutterMethodCall *disposeCall = [FlutterMethodCall methodCallWithMethodName:@"dispose"
-                                                                     arguments:nil];
-  FlutterMethodCall *createCall = [FlutterMethodCall
-      methodCallWithMethodName:@"create"
-                     arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
   // Mimic a dispose call followed by a create call, which can be triggered by slightly dragging the
   // home bar, causing the app to be inactive, and immediately regain active.
-  [camera handleMethodCall:disposeCall
-                    result:^(id _Nullable result) {
-                      [disposeExpectation fulfill];
-                    }];
-  [camera createCameraOnSessionQueueWithCreateMethodCall:createCall
-                                                  result:^(id _Nullable result) {
-                                                    [createExpectation fulfill];
-                                                  }];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
+  [camera disposeCamera:0
+             completion:^(FlutterError *_Nullable error) {
+               [disposeExpectation fulfill];
+             }];
+  [camera createCameraOnSessionQueueWithName:@"acamera"
+                                    settings:[FCPPlatformMediaSettings
+                                                 makeWithResolutionPreset:
+                                                     FCPPlatformResolutionPresetMedium
+                                                          framesPerSecond:nil
+                                                             videoBitrate:nil
+                                                             audioBitrate:nil
+                                                              enableAudio:YES]
+                                  completion:^(NSNumber *_Nullable result,
+                                               FlutterError *_Nullable error) {
+                                    [createExpectation fulfill];
+                                  }];
+  [self waitForExpectationsWithTimeout:30 handler:nil];
   // `captureSessionQueue` must not be nil after `create` call. Otherwise a nil
   // `captureSessionQueue` passed into `AVCaptureVideoDataOutput::setSampleBufferDelegate:queue:`
   // API will cause a crash.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
index 5773040..d13f5a7 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
@@ -114,11 +114,9 @@
   [_camera setValue:_mockDevice forKey:@"captureDevice"];
 
   // Run test
-  [_camera
-      setFocusPointWithResult:^(id _Nullable result) {
-      }
-                            x:1
-                            y:1];
+  [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1]
+          withCompletion:^(FlutterError *_Nullable error){
+          }];
 
   // Verify the focus point of interest has been set
   OCMVerify([_mockDevice setFocusPointOfInterest:CGPointMake(1, 1)]);
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
index 423b8e8..55fc44e 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
@@ -28,22 +28,24 @@
   OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
 
   // Set up method call
-  FlutterMethodCall *call = [FlutterMethodCall
-      methodCallWithMethodName:@"create"
-                     arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
-
-  __block id resultValue;
-  [camera createCameraOnSessionQueueWithCreateMethodCall:call
-                                                  result:^(id _Nullable result) {
-                                                    resultValue = result;
-                                                    [expectation fulfill];
-                                                  }];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
+  __block NSNumber *resultValue;
+  [camera createCameraOnSessionQueueWithName:@"acamera"
+                                    settings:[FCPPlatformMediaSettings
+                                                 makeWithResolutionPreset:
+                                                     FCPPlatformResolutionPresetMedium
+                                                          framesPerSecond:nil
+                                                             videoBitrate:nil
+                                                             audioBitrate:nil
+                                                              enableAudio:YES]
+                                  completion:^(NSNumber *_Nullable result,
+                                               FlutterError *_Nullable error) {
+                                    resultValue = result;
+                                    [expectation fulfill];
+                                  }];
+  [self waitForExpectationsWithTimeout:30 handler:nil];
 
   // Verify the result
-  NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
-  XCTAssertNotNil(dictionaryResult);
-  XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+  XCTAssertNotNil(resultValue);
 }
 
 @end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
index 2ce7b86..96ae19f 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
@@ -16,16 +16,14 @@
 - (void)testPausePreviewWithResult_shouldPausePreview {
   FLTCam *camera = [[FLTCam alloc] init];
 
-  [camera pausePreviewWithResult:^(id _Nullable result){
-  }];
+  [camera pausePreview];
   XCTAssertTrue(camera.isPreviewPaused);
 }
 
 - (void)testResumePreviewWithResult_shouldResumePreview {
   FLTCam *camera = [[FLTCam alloc] init];
 
-  [camera resumePreviewWithResult:^(id _Nullable result){
-  }];
+  [camera resumePreview];
   XCTAssertFalse(camera.isPreviewPaused);
 }
 
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
index 14ced24..5b865d4 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
@@ -15,68 +15,39 @@
 
 #pragma mark - flash mode tests
 
-- (void)testFLTGetFLTFlashModeForString {
-  XCTAssertEqual(FLTFlashModeOff, FLTGetFLTFlashModeForString(@"off"));
-  XCTAssertEqual(FLTFlashModeAuto, FLTGetFLTFlashModeForString(@"auto"));
-  XCTAssertEqual(FLTFlashModeAlways, FLTGetFLTFlashModeForString(@"always"));
-  XCTAssertEqual(FLTFlashModeTorch, FLTGetFLTFlashModeForString(@"torch"));
-  XCTAssertEqual(FLTFlashModeInvalid, FLTGetFLTFlashModeForString(@"unknown"));
-}
-
-- (void)testFLTGetAVCaptureFlashModeForFLTFlashMode {
-  XCTAssertEqual(AVCaptureFlashModeOff, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeOff));
-  XCTAssertEqual(AVCaptureFlashModeAuto, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAuto));
-  XCTAssertEqual(AVCaptureFlashModeOn, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAlways));
-  XCTAssertEqual(-1, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeTorch));
-}
-
-#pragma mark - exposure mode tests
-
-- (void)testFCPGetExposureModeForString {
-  XCTAssertEqual(FCPPlatformExposureModeAuto, FCPGetExposureModeForString(@"auto"));
-  XCTAssertEqual(FCPPlatformExposureModeLocked, FCPGetExposureModeForString(@"locked"));
-}
-
-#pragma mark - focus mode tests
-
-- (void)testFLTGetFLTFocusModeForString {
-  XCTAssertEqual(FCPPlatformFocusModeAuto, FCPGetFocusModeForString(@"auto"));
-  XCTAssertEqual(FCPPlatformFocusModeLocked, FCPGetFocusModeForString(@"locked"));
-}
-
-#pragma mark - resolution preset tests
-
-- (void)testFLTGetFLTResolutionPresetForString {
-  XCTAssertEqual(FLTResolutionPresetVeryLow, FLTGetFLTResolutionPresetForString(@"veryLow"));
-  XCTAssertEqual(FLTResolutionPresetLow, FLTGetFLTResolutionPresetForString(@"low"));
-  XCTAssertEqual(FLTResolutionPresetMedium, FLTGetFLTResolutionPresetForString(@"medium"));
-  XCTAssertEqual(FLTResolutionPresetHigh, FLTGetFLTResolutionPresetForString(@"high"));
-  XCTAssertEqual(FLTResolutionPresetVeryHigh, FLTGetFLTResolutionPresetForString(@"veryHigh"));
-  XCTAssertEqual(FLTResolutionPresetUltraHigh, FLTGetFLTResolutionPresetForString(@"ultraHigh"));
-  XCTAssertEqual(FLTResolutionPresetMax, FLTGetFLTResolutionPresetForString(@"max"));
-  XCTAssertEqual(FLTResolutionPresetInvalid, FLTGetFLTResolutionPresetForString(@"unknown"));
+- (void)testFCPGetAVCaptureFlashModeForPigeonFlashMode {
+  XCTAssertEqual(AVCaptureFlashModeOff,
+                 FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeOff));
+  XCTAssertEqual(AVCaptureFlashModeAuto,
+                 FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeAuto));
+  XCTAssertEqual(AVCaptureFlashModeOn,
+                 FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeAlways));
+  XCTAssertThrows(FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeTorch));
 }
 
 #pragma mark - video format tests
 
-- (void)testFLTGetVideoFormatFromString {
-  XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"bgra8888"));
+- (void)testFCPGetPixelFormatForPigeonFormat {
+  XCTAssertEqual(kCVPixelFormatType_32BGRA,
+                 FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroupBgra8888));
   XCTAssertEqual(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
-                 FLTGetVideoFormatFromString(@"yuv420"));
-  XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"unknown"));
+                 FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroupYuv420));
 }
 
 #pragma mark - device orientation tests
 
-- (void)testFLTGetUIDeviceOrientationForString {
+- (void)testFCPGetUIDeviceOrientationForPigeonDeviceOrientation {
   XCTAssertEqual(UIDeviceOrientationPortraitUpsideDown,
-                 FLTGetUIDeviceOrientationForString(@"portraitDown"));
+                 FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+                     FCPPlatformDeviceOrientationPortraitDown));
   XCTAssertEqual(UIDeviceOrientationLandscapeLeft,
-                 FLTGetUIDeviceOrientationForString(@"landscapeLeft"));
+                 FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+                     FCPPlatformDeviceOrientationLandscapeLeft));
   XCTAssertEqual(UIDeviceOrientationLandscapeRight,
-                 FLTGetUIDeviceOrientationForString(@"landscapeRight"));
-  XCTAssertEqual(UIDeviceOrientationPortrait, FLTGetUIDeviceOrientationForString(@"portraitUp"));
-  XCTAssertEqual(UIDeviceOrientationUnknown, FLTGetUIDeviceOrientationForString(@"unknown"));
+                 FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+                     FCPPlatformDeviceOrientationLandscapeRight));
+  XCTAssertEqual(UIDeviceOrientationPortrait, FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+                                                  FCPPlatformDeviceOrientationPortraitUp));
 }
 
 - (void)testFLTGetStringForUIDeviceOrientation {
@@ -93,12 +64,4 @@
                  FCPGetPigeonDeviceOrientationForOrientation(-1));
 }
 
-#pragma mark - file format tests
-
-- (void)testFLTGetFileFormatForString {
-  XCTAssertEqual(FCPFileFormatJPEG, FCPGetFileFormatFromString(@"jpg"));
-  XCTAssertEqual(FCPFileFormatHEIF, FCPGetFileFormatFromString(@"heif"));
-  XCTAssertEqual(FCPFileFormatInvalid, FCPGetFileFormatFromString(@"unknown"));
-}
-
 @end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
index a5130ad..28f8d5d 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
@@ -30,7 +30,8 @@
   OCMExpect([captureDeviceMock lockForConfiguration:NULL]).andReturn(YES);
   OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
 
-  FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, @"max", captureDeviceMock,
+  FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax,
+                                           captureDeviceMock,
                                            ^CMVideoDimensions(AVCaptureDeviceFormat *format) {
                                              CMVideoDimensions videoDimensions;
                                              videoDimensions.width = 1;
@@ -53,7 +54,7 @@
 
   OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
 
-  FLTCreateCamWithVideoCaptureSession(videoSessionMock, @"max");
+  FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax);
 
   OCMVerifyAll(videoSessionMock);
 }
@@ -70,7 +71,7 @@
   // Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession.
   OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
 
-  FLTCreateCamWithVideoCaptureSession(videoSessionMock, @"ultraHigh");
+  FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetUltraHigh);
 
   OCMVerifyAll(videoSessionMock);
 }
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
index 3177fe4..1962a6b 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
@@ -9,134 +9,11 @@
 #import <OCMock/OCMock.h>
 #import "CameraTestUtils.h"
 
-static const char *gTestResolutionPreset = "medium";
+static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium;
 static const int gTestFramesPerSecond = 15;
 static const int gTestVideoBitrate = 200000;
 static const int gTestAudioBitrate = 32000;
-static const bool gTestEnableAudio = YES;
-
-@interface CameraCreateWithMediaSettingsParseTests : XCTestCase
-@end
-
-/// Expect that optional positive numbers can be parsed
-@implementation CameraCreateWithMediaSettingsParseTests
-
-- (FlutterError *)failingTestWithArguments:(NSDictionary *)arguments {
-  CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
-
-  XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
-
-  // Set up method call
-  FlutterMethodCall *call = [FlutterMethodCall methodCallWithMethodName:@"create"
-                                                              arguments:arguments];
-
-  __block id resultValue;
-  [camera createCameraOnSessionQueueWithCreateMethodCall:call
-                                                  result:^(id _Nullable result) {
-                                                    resultValue = result;
-                                                    [expectation fulfill];
-                                                  }];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
-
-  // Verify the result
-  XCTAssertNotNil(resultValue);
-  XCTAssertTrue([resultValue isKindOfClass:[FlutterError class]]);
-  return (FlutterError *)resultValue;
-}
-
-- (void)goodTestWithArguments:(NSDictionary *)arguments {
-  CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
-
-  XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
-
-  // Set up mocks for initWithCameraName method
-  id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
-  OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]])
-      .andReturn([AVCaptureInput alloc]);
-
-  id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]);
-  OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock);
-  OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
-
-  // Set up method call
-  FlutterMethodCall *call = [FlutterMethodCall
-      methodCallWithMethodName:@"create"
-                     arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
-
-  __block id resultValue;
-  [camera createCameraOnSessionQueueWithCreateMethodCall:call
-                                                  result:^(id _Nullable result) {
-                                                    resultValue = result;
-                                                    [expectation fulfill];
-                                                  }];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
-
-  // Verify the result
-  XCTAssertNotNil(resultValue);
-  XCTAssertFalse([resultValue isKindOfClass:[FlutterError class]]);
-  NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
-  XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectNegativeIntNumbers {
-  FlutterError *error =
-      [self failingTestWithArguments:@{@"fps" : @(-1), @"resolutionPreset" : @"medium"}];
-  XCTAssertEqualObjects(error.message, @"fps should be a positive number",
-                        "should reject negative int number");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectNegativeFloatingPointNumbers {
-  FlutterError *error =
-      [self failingTestWithArguments:@{@"fps" : @(-3.7), @"resolutionPreset" : @"medium"}];
-  XCTAssertEqualObjects(error.message, @"fps should be a positive number",
-                        "should reject negative floating point number");
-}
-
-- (void)testCameraCreateWithMediaSettings_nanShouldBeParsedAsNil {
-  FlutterError *error =
-      [self failingTestWithArguments:@{@"fps" : @(NAN), @"resolutionPreset" : @"medium"}];
-  XCTAssertEqualObjects(error.message, @"fps should not be a nan", "should reject NAN");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldNotRejectNilArguments {
-  [self goodTestWithArguments:@{@"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptNull {
-  [self goodTestWithArguments:@{@"fps" : [NSNull null], @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptPositiveDecimalNumbers {
-  [self goodTestWithArguments:@{@"fps" : @(5), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptPositiveFloatingPointNumbers {
-  [self goodTestWithArguments:@{@"fps" : @(3.7), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectWrongVideoBitrate {
-  FlutterError *error =
-      [self failingTestWithArguments:@{@"videoBitrate" : @(-1), @"resolutionPreset" : @"medium"}];
-  XCTAssertEqualObjects(error.message, @"videoBitrate should be a positive number",
-                        "should reject wrong video bitrate");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectWrongAudioBitrate {
-  FlutterError *error =
-      [self failingTestWithArguments:@{@"audioBitrate" : @(-1), @"resolutionPreset" : @"medium"}];
-  XCTAssertEqualObjects(error.message, @"audioBitrate should be a positive number",
-                        "should reject wrong audio bitrate");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptGoodVideoBitrate {
-  [self goodTestWithArguments:@{@"videoBitrate" : @(200000), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptGoodAudioBitrate {
-  [self goodTestWithArguments:@{@"audioBitrate" : @(32000), @"resolutionPreset" : @"medium"}];
-}
-
-@end
+static const BOOL gTestEnableAudio = YES;
 
 @interface CameraSettingsTests : XCTestCase
 @end
@@ -255,11 +132,12 @@
 
 /// Expect that FPS, video and audio bitrate are passed to camera device and asset writer.
 - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter {
-  FLTCamMediaSettings *settings =
-      [[FLTCamMediaSettings alloc] initWithFramesPerSecond:@(gTestFramesPerSecond)
-                                              videoBitrate:@(gTestVideoBitrate)
-                                              audioBitrate:@(gTestAudioBitrate)
-                                               enableAudio:gTestEnableAudio];
+  FCPPlatformMediaSettings *settings =
+      [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
+                                         framesPerSecond:@(gTestFramesPerSecond)
+                                            videoBitrate:@(gTestVideoBitrate)
+                                            audioBitrate:@(gTestAudioBitrate)
+                                             enableAudio:gTestEnableAudio];
   TestMediaSettingsAVWrapper *injectedWrapper =
       [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self];
 
@@ -275,9 +153,10 @@
                     timeout:1
                enforceOrder:YES];
 
-  [camera startVideoRecordingWithResult:^(id _Nullable result){
-
-  }];
+  [camera
+      startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+      }
+                  messengerForStreaming:nil];
 
   [self waitForExpectations:@[
     injectedWrapper.audioSettingsExpectation, injectedWrapper.videoSettingsExpectation
@@ -300,28 +179,25 @@
   OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
 
   // Set up method call
-  FlutterMethodCall *call =
-      [FlutterMethodCall methodCallWithMethodName:@"create"
-                                        arguments:@{
-                                          @"resolutionPreset" : @(gTestResolutionPreset),
-                                          @"enableAudio" : @(gTestEnableAudio),
-                                          @"fps" : @(gTestFramesPerSecond),
-                                          @"videoBitrate" : @(gTestVideoBitrate),
-                                          @"audioBitrate" : @(gTestAudioBitrate)
-                                        }];
+  FCPPlatformMediaSettings *mediaSettings =
+      [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
+                                         framesPerSecond:@(gTestFramesPerSecond)
+                                            videoBitrate:@(gTestVideoBitrate)
+                                            audioBitrate:@(gTestAudioBitrate)
+                                             enableAudio:gTestEnableAudio];
 
-  __block id resultValue;
-  [camera createCameraOnSessionQueueWithCreateMethodCall:call
-                                                  result:^(id _Nullable result) {
-                                                    resultValue = result;
-                                                    [expectation fulfill];
-                                                  }];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
+  __block NSNumber *resultValue;
+  [camera createCameraOnSessionQueueWithName:@"acamera"
+                                    settings:mediaSettings
+                                  completion:^(NSNumber *result, FlutterError *error) {
+                                    XCTAssertNil(error);
+                                    resultValue = result;
+                                    [expectation fulfill];
+                                  }];
+  [self waitForExpectationsWithTimeout:30 handler:nil];
 
   // Verify the result
-  NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
-  XCTAssertNotNil(dictionaryResult);
-  XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+  XCTAssertNotNil(resultValue);
 }
 
 @end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
index 57c47ac..eded154 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
@@ -14,7 +14,8 @@
 /// dependency injection).
 /// @return an FLTCam object.
 extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
-    dispatch_queue_t _Nullable captureSessionQueue, FLTCamMediaSettings *_Nullable mediaSettings,
+    dispatch_queue_t _Nullable captureSessionQueue,
+    FCPPlatformMediaSettings *_Nullable mediaSettings,
     FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper);
 
 extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue);
@@ -24,7 +25,7 @@
 /// @param resolutionPreset preset for camera's captureSession resolution
 /// @return an FLTCam object.
 extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
-                                                   NSString *resolutionPreset);
+                                                   FCPPlatformResolutionPreset resolutionPreset);
 
 /// Creates an `FLTCam` with a given captureSession and resolutionPreset.
 /// Allows to inject a capture device and a block to compute the video dimensions.
@@ -34,8 +35,8 @@
 /// @param videoDimensionsForFormat custom code to determine video dimensions
 /// @return an FLTCam object.
 extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
-    AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
-    VideoDimensionsForFormat videoDimensionsForFormat);
+    AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset,
+    AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat);
 
 /// Creates a test sample buffer.
 /// @return a test sample buffer.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
index d334576..0dac5c4 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -3,21 +3,29 @@
 // found in the LICENSE file.
 
 #import "CameraTestUtils.h"
+
 #import <OCMock/OCMock.h>
 @import AVFoundation;
+@import camera_avfoundation;
+
+static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings(
+    FCPPlatformResolutionPreset resolutionPreset) {
+  return [FCPPlatformMediaSettings makeWithResolutionPreset:resolutionPreset
+                                            framesPerSecond:nil
+                                               videoBitrate:nil
+                                               audioBitrate:nil
+                                                enableAudio:YES];
+}
 
 FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) {
   return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil);
 }
 
 FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
-    dispatch_queue_t captureSessionQueue, FLTCamMediaSettings *mediaSettings,
+    dispatch_queue_t captureSessionQueue, FCPPlatformMediaSettings *mediaSettings,
     FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper) {
   if (!mediaSettings) {
-    mediaSettings = [[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
-                                                            videoBitrate:nil
-                                                            audioBitrate:nil
-                                                             enableAudio:true];
+    mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium);
   }
 
   if (!mediaSettingsAVWrapper) {
@@ -44,7 +52,6 @@
   OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
 
   id fltCam = [[FLTCam alloc] initWithCameraName:@"camera"
-                                resolutionPreset:@"medium"
                                    mediaSettings:mediaSettings
                           mediaSettingsAVWrapper:mediaSettingsAVWrapper
                                      orientation:UIDeviceOrientationPortrait
@@ -82,7 +89,7 @@
 }
 
 FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
-                                            NSString *resolutionPreset) {
+                                            FCPPlatformResolutionPreset resolutionPreset) {
   id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
   OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
       .andReturn(inputMock);
@@ -91,24 +98,19 @@
   OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
   OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
 
-  return
-      [[FLTCam alloc] initWithCameraName:@"camera"
-                        resolutionPreset:resolutionPreset
-                           mediaSettings:[[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
-                                                                                 videoBitrate:nil
-                                                                                 audioBitrate:nil
-                                                                                  enableAudio:true]
-                  mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
-                             orientation:UIDeviceOrientationPortrait
-                     videoCaptureSession:captureSession
-                     audioCaptureSession:audioSessionMock
-                     captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
-                                   error:nil];
+  return [[FLTCam alloc] initWithCameraName:@"camera"
+                              mediaSettings:FCPGetDefaultMediaSettings(resolutionPreset)
+                     mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
+                                orientation:UIDeviceOrientationPortrait
+                        videoCaptureSession:captureSession
+                        audioCaptureSession:audioSessionMock
+                        captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
+                                      error:nil];
 }
 
 FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
-    AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
-    VideoDimensionsForFormat videoDimensionsForFormat) {
+    AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset,
+    AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) {
   id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
   OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
       .andReturn(inputMock);
@@ -117,22 +119,17 @@
   OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
   OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
 
-  return [[FLTCam alloc]
-      initWithResolutionPreset:resolutionPreset
-                 mediaSettings:[[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
-                                                                       videoBitrate:nil
-                                                                       audioBitrate:nil
-                                                                        enableAudio:true]
-        mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
-                   orientation:UIDeviceOrientationPortrait
-           videoCaptureSession:captureSession
-           audioCaptureSession:audioSessionMock
-           captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
-          captureDeviceFactory:^AVCaptureDevice *(void) {
-            return captureDevice;
-          }
-      videoDimensionsForFormat:videoDimensionsForFormat
-                         error:nil];
+  return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset)
+                        mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
+                                   orientation:UIDeviceOrientationPortrait
+                           videoCaptureSession:captureSession
+                           audioCaptureSession:audioSessionMock
+                           captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
+                          captureDeviceFactory:^AVCaptureDevice *(void) {
+                            return captureDevice;
+                          }
+                      videoDimensionsForFormat:videoDimensionsForFormat
+                                         error:nil];
 }
 
 CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
index 00c583d..f81625f 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
@@ -45,8 +45,9 @@
 
   // `FLTCam::captureToFile` runs on capture session queue.
   dispatch_async(captureSessionQueue, ^{
-    [cam captureToFile:^(id _Nullable result) {
-      XCTAssertTrue([result isKindOfClass:[FlutterError class]]);
+    [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) {
+      XCTAssertNil(result);
+      XCTAssertNotNil(error);
       [errorExpectation fulfill];
     }];
   });
@@ -84,7 +85,7 @@
 
   // `FLTCam::captureToFile` runs on capture session queue.
   dispatch_async(captureSessionQueue, ^{
-    [cam captureToFile:^(id _Nullable result) {
+    [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) {
       XCTAssertEqual(result, filePath);
       [pathExpectation fulfill];
     }];
@@ -100,7 +101,7 @@
   dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
                               (void *)FLTCaptureSessionQueueSpecific, NULL);
   FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
-  [cam setImageFileFormat:FCPFileFormatHEIF];
+  [cam setImageFileFormat:FCPPlatformImageFileFormatHeif];
 
   AVCapturePhotoSettings *settings =
       [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
@@ -125,8 +126,7 @@
   cam.capturePhotoOutput = mockOutput;
   // `FLTCam::captureToFile` runs on capture session queue.
   dispatch_async(captureSessionQueue, ^{
-    [cam captureToFile:^(id _Nullable result) {
-      NSString *filePath = (NSString *)result;
+    [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) {
       XCTAssertEqualObjects([filePath pathExtension], @"heif");
       [expectation fulfill];
     }];
@@ -142,7 +142,7 @@
   dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
                               (void *)FLTCaptureSessionQueueSpecific, NULL);
   FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
-  [cam setImageFileFormat:FCPFileFormatHEIF];
+  [cam setImageFileFormat:FCPPlatformImageFileFormatHeif];
 
   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
   id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
@@ -162,8 +162,7 @@
   cam.capturePhotoOutput = mockOutput;
   // `FLTCam::captureToFile` runs on capture session queue.
   dispatch_async(captureSessionQueue, ^{
-    [cam captureToFile:^(id _Nullable result) {
-      NSString *filePath = (NSString *)result;
+    [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) {
       XCTAssertEqualObjects([filePath pathExtension], @"jpg");
       [expectation fulfill];
     }];
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
index 9c036ad..cba488d 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
@@ -55,12 +55,12 @@
   });
 
   // Pause then resume the recording.
-  [cam startVideoRecordingWithResult:^(id _Nullable result){
-  }];
-  [cam pauseVideoRecordingWithResult:^(id _Nullable result){
-  }];
-  [cam resumeVideoRecordingWithResult:^(id _Nullable result){
-  }];
+  [cam
+      startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+      }
+                  messengerForStreaming:nil];
+  [cam pauseVideoRecording];
+  [cam resumeVideoRecording];
 
   [cam captureOutput:cam.captureVideoOutput
       didOutputSampleBuffer:sampleBuffer
@@ -111,8 +111,10 @@
     writtenSamples = [writtenSamples arrayByAddingObject:@"audio"];
   });
 
-  [cam startVideoRecordingWithResult:^(id _Nullable result){
-  }];
+  [cam
+      startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+      }
+                  messengerForStreaming:nil];
 
   [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
   [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
deleted file mode 100644
index f91896b..0000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import camera_avfoundation;
-@import camera_avfoundation.Test;
-@import XCTest;
-#import <OCMock/OCMock.h>
-
-@interface ThreadSafeTextureRegistryTests : XCTestCase
-@end
-
-@implementation ThreadSafeTextureRegistryTests
-
-- (void)testShouldStayOnMainThreadIfCalledFromMainThread {
-  NSObject<FlutterTextureRegistry> *mockTextureRegistry =
-      OCMProtocolMock(@protocol(FlutterTextureRegistry));
-  FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
-      [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
-
-  XCTestExpectation *registerTextureExpectation =
-      [self expectationWithDescription:@"registerTexture must be called on the main thread"];
-  XCTestExpectation *unregisterTextureExpectation =
-      [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
-  XCTestExpectation *textureFrameAvailableExpectation =
-      [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
-  XCTestExpectation *registerTextureCompletionExpectation =
-      [self expectationWithDescription:
-                @"registerTexture's completion block must be called on the main thread"];
-
-  OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [registerTextureExpectation fulfill];
-    }
-  });
-
-  OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [unregisterTextureExpectation fulfill];
-    }
-  });
-
-  OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [textureFrameAvailableExpectation fulfill];
-    }
-  });
-
-  NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
-  [threadSafeTextureRegistry registerTexture:anyTexture
-                                  completion:^(int64_t textureId) {
-                                    if (NSThread.isMainThread) {
-                                      [registerTextureCompletionExpectation fulfill];
-                                    }
-                                  }];
-  [threadSafeTextureRegistry textureFrameAvailable:0];
-  [threadSafeTextureRegistry unregisterTexture:0];
-  [self waitForExpectationsWithTimeout:1 handler:nil];
-}
-
-- (void)testShouldDispatchToMainThreadIfCalledFromBackgroundThread {
-  NSObject<FlutterTextureRegistry> *mockTextureRegistry =
-      OCMProtocolMock(@protocol(FlutterTextureRegistry));
-  FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
-      [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
-
-  XCTestExpectation *registerTextureExpectation =
-      [self expectationWithDescription:@"registerTexture must be called on the main thread"];
-  XCTestExpectation *unregisterTextureExpectation =
-      [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
-  XCTestExpectation *textureFrameAvailableExpectation =
-      [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
-  XCTestExpectation *registerTextureCompletionExpectation =
-      [self expectationWithDescription:
-                @"registerTexture's completion block must be called on the main thread"];
-
-  OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [registerTextureExpectation fulfill];
-    }
-  });
-
-  OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [unregisterTextureExpectation fulfill];
-    }
-  });
-
-  OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
-    if (NSThread.isMainThread) {
-      [textureFrameAvailableExpectation fulfill];
-    }
-  });
-
-  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-    NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
-    [threadSafeTextureRegistry registerTexture:anyTexture
-                                    completion:^(int64_t textureId) {
-                                      if (NSThread.isMainThread) {
-                                        [registerTextureCompletionExpectation fulfill];
-                                      }
-                                    }];
-    [threadSafeTextureRegistry textureFrameAvailable:0];
-    [threadSafeTextureRegistry unregisterTexture:0];
-  });
-  [self waitForExpectationsWithTimeout:1 handler:nil];
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
index 90a124e..de89aec 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
@@ -6,12 +6,12 @@
 #import "CameraPlugin_Test.h"
 
 @import AVFoundation;
+@import Flutter;
 
 #import "CameraPermissionUtils.h"
 #import "CameraProperties.h"
 #import "FLTCam.h"
 #import "FLTThreadSafeEventChannel.h"
-#import "FLTThreadSafeTextureRegistry.h"
 #import "QueueUtils.h"
 #import "messages.g.h"
 
@@ -22,7 +22,7 @@
 }
 
 @interface CameraPlugin ()
-@property(readonly, nonatomic) FLTThreadSafeTextureRegistry *registry;
+@property(readonly, nonatomic) id<FlutterTextureRegistry> registry;
 @property(readonly, nonatomic) NSObject<FlutterBinaryMessenger> *messenger;
 @property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI;
 @end
@@ -30,12 +30,8 @@
 @implementation CameraPlugin
 
 + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
-  FlutterMethodChannel *channel =
-      [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera_avfoundation"
-                                  binaryMessenger:[registrar messenger]];
   CameraPlugin *instance = [[CameraPlugin alloc] initWithRegistry:[registrar textures]
                                                         messenger:[registrar messenger]];
-  [registrar addMethodCallDelegate:instance channel:channel];
   SetUpFCPCameraApi([registrar messenger], instance);
 }
 
@@ -52,7 +48,7 @@
                        globalAPI:(FCPCameraGlobalEventApi *)globalAPI {
   self = [super init];
   NSAssert(self, @"super init cannot be nil");
-  _registry = [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:registry];
+  _registry = registry;
   _messenger = messenger;
   _globalEventAPI = globalAPI;
   _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL);
@@ -103,13 +99,7 @@
   });
 }
 
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
-  // Invoke the plugin on another dispatch queue to avoid blocking the UI.
-  __weak typeof(self) weakSelf = self;
-  dispatch_async(self.captureSessionQueue, ^{
-    [weakSelf handleMethodCallAsync:call result:result];
-  });
-}
+#pragma mark FCPCameraApi Implementation
 
 - (void)availableCamerasWithCompletion:
     (nonnull void (^)(NSArray<FCPPlatformCameraDescription *> *_Nullable,
@@ -148,274 +138,355 @@
   });
 }
 
-- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
-  if ([@"create" isEqualToString:call.method]) {
-    [self handleCreateMethodCall:call result:result];
-  } else if ([@"startImageStream" isEqualToString:call.method]) {
-    [_camera startImageStreamWithMessenger:_messenger];
-    result(nil);
-  } else if ([@"stopImageStream" isEqualToString:call.method]) {
-    [_camera stopImageStream];
-    result(nil);
-  } else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
-    [_camera receivedImageStreamData];
-    result(nil);
-  } else {
-    NSDictionary *argsMap = call.arguments;
-    NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
-    if ([@"initialize" isEqualToString:call.method]) {
-      NSString *videoFormatValue = ((NSString *)argsMap[@"imageFormatGroup"]);
-
-      [_camera setVideoFormat:FLTGetVideoFormatFromString(videoFormatValue)];
-
-      __weak CameraPlugin *weakSelf = self;
-      _camera.onFrameAvailable = ^{
-        if (![weakSelf.camera isPreviewPaused]) {
-          [weakSelf.registry textureFrameAvailable:cameraId];
-        }
-      };
-      _camera.dartAPI = [[FCPCameraEventApi alloc]
-          initWithBinaryMessenger:_messenger
-             messageChannelSuffix:[NSString stringWithFormat:@"%ld", cameraId]];
-      [_camera reportInitializationState];
-      [self sendDeviceOrientation:[UIDevice currentDevice].orientation];
-      [_camera start];
-      result(nil);
-    } else if ([@"takePicture" isEqualToString:call.method]) {
-      [_camera captureToFile:result];
-    } else if ([@"dispose" isEqualToString:call.method]) {
-      [_registry unregisterTexture:cameraId];
-      [_camera close];
-      result(nil);
-    } else if ([@"prepareForVideoRecording" isEqualToString:call.method]) {
-      [self.camera setUpCaptureSessionForAudio];
-      result(nil);
-    } else if ([@"startVideoRecording" isEqualToString:call.method]) {
-      BOOL enableStream = [call.arguments[@"enableStream"] boolValue];
-      if (enableStream) {
-        [_camera startVideoRecordingWithResult:result messengerForStreaming:_messenger];
-      } else {
-        [_camera startVideoRecordingWithResult:result];
-      }
-    } else if ([@"stopVideoRecording" isEqualToString:call.method]) {
-      [_camera stopVideoRecordingWithResult:result];
-    } else if ([@"pauseVideoRecording" isEqualToString:call.method]) {
-      [_camera pauseVideoRecordingWithResult:result];
-    } else if ([@"resumeVideoRecording" isEqualToString:call.method]) {
-      [_camera resumeVideoRecordingWithResult:result];
-    } else if ([@"getMaxZoomLevel" isEqualToString:call.method]) {
-      [_camera getMaxZoomLevelWithResult:result];
-    } else if ([@"getMinZoomLevel" isEqualToString:call.method]) {
-      [_camera getMinZoomLevelWithResult:result];
-    } else if ([@"setZoomLevel" isEqualToString:call.method]) {
-      CGFloat zoom = ((NSNumber *)argsMap[@"zoom"]).floatValue;
-      [_camera setZoomLevel:zoom Result:result];
-    } else if ([@"setFlashMode" isEqualToString:call.method]) {
-      [_camera setFlashModeWithResult:result mode:call.arguments[@"mode"]];
-    } else if ([@"setExposureMode" isEqualToString:call.method]) {
-      [_camera setExposureModeWithResult:result mode:call.arguments[@"mode"]];
-    } else if ([@"setExposurePoint" isEqualToString:call.method]) {
-      BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
-      double x = 0.5;
-      double y = 0.5;
-      if (!reset) {
-        x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
-        y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
-      }
-      [_camera setExposurePointWithResult:result x:x y:y];
-    } else if ([@"getMinExposureOffset" isEqualToString:call.method]) {
-      result(@(_camera.captureDevice.minExposureTargetBias));
-    } else if ([@"getMaxExposureOffset" isEqualToString:call.method]) {
-      result(@(_camera.captureDevice.maxExposureTargetBias));
-    } else if ([@"getExposureOffsetStepSize" isEqualToString:call.method]) {
-      result(@(0.0));
-    } else if ([@"setExposureOffset" isEqualToString:call.method]) {
-      [_camera setExposureOffsetWithResult:result
-                                    offset:((NSNumber *)call.arguments[@"offset"]).doubleValue];
-    } else if ([@"lockCaptureOrientation" isEqualToString:call.method]) {
-      [_camera lockCaptureOrientationWithResult:result orientation:call.arguments[@"orientation"]];
-    } else if ([@"unlockCaptureOrientation" isEqualToString:call.method]) {
-      [_camera unlockCaptureOrientationWithResult:result];
-    } else if ([@"setFocusMode" isEqualToString:call.method]) {
-      [_camera setFocusModeWithResult:result mode:call.arguments[@"mode"]];
-    } else if ([@"setFocusPoint" isEqualToString:call.method]) {
-      BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
-      double x = 0.5;
-      double y = 0.5;
-      if (!reset) {
-        x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
-        y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
-      }
-      [_camera setFocusPointWithResult:result x:x y:y];
-    } else if ([@"pausePreview" isEqualToString:call.method]) {
-      [_camera pausePreviewWithResult:result];
-    } else if ([@"resumePreview" isEqualToString:call.method]) {
-      [_camera resumePreviewWithResult:result];
-    } else if ([@"setDescriptionWhileRecording" isEqualToString:call.method]) {
-      [_camera setDescriptionWhileRecording:(call.arguments[@"cameraName"]) result:result];
-    } else if ([@"setImageFileFormat" isEqualToString:call.method]) {
-      NSString *fileFormat = call.arguments[@"fileFormat"];
-      [_camera setImageFileFormat:FCPGetFileFormatFromString(fileFormat)];
-    } else {
-      result(FlutterMethodNotImplemented);
-    }
-  }
-}
-
-- (void)handleCreateMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
+- (void)createCameraWithName:(nonnull NSString *)cameraName
+                    settings:(nonnull FCPPlatformMediaSettings *)settings
+                  completion:
+                      (nonnull void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
   // Create FLTCam only if granted camera access (and audio access if audio is enabled)
   __weak typeof(self) weakSelf = self;
-  FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
-    typeof(self) strongSelf = weakSelf;
-    if (!strongSelf) return;
+  dispatch_async(self.captureSessionQueue, ^{
+    FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+      typeof(self) strongSelf = weakSelf;
+      if (!strongSelf) return;
 
-    if (error) {
-      result(error);
-    } else {
-      // Request audio permission on `create` call with `enableAudio` argument instead of the
-      // `prepareForVideoRecording` call. This is because `prepareForVideoRecording` call is
-      // optional, and used as a workaround to fix a missing frame issue on iOS.
-      BOOL audioEnabled = [call.arguments[@"enableAudio"] boolValue];
-      if (audioEnabled) {
-        // Setup audio capture session only if granted audio access.
-        FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
-          // cannot use the outter `strongSelf`
-          typeof(self) strongSelf = weakSelf;
-          if (!strongSelf) return;
-          if (error) {
-            result(error);
-          } else {
-            [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
-          }
-        });
+      if (error) {
+        completion(nil, error);
       } else {
-        [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
+        // Request audio permission on `create` call with `enableAudio` argument instead of the
+        // `prepareForVideoRecording` call. This is because `prepareForVideoRecording` call is
+        // optional, and used as a workaround to fix a missing frame issue on iOS.
+        if (settings.enableAudio) {
+          // Set up the audio capture session only if granted audio access.
+          FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+            // cannot use the outer `strongSelf`
+            typeof(self) strongSelf = weakSelf;
+            if (!strongSelf) return;
+            if (error) {
+              completion(nil, error);
+            } else {
+              [strongSelf createCameraOnSessionQueueWithName:cameraName
+                                                    settings:settings
+                                                  completion:completion];
+            }
+          });
+        } else {
+          [strongSelf createCameraOnSessionQueueWithName:cameraName
+                                                settings:settings
+                                              completion:completion];
+        }
       }
-    }
+    });
   });
 }
 
-// Returns number value if provided and positive, or nil.
-// Used to parse values like framerates and bitrates, that are positive by nature.
-// nil allows to ignore unsupported values.
-+ (NSNumber *)positiveNumberValueOrNilForArgument:(NSString *)argument
-                                       fromMethod:(FlutterMethodCall *)flutterMethodCall
-                                            error:(NSError **)error {
-  id value = flutterMethodCall.arguments[argument];
-
-  if (!value || [value isEqual:[NSNull null]]) {
-    return nil;
-  }
-
-  if (![value isKindOfClass:[NSNumber class]]) {
-    if (error) {
-      *error = [NSError errorWithDomain:@"ArgumentError"
-                                   code:0
-                               userInfo:@{
-                                 NSLocalizedDescriptionKey :
-                                     [NSString stringWithFormat:@"%@ should be a number", argument]
-                               }];
-    }
-    return nil;
-  }
-
-  NSNumber *number = (NSNumber *)value;
-
-  if (isnan([number doubleValue])) {
-    if (error) {
-      *error = [NSError errorWithDomain:@"ArgumentError"
-                                   code:0
-                               userInfo:@{
-                                 NSLocalizedDescriptionKey :
-                                     [NSString stringWithFormat:@"%@ should not be a nan", argument]
-                               }];
-    }
-    return nil;
-  }
-
-  if ([number doubleValue] <= 0.0) {
-    if (error) {
-      *error = [NSError errorWithDomain:@"ArgumentError"
-                                   code:0
-                               userInfo:@{
-                                 NSLocalizedDescriptionKey : [NSString
-                                     stringWithFormat:@"%@ should be a positive number", argument]
-                               }];
-    }
-    return nil;
-  }
-
-  return number;
+- (void)initializeCamera:(NSInteger)cameraId
+         withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+              completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf sessionQueueInitializeCamera:cameraId
+                           withImageFormat:imageFormat
+                                completion:completion];
+  });
 }
 
-- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
-                                                result:(FlutterResult)result {
+- (void)startImageStreamWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera startImageStreamWithMessenger:weakSelf.messenger];
+    completion(nil);
+  });
+}
+
+- (void)stopImageStreamWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera stopImageStream];
+    completion(nil);
+  });
+}
+
+- (void)receivedImageStreamDataWithCompletion:
+    (nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera receivedImageStreamData];
+    completion(nil);
+  });
+}
+
+- (void)takePictureWithCompletion:(nonnull void (^)(NSString *_Nullable,
+                                                    FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera captureToFileWithCompletion:completion];
+  });
+}
+
+- (void)prepareForVideoRecordingWithCompletion:
+    (nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setUpCaptureSessionForAudio];
+    completion(nil);
+  });
+}
+
+- (void)startVideoRecordingWithStreaming:(BOOL)enableStream
+                              completion:(nonnull void (^)(FlutterError *_Nullable))completion {
   __weak typeof(self) weakSelf = self;
   dispatch_async(self.captureSessionQueue, ^{
     typeof(self) strongSelf = weakSelf;
     if (!strongSelf) return;
-
-    NSString *cameraName = createMethodCall.arguments[@"cameraName"];
-
-    NSError *error;
-
-    NSNumber *framesPerSecond = [CameraPlugin positiveNumberValueOrNilForArgument:@"fps"
-                                                                       fromMethod:createMethodCall
-                                                                            error:&error];
-    if (error) {
-      result(FlutterErrorFromNSError(error));
-      return;
-    }
-
-    NSNumber *videoBitrate = [CameraPlugin positiveNumberValueOrNilForArgument:@"videoBitrate"
-                                                                    fromMethod:createMethodCall
-                                                                         error:&error];
-    if (error) {
-      result(FlutterErrorFromNSError(error));
-      return;
-    }
-
-    NSNumber *audioBitrate = [CameraPlugin positiveNumberValueOrNilForArgument:@"audioBitrate"
-                                                                    fromMethod:createMethodCall
-                                                                         error:&error];
-    if (error) {
-      result(FlutterErrorFromNSError(error));
-      return;
-    }
-
-    NSString *resolutionPreset = createMethodCall.arguments[@"resolutionPreset"];
-    NSNumber *enableAudio = createMethodCall.arguments[@"enableAudio"];
-    FLTCamMediaSettings *mediaSettings =
-        [[FLTCamMediaSettings alloc] initWithFramesPerSecond:framesPerSecond
-                                                videoBitrate:videoBitrate
-                                                audioBitrate:audioBitrate
-                                                 enableAudio:[enableAudio boolValue]];
-    FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper =
-        [[FLTCamMediaSettingsAVWrapper alloc] init];
-
-    FLTCam *cam = [[FLTCam alloc] initWithCameraName:cameraName
-                                    resolutionPreset:resolutionPreset
-                                       mediaSettings:mediaSettings
-                              mediaSettingsAVWrapper:mediaSettingsAVWrapper
-                                         orientation:[[UIDevice currentDevice] orientation]
-                                 captureSessionQueue:strongSelf.captureSessionQueue
-                                               error:&error];
-
-    if (error) {
-      result(FlutterErrorFromNSError(error));
-    } else {
-      if (strongSelf.camera) {
-        [strongSelf.camera close];
-      }
-      strongSelf.camera = cam;
-      [strongSelf.registry registerTexture:cam
-                                completion:^(int64_t textureId) {
-                                  result(@{
-                                    @"cameraId" : @(textureId),
-                                  });
-                                }];
-    }
+    [strongSelf.camera
+        startVideoRecordingWithCompletion:completion
+                    messengerForStreaming:(enableStream ? strongSelf.messenger : nil)];
   });
 }
 
+- (void)stopVideoRecordingWithCompletion:(nonnull void (^)(NSString *_Nullable,
+                                                           FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera stopVideoRecordingWithCompletion:completion];
+  });
+}
+
+- (void)pauseVideoRecordingWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera pauseVideoRecording];
+    completion(nil);
+  });
+}
+
+- (void)resumeVideoRecordingWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera resumeVideoRecording];
+    completion(nil);
+  });
+}
+
+- (void)getMinimumZoomLevel:(nonnull void (^)(NSNumber *_Nullable,
+                                              FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    completion(@(weakSelf.camera.minimumAvailableZoomFactor), nil);
+  });
+}
+
+- (void)getMaximumZoomLevel:(nonnull void (^)(NSNumber *_Nullable,
+                                              FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    completion(@(weakSelf.camera.maximumAvailableZoomFactor), nil);
+  });
+}
+
+- (void)setZoomLevel:(double)zoom completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setZoomLevel:zoom withCompletion:completion];
+  });
+}
+
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+          completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setFlashMode:mode withCompletion:completion];
+  });
+}
+
+- (void)setExposureMode:(FCPPlatformExposureMode)mode
+             completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setExposureMode:mode];
+    completion(nil);
+  });
+}
+
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+              completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setExposurePoint:point withCompletion:completion];
+  });
+}
+
+- (void)getMinimumExposureOffset:(nonnull void (^)(NSNumber *_Nullable,
+                                                   FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    completion(@(weakSelf.camera.captureDevice.minExposureTargetBias), nil);
+  });
+}
+
+- (void)getMaximumExposureOffset:(nonnull void (^)(NSNumber *_Nullable,
+                                                   FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    completion(@(weakSelf.camera.captureDevice.maxExposureTargetBias), nil);
+  });
+}
+
+- (void)setExposureOffset:(double)offset
+               completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setExposureOffset:offset];
+    completion(nil);
+  });
+}
+
+- (void)setFocusMode:(FCPPlatformFocusMode)mode
+          completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setFocusMode:mode];
+    completion(nil);
+  });
+}
+
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+           completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setFocusPoint:point withCompletion:completion];
+  });
+}
+
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation
+                    completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera lockCaptureOrientation:orientation];
+    completion(nil);
+  });
+}
+
+- (void)unlockCaptureOrientationWithCompletion:
+    (nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera unlockCaptureOrientation];
+    completion(nil);
+  });
+}
+
+- (void)pausePreviewWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera pausePreview];
+    completion(nil);
+  });
+}
+
+- (void)resumePreviewWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera resumePreview];
+    completion(nil);
+  });
+}
+
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)format
+                completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setImageFileFormat:format];
+    completion(nil);
+  });
+}
+
+- (void)updateDescriptionWhileRecordingCameraName:(nonnull NSString *)cameraName
+                                       completion:
+                                           (nonnull void (^)(FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera setDescriptionWhileRecording:cameraName withCompletion:completion];
+  });
+}
+
+- (void)disposeCamera:(NSInteger)cameraId
+           completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  [_registry unregisterTexture:cameraId];
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf.camera close];
+    completion(nil);
+  });
+}
+
+#pragma mark Private
+
+// This must be called on captureSessionQueue. It is extracted from
+// initializeCamera:withImageFormat:completion: to make it easier to reason about strong/weak
+// self pointers.
+- (void)sessionQueueInitializeCamera:(NSInteger)cameraId
+                     withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+                          completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+  [_camera setVideoFormat:FCPGetPixelFormatForPigeonFormat(imageFormat)];
+
+  __weak CameraPlugin *weakSelf = self;
+  _camera.onFrameAvailable = ^{
+    typeof(self) strongSelf = weakSelf;
+    if (!strongSelf) return;
+    if (![strongSelf.camera isPreviewPaused]) {
+      FLTEnsureToRunOnMainQueue(^{
+        [weakSelf.registry textureFrameAvailable:cameraId];
+      });
+    }
+  };
+  _camera.dartAPI = [[FCPCameraEventApi alloc]
+      initWithBinaryMessenger:_messenger
+         messageChannelSuffix:[NSString stringWithFormat:@"%ld", cameraId]];
+  [_camera reportInitializationState];
+  [self sendDeviceOrientation:[UIDevice currentDevice].orientation];
+  [_camera start];
+  completion(nil);
+}
+
+- (void)createCameraOnSessionQueueWithName:(NSString *)name
+                                  settings:(FCPPlatformMediaSettings *)settings
+                                completion:(nonnull void (^)(NSNumber *_Nullable,
+                                                             FlutterError *_Nullable))completion {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    [weakSelf sessionQueueCreateCameraWithName:name settings:settings completion:completion];
+  });
+}
+
+// This must be called on captureSessionQueue. It is extracted from
+// createCameraOnSessionQueueWithName:settings:completion: to make it easier to reason about
+// strong/weak self pointers.
+- (void)sessionQueueCreateCameraWithName:(NSString *)name
+                                settings:(FCPPlatformMediaSettings *)settings
+                              completion:(nonnull void (^)(NSNumber *_Nullable,
+                                                           FlutterError *_Nullable))completion {
+  FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper =
+      [[FLTCamMediaSettingsAVWrapper alloc] init];
+
+  NSError *error;
+  FLTCam *cam = [[FLTCam alloc] initWithCameraName:name
+                                     mediaSettings:settings
+                            mediaSettingsAVWrapper:mediaSettingsAVWrapper
+                                       orientation:[[UIDevice currentDevice] orientation]
+                               captureSessionQueue:self.captureSessionQueue
+                                             error:&error];
+
+  if (error) {
+    completion(nil, FlutterErrorFromNSError(error));
+  } else {
+    [_camera close];
+    _camera = cam;
+    __weak typeof(self) weakSelf = self;
+    FLTEnsureToRunOnMainQueue(^{
+      completion(@([weakSelf.registry registerTexture:cam]), nil);
+    });
+  }
+}
+
 @end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
index 65a82b7..bc864d1 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
@@ -12,7 +12,6 @@
     header "FLTCam_Test.h"
     header "FLTSavePhotoDelegate_Test.h"
     header "FLTThreadSafeEventChannel.h"
-    header "FLTThreadSafeTextureRegistry.h"
     header "QueueUtils.h"
   }
 }
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
index ab6fb18..c29c2f3 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
@@ -30,13 +30,6 @@
 /// Hide the default public constructor.
 - (instancetype)init NS_UNAVAILABLE;
 
-/// Handles `FlutterMethodCall`s and ensures result is send on the main dispatch queue.
-///
-/// @param call The method call command object.
-/// @param result A wrapper around the `FlutterResult` callback which ensures the callback is called
-/// on the main dispatch queue.
-- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result;
-
 /// Called by the @c NSNotificationManager each time the device's orientation is changed.
 ///
 /// @param notification @c NSNotification instance containing a reference to the `UIDevice` object
@@ -44,8 +37,10 @@
 - (void)orientationChanged:(NSNotification *)notification;
 
 /// Creates FLTCam on session queue and reports the creation result.
-/// @param createMethodCall the create method call
-/// @param result a thread safe flutter result wrapper object to report creation result.
-- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
-                                                result:(FlutterResult)result;
+/// @param name the name of the camera.
+/// @param settings the creation settings.
+/// @param completion the callback to inform the Dart side of the plugin of creation.
+- (void)createCameraOnSessionQueueWithName:(NSString *)name
+                                  settings:(FCPPlatformMediaSettings *)settings
+                                completion:(void (^)(NSNumber *, FlutterError *))completion;
 @end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
index e19f98f..ea7a4a3 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
@@ -9,83 +9,19 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-#pragma mark - flash mode
-
-/// Represents camera's flash mode. Mirrors `FlashMode` enum in flash_mode.dart.
-typedef NS_ENUM(NSInteger, FLTFlashMode) {
-  FLTFlashModeOff,
-  FLTFlashModeAuto,
-  FLTFlashModeAlways,
-  FLTFlashModeTorch,
-  // This should never occur; it indicates an unknown value was received over
-  // the platform channel.
-  FLTFlashModeInvalid,
-};
-
-/// Gets FLTFlashMode from its string representation.
-/// @param mode a string representation of the FLTFlashMode.
-extern FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode);
-
 /// Gets AVCaptureFlashMode from FLTFlashMode.
 /// @param mode flash mode.
-extern AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode);
+extern AVCaptureFlashMode FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashMode mode);
 
-#pragma mark - exposure mode
-
-/// Gets FCPPlatformExposureMode from its string representation.
-/// @param mode a string representation of the exposure mode.
-extern FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode);
-
-#pragma mark - focus mode
-
-/// Gets FCPPlatformFocusMode from its string representation.
-/// @param mode a string representation of focus mode.
-extern FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode);
-
-#pragma mark - device orientation
-
-/// Gets UIDeviceOrientation from its string representation.
-extern UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation);
+/// Gets UIDeviceOrientation from its Pigeon representation.
+extern UIDeviceOrientation FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+    FCPPlatformDeviceOrientation orientation);
 
 /// Gets a Pigeon representation of UIDeviceOrientation.
 extern FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation(
     UIDeviceOrientation orientation);
 
-#pragma mark - resolution preset
-
-/// Represents camera's resolution present. Mirrors ResolutionPreset in camera.dart.
-typedef NS_ENUM(NSInteger, FLTResolutionPreset) {
-  FLTResolutionPresetVeryLow,
-  FLTResolutionPresetLow,
-  FLTResolutionPresetMedium,
-  FLTResolutionPresetHigh,
-  FLTResolutionPresetVeryHigh,
-  FLTResolutionPresetUltraHigh,
-  FLTResolutionPresetMax,
-  // This should never occur; it indicates an unknown value was received over
-  // the platform channel.
-  FLTResolutionPresetInvalid,
-};
-
-/// Gets FLTResolutionPreset from its string representation.
-/// @param preset a string representation of FLTResolutionPreset.
-extern FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset);
-
-#pragma mark - video format
-
-/// Gets VideoFormat from its string representation.
-extern OSType FLTGetVideoFormatFromString(NSString *videoFormatString);
-
-/// Represents image format. Mirrors ImageFileFormat in camera.dart.
-typedef NS_ENUM(NSInteger, FCPFileFormat) {
-  FCPFileFormatJPEG,
-  FCPFileFormatHEIF,
-  FCPFileFormatInvalid,
-};
-
-#pragma mark - image extension
-
-/// Gets a string representation of ImageFileFormat.
-extern FCPFileFormat FCPGetFileFormatFromString(NSString *fileFormatString);
+/// Gets VideoFormat from its Pigeon representation.
+extern OSType FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroup imageFormat);
 
 NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
index e068c18..5aa1f25 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
@@ -4,78 +4,32 @@
 
 #import "CameraProperties.h"
 
-#pragma mark - flash mode
-
-FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode) {
-  if ([mode isEqualToString:@"off"]) {
-    return FLTFlashModeOff;
-  } else if ([mode isEqualToString:@"auto"]) {
-    return FLTFlashModeAuto;
-  } else if ([mode isEqualToString:@"always"]) {
-    return FLTFlashModeAlways;
-  } else if ([mode isEqualToString:@"torch"]) {
-    return FLTFlashModeTorch;
-  } else {
-    return FLTFlashModeInvalid;
-  }
-}
-
-AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode) {
+AVCaptureFlashMode FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashMode mode) {
   switch (mode) {
-    case FLTFlashModeOff:
+    case FCPPlatformFlashModeOff:
       return AVCaptureFlashModeOff;
-    case FLTFlashModeAuto:
+    case FCPPlatformFlashModeAuto:
       return AVCaptureFlashModeAuto;
-    case FLTFlashModeAlways:
+    case FCPPlatformFlashModeAlways:
       return AVCaptureFlashModeOn;
-    case FLTFlashModeTorch:
-    default:
+    case FCPPlatformFlashModeTorch:
+      NSCAssert(false, @"This mode cannot be converted, and requires custom handling.");
       return -1;
   }
 }
 
-#pragma mark - exposure mode
-
-FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode) {
-  if ([mode isEqualToString:@"auto"]) {
-    return FCPPlatformExposureModeAuto;
-  } else if ([mode isEqualToString:@"locked"]) {
-    return FCPPlatformExposureModeLocked;
-  } else {
-    // This should be unreachable; see _serializeExposureMode in avfoundation_camera.dart.
-    NSCAssert(false, @"Unsupported exposure mode");
-    return FCPPlatformExposureModeAuto;
-  }
-}
-
-#pragma mark - focus mode
-
-FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode) {
-  if ([mode isEqualToString:@"auto"]) {
-    return FCPPlatformFocusModeAuto;
-  } else if ([mode isEqualToString:@"locked"]) {
-    return FCPPlatformFocusModeLocked;
-  } else {
-    // This should be unreachable; see _serializeFocusMode in avfoundation_camera.dart.
-    NSCAssert(false, @"Unsupported focus mode");
-    return FCPPlatformFocusModeAuto;
-  }
-}
-
-#pragma mark - device orientation
-
-UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation) {
-  if ([orientation isEqualToString:@"portraitDown"]) {
-    return UIDeviceOrientationPortraitUpsideDown;
-  } else if ([orientation isEqualToString:@"landscapeLeft"]) {
-    return UIDeviceOrientationLandscapeLeft;
-  } else if ([orientation isEqualToString:@"landscapeRight"]) {
-    return UIDeviceOrientationLandscapeRight;
-  } else if ([orientation isEqualToString:@"portraitUp"]) {
-    return UIDeviceOrientationPortrait;
-  } else {
-    return UIDeviceOrientationUnknown;
-  }
+UIDeviceOrientation FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+    FCPPlatformDeviceOrientation orientation) {
+  switch (orientation) {
+    case FCPPlatformDeviceOrientationPortraitDown:
+      return UIDeviceOrientationPortraitUpsideDown;
+    case FCPPlatformDeviceOrientationLandscapeLeft:
+      return UIDeviceOrientationLandscapeLeft;
+    case FCPPlatformDeviceOrientationLandscapeRight:
+      return UIDeviceOrientationLandscapeRight;
+    case FCPPlatformDeviceOrientationPortraitUp:
+      return UIDeviceOrientationPortrait;
+  };
 }
 
 FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation(
@@ -93,49 +47,11 @@
   };
 }
 
-#pragma mark - resolution preset
-
-FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset) {
-  if ([preset isEqualToString:@"veryLow"]) {
-    return FLTResolutionPresetVeryLow;
-  } else if ([preset isEqualToString:@"low"]) {
-    return FLTResolutionPresetLow;
-  } else if ([preset isEqualToString:@"medium"]) {
-    return FLTResolutionPresetMedium;
-  } else if ([preset isEqualToString:@"high"]) {
-    return FLTResolutionPresetHigh;
-  } else if ([preset isEqualToString:@"veryHigh"]) {
-    return FLTResolutionPresetVeryHigh;
-  } else if ([preset isEqualToString:@"ultraHigh"]) {
-    return FLTResolutionPresetUltraHigh;
-  } else if ([preset isEqualToString:@"max"]) {
-    return FLTResolutionPresetMax;
-  } else {
-    return FLTResolutionPresetInvalid;
-  }
-}
-
-#pragma mark - video format
-
-OSType FLTGetVideoFormatFromString(NSString *videoFormatString) {
-  if ([videoFormatString isEqualToString:@"bgra8888"]) {
-    return kCVPixelFormatType_32BGRA;
-  } else if ([videoFormatString isEqualToString:@"yuv420"]) {
-    return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
-  } else {
-    NSLog(@"The selected imageFormatGroup is not supported by iOS. Defaulting to brga8888");
-    return kCVPixelFormatType_32BGRA;
-  }
-}
-
-#pragma mark - file format
-
-FCPFileFormat FCPGetFileFormatFromString(NSString *fileFormatString) {
-  if ([fileFormatString isEqualToString:@"jpg"]) {
-    return FCPFileFormatJPEG;
-  } else if ([fileFormatString isEqualToString:@"heif"]) {
-    return FCPFileFormatHEIF;
-  } else {
-    return FCPFileFormatInvalid;
+OSType FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroup imageFormat) {
+  switch (imageFormat) {
+    case FCPPlatformImageFormatGroupBgra8888:
+      return kCVPixelFormatType_32BGRA;
+    case FCPPlatformImageFormatGroupYuv420:
+      return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
   }
 }
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
index ddc1e25..d8f9792 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
@@ -7,9 +7,7 @@
 @import Flutter;
 
 #import "CameraProperties.h"
-#import "FLTCamMediaSettings.h"
 #import "FLTCamMediaSettingsAVWrapper.h"
-#import "FLTThreadSafeTextureRegistry.h"
 #import "messages.g.h"
 
 NS_ASSUME_NONNULL_BEGIN
@@ -24,17 +22,17 @@
 /// The API instance used to communicate with the Dart side of the plugin. Once initially set, this
 /// should only ever be accessed on the main thread.
 @property(nonatomic) FCPCameraEventApi *dartAPI;
-@property(assign, nonatomic) FLTResolutionPreset resolutionPreset;
 @property(assign, nonatomic) FCPPlatformExposureMode exposureMode;
 @property(assign, nonatomic) FCPPlatformFocusMode focusMode;
-@property(assign, nonatomic) FLTFlashMode flashMode;
+@property(assign, nonatomic) FCPPlatformFlashMode flashMode;
 // Format used for video and image streaming.
 @property(assign, nonatomic) FourCharCode videoFormat;
-@property(assign, nonatomic) FCPFileFormat fileFormat;
+@property(assign, nonatomic) FCPPlatformImageFileFormat fileFormat;
+@property(assign, nonatomic) CGFloat minimumAvailableZoomFactor;
+@property(assign, nonatomic) CGFloat maximumAvailableZoomFactor;
 
 /// Initializes an `FLTCam` instance.
 /// @param cameraName a name used to uniquely identify the camera.
-/// @param resolutionPreset the resolution preset
 /// @param mediaSettings the media settings configuration parameters
 /// @param mediaSettingsAVWrapper AVFoundation wrapper to perform media settings related operations
 /// (for dependency injection in unit tests).
@@ -42,8 +40,7 @@
 /// @param captureSessionQueue the queue on which camera's capture session operations happen.
 /// @param error report to the caller if any error happened creating the camera.
 - (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                     mediaSettings:(FLTCamMediaSettings *)mediaSettings
+                     mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
             mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
                        orientation:(UIDeviceOrientation)orientation
                captureSessionQueue:(dispatch_queue_t)captureSessionQueue
@@ -54,26 +51,27 @@
 - (void)start;
 - (void)stop;
 - (void)setDeviceOrientation:(UIDeviceOrientation)orientation;
-- (void)captureToFile:(FlutterResult)result;
+- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
+                                              FlutterError *_Nullable))completion;
 - (void)close;
-- (void)startVideoRecordingWithResult:(FlutterResult)result;
-- (void)setImageFileFormat:(FCPFileFormat)fileFormat;
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat;
 /// Starts recording a video with an optional streaming messenger.
-/// If the messenger is non-null then it will be called for each
+/// If the messenger is non-nil then it will be called for each
 /// captured frame, allowing streaming concurrently with recording.
 ///
 /// @param messenger Nullable messenger for capturing each frame.
-- (void)startVideoRecordingWithResult:(FlutterResult)result
-                messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;
-- (void)stopVideoRecordingWithResult:(FlutterResult)result;
-- (void)pauseVideoRecordingWithResult:(FlutterResult)result;
-- (void)resumeVideoRecordingWithResult:(FlutterResult)result;
-- (void)lockCaptureOrientationWithResult:(FlutterResult)result
-                             orientation:(NSString *)orientationStr;
-- (void)unlockCaptureOrientationWithResult:(FlutterResult)result;
-- (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
-- (void)setExposureModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
-- (void)setFocusModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
+- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
+                    messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+                                                   FlutterError *_Nullable))completion;
+- (void)pauseVideoRecording;
+- (void)resumeVideoRecording;
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation;
+- (void)unlockCaptureOrientation;
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+      withCompletion:(void (^)(FlutterError *_Nullable))completion;
+- (void)setExposureMode:(FCPPlatformExposureMode)mode;
+- (void)setFocusMode:(FCPPlatformFocusMode)mode;
 - (void)applyFocusMode;
 
 /// Acknowledges the receipt of one image stream frame.
@@ -95,17 +93,26 @@
 /// @param focusMode The focus mode that should be applied to the @captureDevice instance.
 /// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied.
 - (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
-- (void)pausePreviewWithResult:(FlutterResult)result;
-- (void)resumePreviewWithResult:(FlutterResult)result;
-- (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResult)result;
-- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y;
-- (void)setFocusPointWithResult:(FlutterResult)result x:(double)x y:(double)y;
-- (void)setExposureOffsetWithResult:(FlutterResult)result offset:(double)offset;
+- (void)pausePreview;
+- (void)resumePreview;
+- (void)setDescriptionWhileRecording:(NSString *)cameraName
+                      withCompletion:(void (^)(FlutterError *_Nullable))completion;
+
+/// Sets the exposure point, in a (0,1) coordinate system.
+///
+/// If @c point is nil, the exposure point will reset to the center.
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+          withCompletion:(void (^)(FlutterError *_Nullable))completion;
+
+/// Sets the focus point, in a (0,1) coordinate system.
+///
+/// If @c point is nil, the focus point will reset to the center.
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+       withCompletion:(void (^)(FlutterError *_Nullable))completion;
+- (void)setExposureOffset:(double)offset;
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
 - (void)stopImageStream;
-- (void)getMaxZoomLevelWithResult:(FlutterResult)result;
-- (void)getMinZoomLevelWithResult:(FlutterResult)result;
-- (void)setZoomLevel:(CGFloat)zoom Result:(FlutterResult)result;
+- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion;
 - (void)setUpCaptureSessionForAudio;
 
 @end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index f65af07..45ab3e0 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -51,7 +51,7 @@
                       AVCaptureAudioDataOutputSampleBufferDelegate>
 
 @property(readonly, nonatomic) int64_t textureId;
-@property(readonly, nonatomic) FLTCamMediaSettings *mediaSettings;
+@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
 @property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;
 @property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
 @property(readonly, nonatomic) AVCaptureSession *videoCaptureSession;
@@ -114,14 +114,12 @@
 NSString *const errorMethod = @"error";
 
 - (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                     mediaSettings:(FLTCamMediaSettings *)mediaSettings
+                     mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
             mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
                        orientation:(UIDeviceOrientation)orientation
                captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                              error:(NSError **)error {
   return [self initWithCameraName:cameraName
-                 resolutionPreset:resolutionPreset
                     mediaSettings:mediaSettings
            mediaSettingsAVWrapper:mediaSettingsAVWrapper
                       orientation:orientation
@@ -132,16 +130,14 @@
 }
 
 - (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                     mediaSettings:(FLTCamMediaSettings *)mediaSettings
+                     mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
             mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
                        orientation:(UIDeviceOrientation)orientation
                videoCaptureSession:(AVCaptureSession *)videoCaptureSession
                audioCaptureSession:(AVCaptureSession *)audioCaptureSession
                captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                              error:(NSError **)error {
-  return [self initWithResolutionPreset:resolutionPreset
-      mediaSettings:mediaSettings
+  return [self initWithMediaSettings:mediaSettings
       mediaSettingsAVWrapper:mediaSettingsAVWrapper
       orientation:orientation
       videoCaptureSession:videoCaptureSession
@@ -156,30 +152,17 @@
       error:error];
 }
 
-- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
-                           mediaSettings:(FLTCamMediaSettings *)mediaSettings
-                  mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
-                             orientation:(UIDeviceOrientation)orientation
-                     videoCaptureSession:(AVCaptureSession *)videoCaptureSession
-                     audioCaptureSession:(AVCaptureSession *)audioCaptureSession
-                     captureSessionQueue:(dispatch_queue_t)captureSessionQueue
-                    captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
-                videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
-                                   error:(NSError **)error {
+- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
+               mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
+                          orientation:(UIDeviceOrientation)orientation
+                  videoCaptureSession:(AVCaptureSession *)videoCaptureSession
+                  audioCaptureSession:(AVCaptureSession *)audioCaptureSession
+                  captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                 captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
+             videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
+                                error:(NSError **)error {
   self = [super init];
   NSAssert(self, @"super init cannot be nil");
-  _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
-  if (_resolutionPreset == FLTResolutionPresetInvalid) {
-    *error = [NSError
-        errorWithDomain:NSCocoaErrorDomain
-                   code:NSURLErrorUnknown
-               userInfo:@{
-                 NSLocalizedDescriptionKey :
-                     [NSString stringWithFormat:@"Unknown resolution preset %@", resolutionPreset]
-               }];
-    return nil;
-  }
-
   _mediaSettings = mediaSettings;
   _mediaSettingsAVWrapper = mediaSettingsAVWrapper;
 
@@ -192,14 +175,14 @@
   _captureDeviceFactory = captureDeviceFactory;
   _captureDevice = captureDeviceFactory();
   _videoDimensionsForFormat = videoDimensionsForFormat;
-  _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
+  _flashMode = _captureDevice.hasFlash ? FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff;
   _exposureMode = FCPPlatformExposureModeAuto;
   _focusMode = FCPPlatformFocusModeAuto;
   _lockedCaptureOrientation = UIDeviceOrientationUnknown;
   _deviceOrientation = orientation;
   _videoFormat = kCVPixelFormatType_32BGRA;
   _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
-  _fileFormat = FCPFileFormatJPEG;
+  _fileFormat = FCPPlatformImageFileFormatJpeg;
 
   // To limit memory consumption, limit the number of frames pending processing.
   // After some testing, 4 was determined to be the best maximum value.
@@ -236,7 +219,7 @@
       // If _resolutionPreset is not supported by camera there is
       // fallback to lower resolution presets.
       // If none can be selected there is error condition.
-      if (![self setCaptureSessionPreset:_resolutionPreset withError:error]) {
+      if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
         [_videoCaptureSession commitConfiguration];
         [_captureDevice unlockForConfiguration];
         return nil;
@@ -257,7 +240,7 @@
   } else {
     // If the frame rate is not important fall to a less restrictive
     // behavior (no configuration locking).
-    if (![self setCaptureSessionPreset:_resolutionPreset withError:error]) {
+    if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
       return nil;
     }
   }
@@ -332,7 +315,7 @@
       @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
 }
 
-- (void)setImageFileFormat:(FCPFileFormat)fileFormat {
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat {
   _fileFormat = fileFormat;
 }
 
@@ -370,10 +353,11 @@
   }
 }
 
-- (void)captureToFile:(FlutterResult)result {
+- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
+                                              FlutterError *_Nullable))completion {
   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
 
-  if (_resolutionPreset == FLTResolutionPresetMax) {
+  if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) {
     [settings setHighResolutionPhotoEnabled:YES];
   }
 
@@ -382,7 +366,7 @@
   BOOL isHEVCCodecAvailable =
       [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC];
 
-  if (_fileFormat == FCPFileFormatHEIF && isHEVCCodecAvailable) {
+  if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) {
     settings =
         [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
     extension = @"heif";
@@ -390,7 +374,7 @@
     extension = @"jpg";
   }
 
-  AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
+  AVCaptureFlashMode avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(_flashMode);
   if (avFlashMode != -1) {
     [settings setFlashMode:avFlashMode];
   }
@@ -400,7 +384,7 @@
                                                     prefix:@"CAP_"
                                                      error:error];
   if (error) {
-    result(FlutterErrorFromNSError(error));
+    completion(nil, FlutterErrorFromNSError(error));
     return;
   }
 
@@ -419,10 +403,10 @@
         });
 
         if (error) {
-          result(FlutterErrorFromNSError(error));
+          completion(nil, FlutterErrorFromNSError(error));
         } else {
           NSAssert(path, @"Path must not be nil if no error.");
-          result(path);
+          completion(path, nil);
         }
       }];
 
@@ -477,9 +461,10 @@
   return file;
 }
 
-- (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:(NSError **)error {
+- (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+                      withError:(NSError **)error {
   switch (resolutionPreset) {
-    case FLTResolutionPresetMax: {
+    case FCPPlatformResolutionPresetMax: {
       AVCaptureDeviceFormat *bestFormat =
           [self highestResolutionFormatForCaptureDevice:_captureDevice];
       if (bestFormat) {
@@ -497,7 +482,7 @@
         }
       }
     }
-    case FLTResolutionPresetUltraHigh:
+    case FCPPlatformResolutionPresetUltraHigh:
       if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
         _videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
         _previewSize = CGSizeMake(3840, 2160);
@@ -510,25 +495,25 @@
                        _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
         break;
       }
-    case FLTResolutionPresetVeryHigh:
+    case FCPPlatformResolutionPresetVeryHigh:
       if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
         _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
         _previewSize = CGSizeMake(1920, 1080);
         break;
       }
-    case FLTResolutionPresetHigh:
+    case FCPPlatformResolutionPresetHigh:
       if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
         _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;
         _previewSize = CGSizeMake(1280, 720);
         break;
       }
-    case FLTResolutionPresetMedium:
+    case FCPPlatformResolutionPresetMedium:
       if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
         _videoCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
         _previewSize = CGSizeMake(640, 480);
         break;
       }
-    case FLTResolutionPresetLow:
+    case FCPPlatformResolutionPresetLow:
       if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
         _videoCaptureSession.sessionPreset = AVCaptureSessionPreset352x288;
         _previewSize = CGSizeMake(352, 288);
@@ -819,12 +804,8 @@
   return pixelBuffer;
 }
 
-- (void)startVideoRecordingWithResult:(FlutterResult)result {
-  [self startVideoRecordingWithResult:result messengerForStreaming:nil];
-}
-
-- (void)startVideoRecordingWithResult:(FlutterResult)result
-                messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
+- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
+                    messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
   if (!_isRecording) {
     if (messenger != nil) {
       [self startImageStreamWithMessenger:messenger];
@@ -836,11 +817,13 @@
                                                            prefix:@"REC_"
                                                             error:error];
     if (error) {
-      result(FlutterErrorFromNSError(error));
+      completion(FlutterErrorFromNSError(error));
       return;
     }
     if (![self setupWriterForPath:_videoRecordingPath]) {
-      result([FlutterError errorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil]);
+      completion([FlutterError errorWithCode:@"IOError"
+                                     message:@"Setup Writer Failed"
+                                     details:nil]);
       return;
     }
     _isRecording = YES;
@@ -849,13 +832,16 @@
     _audioTimeOffset = CMTimeMake(0, 1);
     _videoIsDisconnected = NO;
     _audioIsDisconnected = NO;
-    result(nil);
+    completion(nil);
   } else {
-    result([FlutterError errorWithCode:@"Error" message:@"Video is already recording" details:nil]);
+    completion([FlutterError errorWithCode:@"Error"
+                                   message:@"Video is already recording"
+                                   details:nil]);
   }
 }
 
-- (void)stopVideoRecordingWithResult:(FlutterResult)result {
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+                                                   FlutterError *_Nullable))completion {
   if (_isRecording) {
     _isRecording = NO;
 
@@ -863,12 +849,12 @@
       [_videoWriter finishWritingWithCompletionHandler:^{
         if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
           [self updateOrientation];
-          result(self->_videoRecordingPath);
+          completion(self->_videoRecordingPath, nil);
           self->_videoRecordingPath = nil;
         } else {
-          result([FlutterError errorWithCode:@"IOError"
-                                     message:@"AVAssetWriter could not finish writing!"
-                                     details:nil]);
+          completion(nil, [FlutterError errorWithCode:@"IOError"
+                                              message:@"AVAssetWriter could not finish writing!"
+                                              details:nil]);
         }
       }];
     }
@@ -877,75 +863,47 @@
         [NSError errorWithDomain:NSCocoaErrorDomain
                             code:NSURLErrorResourceUnavailable
                         userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
-    result(FlutterErrorFromNSError(error));
+    completion(nil, FlutterErrorFromNSError(error));
   }
 }
 
-- (void)pauseVideoRecordingWithResult:(FlutterResult)result {
+- (void)pauseVideoRecording {
   _isRecordingPaused = YES;
   _videoIsDisconnected = YES;
   _audioIsDisconnected = YES;
-  result(nil);
 }
 
-- (void)resumeVideoRecordingWithResult:(FlutterResult)result {
+- (void)resumeVideoRecording {
   _isRecordingPaused = NO;
-  result(nil);
 }
 
-- (void)lockCaptureOrientationWithResult:(FlutterResult)result
-                             orientation:(NSString *)orientationStr {
-  UIDeviceOrientation orientation = FLTGetUIDeviceOrientationForString(orientationStr);
-  // "Unknown" should never be sent, so is used to represent an unexpected
-  // value.
-  if (orientation == UIDeviceOrientationUnknown) {
-    result(FlutterErrorFromNSError([NSError
-        errorWithDomain:NSCocoaErrorDomain
-                   code:NSURLErrorUnknown
-               userInfo:@{
-                 NSLocalizedDescriptionKey :
-                     [NSString stringWithFormat:@"Unknown device orientation %@", orientationStr]
-               }]));
-    return;
-  }
-
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)pigeonOrientation {
+  UIDeviceOrientation orientation =
+      FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation);
   if (_lockedCaptureOrientation != orientation) {
     _lockedCaptureOrientation = orientation;
     [self updateOrientation];
   }
-
-  result(nil);
 }
 
-- (void)unlockCaptureOrientationWithResult:(FlutterResult)result {
+- (void)unlockCaptureOrientation {
   _lockedCaptureOrientation = UIDeviceOrientationUnknown;
   [self updateOrientation];
-  result(nil);
 }
 
-- (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
-  FLTFlashMode mode = FLTGetFLTFlashModeForString(modeStr);
-  if (mode == FLTFlashModeInvalid) {
-    result(FlutterErrorFromNSError([NSError
-        errorWithDomain:NSCocoaErrorDomain
-                   code:NSURLErrorUnknown
-               userInfo:@{
-                 NSLocalizedDescriptionKey :
-                     [NSString stringWithFormat:@"Unknown flash mode %@", modeStr]
-               }]));
-    return;
-  }
-  if (mode == FLTFlashModeTorch) {
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+      withCompletion:(void (^)(FlutterError *_Nullable))completion {
+  if (mode == FCPPlatformFlashModeTorch) {
     if (!_captureDevice.hasTorch) {
-      result([FlutterError errorWithCode:@"setFlashModeFailed"
-                                 message:@"Device does not support torch mode"
-                                 details:nil]);
+      completion([FlutterError errorWithCode:@"setFlashModeFailed"
+                                     message:@"Device does not support torch mode"
+                                     details:nil]);
       return;
     }
     if (!_captureDevice.isTorchAvailable) {
-      result([FlutterError errorWithCode:@"setFlashModeFailed"
-                                 message:@"Torch mode is currently not available"
-                                 details:nil]);
+      completion([FlutterError errorWithCode:@"setFlashModeFailed"
+                                     message:@"Torch mode is currently not available"
+                                     details:nil]);
       return;
     }
     if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
@@ -955,17 +913,17 @@
     }
   } else {
     if (!_captureDevice.hasFlash) {
-      result([FlutterError errorWithCode:@"setFlashModeFailed"
-                                 message:@"Device does not have flash capabilities"
-                                 details:nil]);
+      completion([FlutterError errorWithCode:@"setFlashModeFailed"
+                                     message:@"Device does not have flash capabilities"
+                                     details:nil]);
       return;
     }
-    AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
+    AVCaptureFlashMode avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode);
     if (![_capturePhotoOutput.supportedFlashModes
             containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
-      result([FlutterError errorWithCode:@"setFlashModeFailed"
-                                 message:@"Device does not support this specific flash mode"
-                                 details:nil]);
+      completion([FlutterError errorWithCode:@"setFlashModeFailed"
+                                     message:@"Device does not support this specific flash mode"
+                                     details:nil]);
       return;
     }
     if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
@@ -975,14 +933,12 @@
     }
   }
   _flashMode = mode;
-  result(nil);
+  completion(nil);
 }
 
-- (void)setExposureModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
-  FCPPlatformExposureMode mode = FCPGetExposureModeForString(modeStr);
+- (void)setExposureMode:(FCPPlatformExposureMode)mode {
   _exposureMode = mode;
   [self applyExposureMode];
-  result(nil);
 }
 
 - (void)applyExposureMode {
@@ -1002,11 +958,9 @@
   [_captureDevice unlockForConfiguration];
 }
 
-- (void)setFocusModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
-  FCPPlatformFocusMode mode = FCPGetFocusModeForString(modeStr);
+- (void)setFocusMode:(FCPPlatformFocusMode)mode {
   _focusMode = mode;
   [self applyFocusMode];
-  result(nil);
 }
 
 - (void)applyFocusMode {
@@ -1032,21 +986,20 @@
   [captureDevice unlockForConfiguration];
 }
 
-- (void)pausePreviewWithResult:(FlutterResult)result {
+- (void)pausePreview {
   _isPreviewPaused = true;
-  result(nil);
 }
 
-- (void)resumePreviewWithResult:(FlutterResult)result {
+- (void)resumePreview {
   _isPreviewPaused = false;
-  result(nil);
 }
 
-- (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResult)result {
+- (void)setDescriptionWhileRecording:(NSString *)cameraName
+                      withCompletion:(void (^)(FlutterError *_Nullable))completion {
   if (!_isRecording) {
-    result([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed"
-                               message:@"Device was not recording"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed"
+                                   message:@"Device was not recording"
+                                   details:nil]);
     return;
   }
 
@@ -1066,7 +1019,7 @@
   NSError *error = nil;
   AVCaptureConnection *newConnection = [self createConnection:&error];
   if (error) {
-    result(FlutterErrorFromNSError(error));
+    completion(FlutterErrorFromNSError(error));
     return;
   }
 
@@ -1077,41 +1030,41 @@
 
   // Add the new connections to the session.
   if (![_videoCaptureSession canAddInput:_captureVideoInput])
-    result([FlutterError errorWithCode:@"VideoError"
-                               message:@"Unable switch video input"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"VideoError"
+                                   message:@"Unable switch video input"
+                                   details:nil]);
   [_videoCaptureSession addInputWithNoConnections:_captureVideoInput];
   if (![_videoCaptureSession canAddOutput:_captureVideoOutput])
-    result([FlutterError errorWithCode:@"VideoError"
-                               message:@"Unable switch video output"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"VideoError"
+                                   message:@"Unable switch video output"
+                                   details:nil]);
   [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput];
   if (![_videoCaptureSession canAddConnection:newConnection])
-    result([FlutterError errorWithCode:@"VideoError"
-                               message:@"Unable switch video connection"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"VideoError"
+                                   message:@"Unable switch video connection"
+                                   details:nil]);
   [_videoCaptureSession addConnection:newConnection];
   [_videoCaptureSession commitConfiguration];
 
-  result(nil);
+  completion(nil);
 }
 
-- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
-                                            x:(double)x
-                                            y:(double)y {
-  double oldX = x, oldY = y;
+- (CGPoint)CGPointForPoint:(nonnull FCPPlatformPoint *)point
+           withOrientation:(UIDeviceOrientation)orientation {
+  double x = point.x;
+  double y = point.y;
   switch (orientation) {
     case UIDeviceOrientationPortrait:  // 90 ccw
-      y = 1 - oldX;
-      x = oldY;
+      y = 1 - point.x;
+      x = point.y;
       break;
     case UIDeviceOrientationPortraitUpsideDown:  // 90 cw
-      x = 1 - oldY;
-      y = oldX;
+      x = 1 - point.y;
+      y = point.x;
       break;
     case UIDeviceOrientationLandscapeRight:  // 180
-      x = 1 - x;
-      y = 1 - y;
+      x = 1 - point.x;
+      y = 1 - point.y;
       break;
     case UIDeviceOrientationLandscapeLeft:
     default:
@@ -1121,48 +1074,53 @@
   return CGPointMake(x, y);
 }
 
-- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y {
+- (void)setExposurePoint:(FCPPlatformPoint *)point
+          withCompletion:(void (^)(FlutterError *_Nullable))completion {
   if (!_captureDevice.isExposurePointOfInterestSupported) {
-    result([FlutterError errorWithCode:@"setExposurePointFailed"
-                               message:@"Device does not have exposure point capabilities"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"setExposurePointFailed"
+                                   message:@"Device does not have exposure point capabilities"
+                                   details:nil]);
     return;
   }
   UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
   [_captureDevice lockForConfiguration:nil];
-  [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
-                                                                                    x:x
-                                                                                    y:y]];
+  // A nil point resets to the center.
+  [_captureDevice
+      setExposurePointOfInterest:[self CGPointForPoint:(point
+                                                            ?: [FCPPlatformPoint makeWithX:0.5
+                                                                                         y:0.5])
+                                       withOrientation:orientation]];
   [_captureDevice unlockForConfiguration];
   // Retrigger auto exposure
   [self applyExposureMode];
-  result(nil);
+  completion(nil);
 }
 
-- (void)setFocusPointWithResult:(FlutterResult)result x:(double)x y:(double)y {
+- (void)setFocusPoint:(FCPPlatformPoint *)point
+       withCompletion:(void (^)(FlutterError *_Nullable))completion {
   if (!_captureDevice.isFocusPointOfInterestSupported) {
-    result([FlutterError errorWithCode:@"setFocusPointFailed"
-                               message:@"Device does not have focus point capabilities"
-                               details:nil]);
+    completion([FlutterError errorWithCode:@"setFocusPointFailed"
+                                   message:@"Device does not have focus point capabilities"
+                                   details:nil]);
     return;
   }
   UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
   [_captureDevice lockForConfiguration:nil];
-
-  [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
-                                                                                 x:x
-                                                                                 y:y]];
+  // A nil point resets to the center.
+  [_captureDevice
+      setFocusPointOfInterest:[self
+                                  CGPointForPoint:(point ?: [FCPPlatformPoint makeWithX:0.5 y:0.5])
+                                  withOrientation:orientation]];
   [_captureDevice unlockForConfiguration];
   // Retrigger auto focus
   [self applyFocusMode];
-  result(nil);
+  completion(nil);
 }
 
-- (void)setExposureOffsetWithResult:(FlutterResult)result offset:(double)offset {
+- (void)setExposureOffset:(double)offset {
   [_captureDevice lockForConfiguration:nil];
   [_captureDevice setExposureTargetBias:offset completionHandler:nil];
   [_captureDevice unlockForConfiguration];
-  result(@(offset));
 }
 
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
@@ -1214,46 +1172,34 @@
   self.streamingPendingFramesCount--;
 }
 
-- (void)getMaxZoomLevelWithResult:(FlutterResult)result {
-  CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
-
-  result([NSNumber numberWithFloat:maxZoomFactor]);
-}
-
-- (void)getMinZoomLevelWithResult:(FlutterResult)result {
-  CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
-  result([NSNumber numberWithFloat:minZoomFactor]);
-}
-
-- (void)setZoomLevel:(CGFloat)zoom Result:(FlutterResult)result {
-  CGFloat maxAvailableZoomFactor = [self getMaxAvailableZoomFactor];
-  CGFloat minAvailableZoomFactor = [self getMinAvailableZoomFactor];
-
-  if (maxAvailableZoomFactor < zoom || minAvailableZoomFactor > zoom) {
+- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion {
+  if (_captureDevice.maxAvailableVideoZoomFactor < zoom ||
+      _captureDevice.minAvailableVideoZoomFactor > zoom) {
     NSString *errorMessage = [NSString
         stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
-                         minAvailableZoomFactor, maxAvailableZoomFactor];
+                         _captureDevice.minAvailableVideoZoomFactor,
+                         _captureDevice.maxAvailableVideoZoomFactor];
 
-    result([FlutterError errorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil]);
+    completion([FlutterError errorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil]);
     return;
   }
 
   NSError *error = nil;
   if (![_captureDevice lockForConfiguration:&error]) {
-    result(FlutterErrorFromNSError(error));
+    completion(FlutterErrorFromNSError(error));
     return;
   }
   _captureDevice.videoZoomFactor = zoom;
   [_captureDevice unlockForConfiguration];
 
-  result(nil);
+  completion(nil);
 }
 
-- (CGFloat)getMinAvailableZoomFactor {
+- (CGFloat)minimumAvailableZoomFactor {
   return _captureDevice.minAvailableVideoZoomFactor;
 }
 
-- (CGFloat)getMaxAvailableZoomFactor {
+- (CGFloat)maximumAvailableZoomFactor {
   return _captureDevice.maxAvailableVideoZoomFactor;
 }
 
@@ -1335,7 +1281,7 @@
     [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
   }
 
-  if (_flashMode == FLTFlashModeTorch) {
+  if (_flashMode == FCPPlatformFlashModeTorch) {
     [self.captureDevice lockForConfiguration:nil];
     [self.captureDevice setTorchMode:AVCaptureTorchModeOn];
     [self.captureDevice unlockForConfiguration];
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h
deleted file mode 100644
index 004accf..0000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import Foundation;
-
-NS_ASSUME_NONNULL_BEGIN
-
-/**
- * Media settings configuration parameters.
- */
-@interface FLTCamMediaSettings : NSObject
-
-/**
- * @property framesPerSecond optional frame rate of video being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *framesPerSecond;
-
-/**
- * @property videoBitrate optional bitrate of video being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *videoBitrate;
-
-/**
- * @property audioBitrate optional bitrate of audio being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *audioBitrate;
-
-/**
- * @property enableAudio whether audio should be recorded.
- */
-@property(atomic, readonly) BOOL enableAudio;
-
-/**
- * @method initWithFramesPerSecond:videoBitrate:audioBitrate:enableAudio:
- *
- * @abstract Initialize `FLTCamMediaSettings`.
- *
- * @param framesPerSecond optional frame rate of video being recorded.
- * @param videoBitrate optional bitrate of video being recorded.
- * @param audioBitrate optional bitrate of audio being recorded.
- * @param enableAudio whether audio should be recorded.
- *
- * @result FLTCamMediaSettings instance
- */
-- (instancetype)initWithFramesPerSecond:(nullable NSNumber *)framesPerSecond
-                           videoBitrate:(nullable NSNumber *)videoBitrate
-                           audioBitrate:(nullable NSNumber *)audioBitrate
-                            enableAudio:(BOOL)enableAudio NS_DESIGNATED_INITIALIZER;
-
-- (instancetype)init NS_UNAVAILABLE;
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m
deleted file mode 100644
index 5c2ca5a..0000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "FLTCamMediaSettings.h"
-
-static void AssertPositiveNumberOrNil(NSNumber *_Nullable param, const char *_Nonnull paramName) {
-  if (param != nil) {
-    NSCAssert(!isnan([param doubleValue]), @"%s is NaN", paramName);
-    NSCAssert([param doubleValue] > 0, @"%s is not positive: %@", paramName, param);
-  }
-}
-
-@implementation FLTCamMediaSettings
-
-- (instancetype)initWithFramesPerSecond:(nullable NSNumber *)framesPerSecond
-                           videoBitrate:(nullable NSNumber *)videoBitrate
-                           audioBitrate:(nullable NSNumber *)audioBitrate
-                            enableAudio:(BOOL)enableAudio {
-  self = [super init];
-
-  if (self != nil) {
-    AssertPositiveNumberOrNil(framesPerSecond, "framesPerSecond");
-    AssertPositiveNumberOrNil(videoBitrate, "videoBitrate");
-    AssertPositiveNumberOrNil(audioBitrate, "audioBitrate");
-
-    _framesPerSecond = framesPerSecond;
-    _videoBitrate = videoBitrate;
-    _audioBitrate = audioBitrate;
-    _enableAudio = enableAudio;
-  }
-
-  return self;
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
index ed9fad6..d05838f 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
@@ -55,8 +55,7 @@
 /// Initializes a camera instance.
 /// Allows for injecting dependencies that are usually internal.
 - (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                     mediaSettings:(FLTCamMediaSettings *)mediaSettings
+                     mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
             mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
                        orientation:(UIDeviceOrientation)orientation
                videoCaptureSession:(AVCaptureSession *)videoCaptureSession
@@ -67,16 +66,15 @@
 ///  Initializes a camera instance.
 ///  Allows for testing with specified resolution, audio preference, orientation,
 ///  and direct access to capture sessions and blocks.
-- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
-                           mediaSettings:(FLTCamMediaSettings *)mediaSettings
-                  mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
-                             orientation:(UIDeviceOrientation)orientation
-                     videoCaptureSession:(AVCaptureSession *)videoCaptureSession
-                     audioCaptureSession:(AVCaptureSession *)audioCaptureSession
-                     captureSessionQueue:(dispatch_queue_t)captureSessionQueue
-                    captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
-                videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
-                                   error:(NSError **)error;
+- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
+               mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
+                          orientation:(UIDeviceOrientation)orientation
+                  videoCaptureSession:(AVCaptureSession *)videoCaptureSession
+                  audioCaptureSession:(AVCaptureSession *)audioCaptureSession
+                  captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                 captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
+             videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
+                                error:(NSError **)error;
 
 /// Start streaming images.
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
deleted file mode 100644
index 2f80f68..0000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Flutter/Flutter.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-/// A thread safe wrapper for FlutterTextureRegistry that can be called from any thread, by
-/// dispatching its underlying engine calls to the main thread.
-@interface FLTThreadSafeTextureRegistry : NSObject
-
-/// Creates a FLTThreadSafeTextureRegistry by wrapping an object conforming to
-/// FlutterTextureRegistry.
-/// @param registry The FlutterTextureRegistry object to be wrapped.
-- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry;
-
-/// Registers a `FlutterTexture` on the main thread for usage in Flutter and returns an id that can
-/// be used to reference that texture when calling into Flutter with channels.
-///
-/// On success the completion block completes with the pointer to the registered texture, else with
-/// 0. The completion block runs on the main thread.
-- (void)registerTexture:(NSObject<FlutterTexture> *)texture
-             completion:(void (^)(int64_t))completion;
-
-/// Notifies the Flutter engine on the main thread that the given texture has been updated.
-- (void)textureFrameAvailable:(int64_t)textureId;
-
-/// Notifies the Flutter engine on the main thread to unregister a `FlutterTexture` that has been
-/// previously registered with `registerTexture:`.
-/// @param textureId The result that was previously returned from `registerTexture:`.
-- (void)unregisterTexture:(int64_t)textureId;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
deleted file mode 100644
index b82d566..0000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "FLTThreadSafeTextureRegistry.h"
-#import "QueueUtils.h"
-
-@interface FLTThreadSafeTextureRegistry ()
-@property(nonatomic, strong) NSObject<FlutterTextureRegistry> *registry;
-@end
-
-@implementation FLTThreadSafeTextureRegistry
-
-- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry {
-  self = [super init];
-  if (self) {
-    _registry = registry;
-  }
-  return self;
-}
-
-- (void)registerTexture:(NSObject<FlutterTexture> *)texture
-             completion:(void (^)(int64_t))completion {
-  __weak typeof(self) weakSelf = self;
-  FLTEnsureToRunOnMainQueue(^{
-    typeof(self) strongSelf = weakSelf;
-    if (!strongSelf) return;
-    completion([strongSelf.registry registerTexture:texture]);
-  });
-}
-
-- (void)textureFrameAvailable:(int64_t)textureId {
-  __weak typeof(self) weakSelf = self;
-  FLTEnsureToRunOnMainQueue(^{
-    [weakSelf.registry textureFrameAvailable:textureId];
-  });
-}
-
-- (void)unregisterTexture:(int64_t)textureId {
-  __weak typeof(self) weakSelf = self;
-  FLTEnsureToRunOnMainQueue(^{
-    [weakSelf.registry unregisterTexture:textureId];
-  });
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
index 4f17971..8e3dd43 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
@@ -52,6 +52,19 @@
 - (instancetype)initWithValue:(FCPPlatformExposureMode)value;
 @end
 
+typedef NS_ENUM(NSUInteger, FCPPlatformFlashMode) {
+  FCPPlatformFlashModeOff = 0,
+  FCPPlatformFlashModeAuto = 1,
+  FCPPlatformFlashModeAlways = 2,
+  FCPPlatformFlashModeTorch = 3,
+};
+
+/// Wrapper for FCPPlatformFlashMode to allow for nullability.
+@interface FCPPlatformFlashModeBox : NSObject
+@property(nonatomic, assign) FCPPlatformFlashMode value;
+- (instancetype)initWithValue:(FCPPlatformFlashMode)value;
+@end
+
 typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) {
   FCPPlatformFocusModeAuto = 0,
   FCPPlatformFocusModeLocked = 1,
@@ -63,8 +76,48 @@
 - (instancetype)initWithValue:(FCPPlatformFocusMode)value;
 @end
 
+/// Pigeon version of ImageFileFormat.
+typedef NS_ENUM(NSUInteger, FCPPlatformImageFileFormat) {
+  FCPPlatformImageFileFormatJpeg = 0,
+  FCPPlatformImageFileFormatHeif = 1,
+};
+
+/// Wrapper for FCPPlatformImageFileFormat to allow for nullability.
+@interface FCPPlatformImageFileFormatBox : NSObject
+@property(nonatomic, assign) FCPPlatformImageFileFormat value;
+- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value;
+@end
+
+typedef NS_ENUM(NSUInteger, FCPPlatformImageFormatGroup) {
+  FCPPlatformImageFormatGroupBgra8888 = 0,
+  FCPPlatformImageFormatGroupYuv420 = 1,
+};
+
+/// Wrapper for FCPPlatformImageFormatGroup to allow for nullability.
+@interface FCPPlatformImageFormatGroupBox : NSObject
+@property(nonatomic, assign) FCPPlatformImageFormatGroup value;
+- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value;
+@end
+
+typedef NS_ENUM(NSUInteger, FCPPlatformResolutionPreset) {
+  FCPPlatformResolutionPresetLow = 0,
+  FCPPlatformResolutionPresetMedium = 1,
+  FCPPlatformResolutionPresetHigh = 2,
+  FCPPlatformResolutionPresetVeryHigh = 3,
+  FCPPlatformResolutionPresetUltraHigh = 4,
+  FCPPlatformResolutionPresetMax = 5,
+};
+
+/// Wrapper for FCPPlatformResolutionPreset to allow for nullability.
+@interface FCPPlatformResolutionPresetBox : NSObject
+@property(nonatomic, assign) FCPPlatformResolutionPreset value;
+- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value;
+@end
+
 @class FCPPlatformCameraDescription;
 @class FCPPlatformCameraState;
+@class FCPPlatformMediaSettings;
+@class FCPPlatformPoint;
 @class FCPPlatformSize;
 
 @interface FCPPlatformCameraDescription : NSObject
@@ -98,6 +151,29 @@
 @property(nonatomic, assign) BOOL focusPointSupported;
 @end
 
+@interface FCPPlatformMediaSettings : NSObject
+/// `init` unavailable to enforce nonnull fields, see the `make` class method.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+                         framesPerSecond:(nullable NSNumber *)framesPerSecond
+                            videoBitrate:(nullable NSNumber *)videoBitrate
+                            audioBitrate:(nullable NSNumber *)audioBitrate
+                             enableAudio:(BOOL)enableAudio;
+@property(nonatomic, assign) FCPPlatformResolutionPreset resolutionPreset;
+@property(nonatomic, strong, nullable) NSNumber *framesPerSecond;
+@property(nonatomic, strong, nullable) NSNumber *videoBitrate;
+@property(nonatomic, strong, nullable) NSNumber *audioBitrate;
+@property(nonatomic, assign) BOOL enableAudio;
+@end
+
+@interface FCPPlatformPoint : NSObject
+/// `init` unavailable to enforce nonnull fields, see the `make` class method.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)makeWithX:(double)x y:(double)y;
+@property(nonatomic, assign) double x;
+@property(nonatomic, assign) double y;
+@end
+
 @interface FCPPlatformSize : NSObject
 /// `init` unavailable to enforce nonnull fields, see the `make` class method.
 - (instancetype)init NS_UNAVAILABLE;
@@ -113,6 +189,92 @@
 /// Returns the list of available cameras.
 - (void)availableCamerasWithCompletion:(void (^)(NSArray<FCPPlatformCameraDescription *> *_Nullable,
                                                  FlutterError *_Nullable))completion;
+/// Creates a new camera with the given settings, and returns its ID.
+- (void)createCameraWithName:(NSString *)cameraName
+                    settings:(FCPPlatformMediaSettings *)settings
+                  completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Initializes the camera with the given ID.
+- (void)initializeCamera:(NSInteger)cameraId
+         withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+              completion:(void (^)(FlutterError *_Nullable))completion;
+/// Begins streaming frames from the camera.
+- (void)startImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Stops streaming frames from the camera.
+- (void)stopImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Called by the Dart side of the plugin when it has received the last image
+/// frame sent.
+///
+/// This is used to throttle sending frames across the channel.
+- (void)receivedImageStreamDataWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Indicates that the given camera is no longer being used on the Dart side,
+/// and any associated resources can be cleaned up.
+- (void)disposeCamera:(NSInteger)cameraId completion:(void (^)(FlutterError *_Nullable))completion;
+/// Locks the camera capture to the current device orientation.
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation
+                    completion:(void (^)(FlutterError *_Nullable))completion;
+/// Unlocks camera capture orientation, allowing it to automatically adapt to
+/// device orientation.
+- (void)unlockCaptureOrientationWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Takes a picture with the current settings, and returns the path to the
+/// resulting file.
+- (void)takePictureWithCompletion:(void (^)(NSString *_Nullable,
+                                            FlutterError *_Nullable))completion;
+/// Does any preprocessing necessary before beginning to record video.
+- (void)prepareForVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Begins recording video, optionally enabling streaming to Dart at the same
+/// time.
+- (void)startVideoRecordingWithStreaming:(BOOL)enableStream
+                              completion:(void (^)(FlutterError *_Nullable))completion;
+/// Stops recording video, and returns the path to the resulting file.
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+                                                   FlutterError *_Nullable))completion;
+/// Pauses video recording.
+- (void)pauseVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Resumes a previously paused video recording.
+- (void)resumeVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given flash mode.
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+          completion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given exposure mode.
+- (void)setExposureMode:(FCPPlatformExposureMode)mode
+             completion:(void (^)(FlutterError *_Nullable))completion;
+/// Anchors auto-exposure to the given point in (0,1) coordinate space.
+///
+/// A null value resets to the default exposure point.
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+              completion:(void (^)(FlutterError *_Nullable))completion;
+/// Returns the minimum exposure offset supported by the camera.
+- (void)getMinimumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Returns the maximum exposure offset supported by the camera.
+- (void)getMaximumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Sets the exposure offset manually to the given value.
+- (void)setExposureOffset:(double)offset completion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given focus mode.
+- (void)setFocusMode:(FCPPlatformFocusMode)mode
+          completion:(void (^)(FlutterError *_Nullable))completion;
+/// Anchors auto-focus to the given point in (0,1) coordinate space.
+///
+/// A null value resets to the default focus point.
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+           completion:(void (^)(FlutterError *_Nullable))completion;
+/// Returns the minimum zoom level supported by the camera.
+- (void)getMinimumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Returns the maximum zoom level supported by the camera.
+- (void)getMaximumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Sets the zoom factor.
+- (void)setZoomLevel:(double)zoom completion:(void (^)(FlutterError *_Nullable))completion;
+/// Pauses streaming of preview frames.
+- (void)pausePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Resumes a previously paused preview stream.
+- (void)resumePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Changes the camera used while recording video.
+///
+/// This should only be called while video recording is active.
+- (void)updateDescriptionWhileRecordingCameraName:(NSString *)cameraName
+                                       completion:(void (^)(FlutterError *_Nullable))completion;
+/// Sets the file format used for taking pictures.
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)format
+                completion:(void (^)(FlutterError *_Nullable))completion;
 @end
 
 extern void SetUpFCPCameraApi(id<FlutterBinaryMessenger> binaryMessenger,
diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
index fd1100c..d90b63d 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
@@ -69,6 +69,16 @@
 }
 @end
 
+@implementation FCPPlatformFlashModeBox
+- (instancetype)initWithValue:(FCPPlatformFlashMode)value {
+  self = [super init];
+  if (self) {
+    _value = value;
+  }
+  return self;
+}
+@end
+
 @implementation FCPPlatformFocusModeBox
 - (instancetype)initWithValue:(FCPPlatformFocusMode)value {
   self = [super init];
@@ -79,6 +89,37 @@
 }
 @end
 
+/// Pigeon version of ImageFileFormat.
+@implementation FCPPlatformImageFileFormatBox
+- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value {
+  self = [super init];
+  if (self) {
+    _value = value;
+  }
+  return self;
+}
+@end
+
+@implementation FCPPlatformImageFormatGroupBox
+- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value {
+  self = [super init];
+  if (self) {
+    _value = value;
+  }
+  return self;
+}
+@end
+
+@implementation FCPPlatformResolutionPresetBox
+- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value {
+  self = [super init];
+  if (self) {
+    _value = value;
+  }
+  return self;
+}
+@end
+
 @interface FCPPlatformCameraDescription ()
 + (FCPPlatformCameraDescription *)fromList:(NSArray *)list;
 + (nullable FCPPlatformCameraDescription *)nullableFromList:(NSArray *)list;
@@ -91,6 +132,18 @@
 - (NSArray *)toList;
 @end
 
+@interface FCPPlatformMediaSettings ()
++ (FCPPlatformMediaSettings *)fromList:(NSArray *)list;
++ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
+@interface FCPPlatformPoint ()
++ (FCPPlatformPoint *)fromList:(NSArray *)list;
++ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
 @interface FCPPlatformSize ()
 + (FCPPlatformSize *)fromList:(NSArray *)list;
 + (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list;
@@ -159,6 +212,67 @@
 }
 @end
 
+@implementation FCPPlatformMediaSettings
++ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+                         framesPerSecond:(nullable NSNumber *)framesPerSecond
+                            videoBitrate:(nullable NSNumber *)videoBitrate
+                            audioBitrate:(nullable NSNumber *)audioBitrate
+                             enableAudio:(BOOL)enableAudio {
+  FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init];
+  pigeonResult.resolutionPreset = resolutionPreset;
+  pigeonResult.framesPerSecond = framesPerSecond;
+  pigeonResult.videoBitrate = videoBitrate;
+  pigeonResult.audioBitrate = audioBitrate;
+  pigeonResult.enableAudio = enableAudio;
+  return pigeonResult;
+}
++ (FCPPlatformMediaSettings *)fromList:(NSArray *)list {
+  FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init];
+  pigeonResult.resolutionPreset = [GetNullableObjectAtIndex(list, 0) integerValue];
+  pigeonResult.framesPerSecond = GetNullableObjectAtIndex(list, 1);
+  pigeonResult.videoBitrate = GetNullableObjectAtIndex(list, 2);
+  pigeonResult.audioBitrate = GetNullableObjectAtIndex(list, 3);
+  pigeonResult.enableAudio = [GetNullableObjectAtIndex(list, 4) boolValue];
+  return pigeonResult;
+}
++ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list {
+  return (list) ? [FCPPlatformMediaSettings fromList:list] : nil;
+}
+- (NSArray *)toList {
+  return @[
+    @(self.resolutionPreset),
+    self.framesPerSecond ?: [NSNull null],
+    self.videoBitrate ?: [NSNull null],
+    self.audioBitrate ?: [NSNull null],
+    @(self.enableAudio),
+  ];
+}
+@end
+
+@implementation FCPPlatformPoint
++ (instancetype)makeWithX:(double)x y:(double)y {
+  FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init];
+  pigeonResult.x = x;
+  pigeonResult.y = y;
+  return pigeonResult;
+}
++ (FCPPlatformPoint *)fromList:(NSArray *)list {
+  FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init];
+  pigeonResult.x = [GetNullableObjectAtIndex(list, 0) doubleValue];
+  pigeonResult.y = [GetNullableObjectAtIndex(list, 1) doubleValue];
+  return pigeonResult;
+}
++ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list {
+  return (list) ? [FCPPlatformPoint fromList:list] : nil;
+}
+- (NSArray *)toList {
+  return @[
+    @(self.x),
+    @(self.y),
+  ];
+}
+@end
+
 @implementation FCPPlatformSize
 + (instancetype)makeWithWidth:(double)width height:(double)height {
   FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init];
@@ -190,6 +304,10 @@
   switch (type) {
     case 128:
       return [FCPPlatformCameraDescription fromList:[self readValue]];
+    case 129:
+      return [FCPPlatformMediaSettings fromList:[self readValue]];
+    case 130:
+      return [FCPPlatformPoint fromList:[self readValue]];
     default:
       return [super readValueOfType:type];
   }
@@ -203,6 +321,12 @@
   if ([value isKindOfClass:[FCPPlatformCameraDescription class]]) {
     [self writeByte:128];
     [self writeValue:[value toList]];
+  } else if ([value isKindOfClass:[FCPPlatformMediaSettings class]]) {
+    [self writeByte:129];
+    [self writeValue:[value toList]];
+  } else if ([value isKindOfClass:[FCPPlatformPoint class]]) {
+    [self writeByte:130];
+    [self writeValue:[value toList]];
   } else {
     [super writeValue:value];
   }
@@ -264,6 +388,733 @@
       [channel setMessageHandler:nil];
     }
   }
+  /// Create a new camera with the given settings, and returns its ID.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:
+                                      @"%@%@",
+                                      @"dev.flutter.pigeon.camera_avfoundation.CameraApi.create",
+                                      messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(createCameraWithName:settings:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(createCameraWithName:settings:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0);
+        FCPPlatformMediaSettings *arg_settings = GetNullableObjectAtIndex(args, 1);
+        [api createCameraWithName:arg_cameraName
+                         settings:arg_settings
+                       completion:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+                         callback(wrapResult(output, error));
+                       }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Initializes the camera with the given ID.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:
+               [NSString
+                   stringWithFormat:@"%@%@",
+                                    @"dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize",
+                                    messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(initializeCamera:withImageFormat:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(initializeCamera:withImageFormat:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue];
+        FCPPlatformImageFormatGroup arg_imageFormat =
+            [GetNullableObjectAtIndex(args, 1) integerValue];
+        [api initializeCamera:arg_cameraId
+              withImageFormat:arg_imageFormat
+                   completion:^(FlutterError *_Nullable error) {
+                     callback(wrapResult(nil, error));
+                   }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Begins streaming frames from the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.startImageStream",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(startImageStreamWithCompletion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(startImageStreamWithCompletion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api startImageStreamWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Stops streaming frames from the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(stopImageStreamWithCompletion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(stopImageStreamWithCompletion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api stopImageStreamWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Called by the Dart side of the plugin when it has received the last image
+  /// frame sent.
+  ///
+  /// This is used to throttle sending frames across the channel.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.receivedImageStreamData",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(receivedImageStreamDataWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(receivedImageStreamDataWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api receivedImageStreamDataWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Indicates that the given camera is no longer being used on the Dart side,
+  /// and any associated resources can be cleaned up.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:
+                                      @"%@%@",
+                                      @"dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose",
+                                      messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(disposeCamera:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(disposeCamera:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api disposeCamera:arg_cameraId
+                completion:^(FlutterError *_Nullable error) {
+                  callback(wrapResult(nil, error));
+                }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Locks the camera capture to the current device orientation.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.lockCaptureOrientation",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(lockCaptureOrientation:completion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(lockCaptureOrientation:completion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformDeviceOrientation arg_orientation =
+            [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api lockCaptureOrientation:arg_orientation
+                         completion:^(FlutterError *_Nullable error) {
+                           callback(wrapResult(nil, error));
+                         }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Unlocks camera capture orientation, allowing it to automatically adapt to
+  /// device orientation.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.unlockCaptureOrientation",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(unlockCaptureOrientationWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(unlockCaptureOrientationWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api unlockCaptureOrientationWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Takes a picture with the current settings, and returns the path to the
+  /// resulting file.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:
+               [NSString
+                   stringWithFormat:@"%@%@",
+                                    @"dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture",
+                                    messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(takePictureWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(takePictureWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api
+            takePictureWithCompletion:^(NSString *_Nullable output, FlutterError *_Nullable error) {
+              callback(wrapResult(output, error));
+            }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Does any preprocessing necessary before beginning to record video.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.prepareForVideoRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(prepareForVideoRecordingWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(prepareForVideoRecordingWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api prepareForVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Begins recording video, optionally enabling streaming to Dart at the same
+  /// time.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.startVideoRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(startVideoRecordingWithStreaming:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(startVideoRecordingWithStreaming:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        BOOL arg_enableStream = [GetNullableObjectAtIndex(args, 0) boolValue];
+        [api startVideoRecordingWithStreaming:arg_enableStream
+                                   completion:^(FlutterError *_Nullable error) {
+                                     callback(wrapResult(nil, error));
+                                   }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Stops recording video, and returns the path to the resulting file.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.stopVideoRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(stopVideoRecordingWithCompletion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(stopVideoRecordingWithCompletion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api stopVideoRecordingWithCompletion:^(NSString *_Nullable output,
+                                                FlutterError *_Nullable error) {
+          callback(wrapResult(output, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Pauses video recording.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.pauseVideoRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(pauseVideoRecordingWithCompletion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(pauseVideoRecordingWithCompletion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api pauseVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Resumes a previously paused video recording.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.resumeVideoRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(resumeVideoRecordingWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(resumeVideoRecordingWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api resumeVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Switches the camera to the given flash mode.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setFlashMode:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setFlashMode:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformFlashMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api setFlashMode:arg_mode
+               completion:^(FlutterError *_Nullable error) {
+                 callback(wrapResult(nil, error));
+               }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Switches the camera to the given exposure mode.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setExposureMode:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureMode:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformExposureMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api setExposureMode:arg_mode
+                  completion:^(FlutterError *_Nullable error) {
+                    callback(wrapResult(nil, error));
+                  }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Anchors auto-exposure to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default exposure point.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.setExposurePoint",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setExposurePoint:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setExposurePoint:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0);
+        [api setExposurePoint:arg_point
+                   completion:^(FlutterError *_Nullable error) {
+                     callback(wrapResult(nil, error));
+                   }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Returns the minimum exposure offset supported by the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.getMinExposureOffset",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(getMinimumExposureOffset:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumExposureOffset:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api getMinimumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+          callback(wrapResult(output, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Returns the maximum exposure offset supported by the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.getMaxExposureOffset",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(getMaximumExposureOffset:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumExposureOffset:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api getMaximumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+          callback(wrapResult(output, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Sets the exposure offset manually to the given value.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.setExposureOffset",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(setExposureOffset:completion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureOffset:completion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        double arg_offset = [GetNullableObjectAtIndex(args, 0) doubleValue];
+        [api setExposureOffset:arg_offset
+                    completion:^(FlutterError *_Nullable error) {
+                      callback(wrapResult(nil, error));
+                    }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Switches the camera to the given focus mode.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setFocusMode:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusMode:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformFocusMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api setFocusMode:arg_mode
+               completion:^(FlutterError *_Nullable error) {
+                 callback(wrapResult(nil, error));
+               }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Anchors auto-focus to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default focus point.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setFocusPoint:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusPoint:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0);
+        [api setFocusPoint:arg_point
+                completion:^(FlutterError *_Nullable error) {
+                  callback(wrapResult(nil, error));
+                }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Returns the minimum zoom level supported by the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(getMinimumZoomLevel:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumZoomLevel:)", api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api getMinimumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+          callback(wrapResult(output, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Returns the maximum zoom level supported by the camera.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(getMaximumZoomLevel:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumZoomLevel:)", api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api getMaximumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+          callback(wrapResult(output, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Sets the zoom factor.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(setZoomLevel:completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(setZoomLevel:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        double arg_zoom = [GetNullableObjectAtIndex(args, 0) doubleValue];
+        [api setZoomLevel:arg_zoom
+               completion:^(FlutterError *_Nullable error) {
+                 callback(wrapResult(nil, error));
+               }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Pauses streaming of preview frames.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(pausePreviewWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(pausePreviewWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api pausePreviewWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Resumes a previously paused preview stream.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString
+                            stringWithFormat:
+                                @"%@%@",
+                                @"dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview",
+                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(resumePreviewWithCompletion:)],
+                @"FCPCameraApi api (%@) doesn't respond to @selector(resumePreviewWithCompletion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        [api resumePreviewWithCompletion:^(FlutterError *_Nullable error) {
+          callback(wrapResult(nil, error));
+        }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Changes the camera used while recording video.
+  ///
+  /// This should only be called while video recording is active.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.updateDescriptionWhileRecording",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(updateDescriptionWhileRecordingCameraName:
+                                                                                 completion:)],
+                @"FCPCameraApi api (%@) doesn't respond to "
+                @"@selector(updateDescriptionWhileRecordingCameraName:completion:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0);
+        [api updateDescriptionWhileRecordingCameraName:arg_cameraName
+                                            completion:^(FlutterError *_Nullable error) {
+                                              callback(wrapResult(nil, error));
+                                            }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  /// Sets the file format used for taking pictures.
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+           initWithName:[NSString stringWithFormat:@"%@%@",
+                                                   @"dev.flutter.pigeon.camera_avfoundation."
+                                                   @"CameraApi.setImageFileFormat",
+                                                   messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FCPCameraApiGetCodec()];
+    if (api) {
+      NSCAssert(
+          [api respondsToSelector:@selector(setImageFileFormat:completion:)],
+          @"FCPCameraApi api (%@) doesn't respond to @selector(setImageFileFormat:completion:)",
+          api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        FCPPlatformImageFileFormat arg_format = [GetNullableObjectAtIndex(args, 0) integerValue];
+        [api setImageFileFormat:arg_format
+                     completion:^(FlutterError *_Nullable error) {
+                       callback(wrapResult(nil, error));
+                     }];
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
 }
 NSObject<FlutterMessageCodec> *FCPCameraGlobalEventApiGetCodec(void) {
   static FlutterStandardMessageCodec *sSharedObject = nil;
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
index dc9f3c7..6f94786 100644
--- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
+++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
@@ -6,7 +6,6 @@
 import 'dart:math';
 
 import 'package:camera_platform_interface/camera_platform_interface.dart';
-import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
@@ -16,9 +15,6 @@
 import 'type_conversion.dart';
 import 'utils.dart';
 
-const MethodChannel _channel =
-    MethodChannel('plugins.flutter.io/camera_avfoundation');
-
 /// An iOS implementation of [CameraPlatform] based on AVFoundation.
 class AVFoundationCamera extends CameraPlatform {
   /// Creates a new AVFoundation-based [CameraPlatform] implementation instance.
@@ -100,19 +96,16 @@
     MediaSettings? mediaSettings,
   ) async {
     try {
-      final Map<String, dynamic>? reply = await _channel
-          .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
-        'cameraName': cameraDescription.name,
-        'resolutionPreset': null != mediaSettings?.resolutionPreset
-            ? _serializeResolutionPreset(mediaSettings!.resolutionPreset!)
-            : null,
-        'fps': mediaSettings?.fps,
-        'videoBitrate': mediaSettings?.videoBitrate,
-        'audioBitrate': mediaSettings?.audioBitrate,
-        'enableAudio': mediaSettings?.enableAudio ?? true,
-      });
-
-      return reply!['cameraId']! as int;
+      return await _hostApi.create(
+          cameraDescription.name,
+          PlatformMediaSettings(
+            resolutionPreset:
+                _pigeonResolutionPreset(mediaSettings?.resolutionPreset),
+            framesPerSecond: mediaSettings?.fps,
+            videoBitrate: mediaSettings?.videoBitrate,
+            audioBitrate: mediaSettings?.audioBitrate,
+            enableAudio: mediaSettings?.enableAudio ?? true,
+          ));
     } on PlatformException catch (e) {
       throw CameraException(e.code, e.message);
     }
@@ -122,38 +115,26 @@
   Future<void> initializeCamera(
     int cameraId, {
     ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
-  }) {
+  }) async {
     hostCameraHandlers.putIfAbsent(cameraId,
         () => HostCameraMessageHandler(cameraId, cameraEventStreamController));
 
     final Completer<void> completer = Completer<void>();
 
-    onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+    unawaited(onCameraInitialized(cameraId)
+        .first
+        .then((CameraInitializedEvent value) {
       completer.complete();
-    });
+    }));
 
-    _channel.invokeMapMethod<String, dynamic>(
-      'initialize',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'imageFormatGroup': imageFormatGroup.name(),
-      },
-    ).catchError(
-      // TODO(srawlins): This should return a value of the future's type. This
-      // will fail upcoming analysis checks with
-      // https://github.com/flutter/flutter/issues/105750.
-      // ignore: body_might_complete_normally_catch_error
-      (Object error, StackTrace stackTrace) {
-        if (error is! PlatformException) {
-          // ignore: only_throw_errors
-          throw error;
-        }
-        completer.completeError(
-          CameraException(error.code, error.message),
-          stackTrace,
-        );
-      },
-    );
+    try {
+      await _hostApi.initialize(cameraId, _pigeonImageFormat(imageFormatGroup));
+    } on PlatformException catch (e, s) {
+      completer.completeError(
+        CameraException(e.code, e.message),
+        s,
+      );
+    }
 
     return completer.future;
   }
@@ -164,10 +145,7 @@
         hostCameraHandlers.remove(cameraId);
     handler?.dispose();
 
-    await _channel.invokeMethod<void>(
-      'dispose',
-      <String, dynamic>{'cameraId': cameraId},
-    );
+    await _hostApi.dispose(cameraId);
   }
 
   @override
@@ -206,43 +184,25 @@
     int cameraId,
     DeviceOrientation orientation,
   ) async {
-    await _channel.invokeMethod<String>(
-      'lockCaptureOrientation',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'orientation': serializeDeviceOrientation(orientation)
-      },
-    );
+    await _hostApi
+        .lockCaptureOrientation(serializeDeviceOrientation(orientation));
   }
 
   @override
   Future<void> unlockCaptureOrientation(int cameraId) async {
-    await _channel.invokeMethod<String>(
-      'unlockCaptureOrientation',
-      <String, dynamic>{'cameraId': cameraId},
-    );
+    await _hostApi.unlockCaptureOrientation();
   }
 
   @override
   Future<XFile> takePicture(int cameraId) async {
-    final String? path = await _channel.invokeMethod<String>(
-      'takePicture',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    if (path == null) {
-      throw CameraException(
-        'INVALID_PATH',
-        'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
-      );
-    }
-
+    final String path = await _hostApi.takePicture();
     return XFile(path);
   }
 
   @override
-  Future<void> prepareForVideoRecording() =>
-      _channel.invokeMethod<void>('prepareForVideoRecording');
+  Future<void> prepareForVideoRecording() async {
+    await _hostApi.prepareForVideoRecording();
+  }
 
   @override
   Future<void> startVideoRecording(int cameraId,
@@ -253,14 +213,8 @@
 
   @override
   Future<void> startVideoCapturing(VideoCaptureOptions options) async {
-    await _channel.invokeMethod<void>(
-      'startVideoRecording',
-      <String, dynamic>{
-        'cameraId': options.cameraId,
-        'maxVideoDuration': options.maxDuration?.inMilliseconds,
-        'enableStream': options.streamCallback != null,
-      },
-    );
+    // Max video duration is currently not supported.
+    await _hostApi.startVideoRecording(options.streamCallback != null);
 
     if (options.streamCallback != null) {
       _frameStreamController = _createStreamController();
@@ -271,33 +225,19 @@
 
   @override
   Future<XFile> stopVideoRecording(int cameraId) async {
-    final String? path = await _channel.invokeMethod<String>(
-      'stopVideoRecording',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    if (path == null) {
-      throw CameraException(
-        'INVALID_PATH',
-        'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
-      );
-    }
-
+    final String path = await _hostApi.stopVideoRecording();
     return XFile(path);
   }
 
   @override
-  Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
-        'pauseVideoRecording',
-        <String, dynamic>{'cameraId': cameraId},
-      );
+  Future<void> pauseVideoRecording(int cameraId) async {
+    await _hostApi.pauseVideoRecording();
+  }
 
   @override
-  Future<void> resumeVideoRecording(int cameraId) =>
-      _channel.invokeMethod<void>(
-        'resumeVideoRecording',
-        <String, dynamic>{'cameraId': cameraId},
-      );
+  Future<void> resumeVideoRecording(int cameraId) async {
+    await _hostApi.resumeVideoRecording();
+  }
 
   @override
   Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
@@ -322,7 +262,7 @@
   }
 
   Future<void> _startPlatformStream() async {
-    await _channel.invokeMethod<void>('startImageStream');
+    await _hostApi.startImageStream();
     _startStreamListener();
   }
 
@@ -332,7 +272,7 @@
     _platformImageStreamSubscription =
         cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
       try {
-        _channel.invokeMethod<void>('receivedImageStreamData');
+        _hostApi.receivedImageStreamData();
       } on PlatformException catch (e) {
         throw CameraException(e.code, e.message);
       }
@@ -342,7 +282,7 @@
   }
 
   FutureOr<void> _onFrameStreamCancel() async {
-    await _channel.invokeMethod<void>('stopImageStream');
+    await _hostApi.stopImageStream();
     await _platformImageStreamSubscription?.cancel();
     _platformImageStreamSubscription = null;
     _frameStreamController = null;
@@ -354,140 +294,75 @@
   }
 
   @override
-  Future<void> setFlashMode(int cameraId, FlashMode mode) =>
-      _channel.invokeMethod<void>(
-        'setFlashMode',
-        <String, dynamic>{
-          'cameraId': cameraId,
-          'mode': _serializeFlashMode(mode),
-        },
-      );
+  Future<void> setFlashMode(int cameraId, FlashMode mode) async {
+    await _hostApi.setFlashMode(_pigeonFlashMode(mode));
+  }
 
   @override
-  Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
-      _channel.invokeMethod<void>(
-        'setExposureMode',
-        <String, dynamic>{
-          'cameraId': cameraId,
-          'mode': _serializeExposureMode(mode),
-        },
-      );
+  Future<void> setExposureMode(int cameraId, ExposureMode mode) async {
+    await _hostApi.setExposureMode(_pigeonExposureMode(mode));
+  }
 
   @override
-  Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+  Future<void> setExposurePoint(int cameraId, Point<double>? point) async {
     assert(point == null || point.x >= 0 && point.x <= 1);
     assert(point == null || point.y >= 0 && point.y <= 1);
 
-    return _channel.invokeMethod<void>(
-      'setExposurePoint',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'reset': point == null,
-        'x': point?.x,
-        'y': point?.y,
-      },
-    );
+    await _hostApi.setExposurePoint(_pigeonPoint(point));
   }
 
   @override
   Future<double> getMinExposureOffset(int cameraId) async {
-    final double? minExposureOffset = await _channel.invokeMethod<double>(
-      'getMinExposureOffset',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    return minExposureOffset!;
+    return _hostApi.getMinExposureOffset();
   }
 
   @override
   Future<double> getMaxExposureOffset(int cameraId) async {
-    final double? maxExposureOffset = await _channel.invokeMethod<double>(
-      'getMaxExposureOffset',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    return maxExposureOffset!;
+    return _hostApi.getMaxExposureOffset();
   }
 
   @override
   Future<double> getExposureOffsetStepSize(int cameraId) async {
-    final double? stepSize = await _channel.invokeMethod<double>(
-      'getExposureOffsetStepSize',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    return stepSize!;
+    // iOS has no step size.
+    return 0;
   }
 
   @override
   Future<double> setExposureOffset(int cameraId, double offset) async {
-    final double? appliedOffset = await _channel.invokeMethod<double>(
-      'setExposureOffset',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'offset': offset,
-      },
-    );
-
-    return appliedOffset!;
+    await _hostApi.setExposureOffset(offset);
+    // The platform API allows for implementations that have to adjust the
+    // target offset and return the actual offset used, but there is never
+    // adjustment in this implementation.
+    return offset;
   }
 
   @override
-  Future<void> setFocusMode(int cameraId, FocusMode mode) =>
-      _channel.invokeMethod<void>(
-        'setFocusMode',
-        <String, dynamic>{
-          'cameraId': cameraId,
-          'mode': _serializeFocusMode(mode),
-        },
-      );
+  Future<void> setFocusMode(int cameraId, FocusMode mode) async {
+    await _hostApi.setFocusMode(_pigeonFocusMode(mode));
+  }
 
   @override
-  Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+  Future<void> setFocusPoint(int cameraId, Point<double>? point) async {
     assert(point == null || point.x >= 0 && point.x <= 1);
     assert(point == null || point.y >= 0 && point.y <= 1);
 
-    return _channel.invokeMethod<void>(
-      'setFocusPoint',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'reset': point == null,
-        'x': point?.x,
-        'y': point?.y,
-      },
-    );
+    await _hostApi.setFocusPoint(_pigeonPoint(point));
   }
 
   @override
   Future<double> getMaxZoomLevel(int cameraId) async {
-    final double? maxZoomLevel = await _channel.invokeMethod<double>(
-      'getMaxZoomLevel',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    return maxZoomLevel!;
+    return _hostApi.getMaxZoomLevel();
   }
 
   @override
   Future<double> getMinZoomLevel(int cameraId) async {
-    final double? minZoomLevel = await _channel.invokeMethod<double>(
-      'getMinZoomLevel',
-      <String, dynamic>{'cameraId': cameraId},
-    );
-
-    return minZoomLevel!;
+    return _hostApi.getMinZoomLevel();
   }
 
   @override
   Future<void> setZoomLevel(int cameraId, double zoom) async {
     try {
-      await _channel.invokeMethod<double>(
-        'setZoomLevel',
-        <String, dynamic>{
-          'cameraId': cameraId,
-          'zoom': zoom,
-        },
-      );
+      await _hostApi.setZoomLevel(zoom);
     } on PlatformException catch (e) {
       throw CameraException(e.code, e.message);
     }
@@ -495,40 +370,23 @@
 
   @override
   Future<void> pausePreview(int cameraId) async {
-    await _channel.invokeMethod<double>(
-      'pausePreview',
-      <String, dynamic>{'cameraId': cameraId},
-    );
+    await _hostApi.pausePreview();
   }
 
   @override
   Future<void> resumePreview(int cameraId) async {
-    await _channel.invokeMethod<double>(
-      'resumePreview',
-      <String, dynamic>{'cameraId': cameraId},
-    );
+    await _hostApi.resumePreview();
   }
 
   @override
   Future<void> setDescriptionWhileRecording(
       CameraDescription description) async {
-    await _channel.invokeMethod<double>(
-      'setDescriptionWhileRecording',
-      <String, dynamic>{
-        'cameraName': description.name,
-      },
-    );
+    await _hostApi.updateDescriptionWhileRecording(description.name);
   }
 
   @override
-  Future<void> setImageFileFormat(int cameraId, ImageFileFormat format) {
-    return _channel.invokeMethod<void>(
-      'setImageFileFormat',
-      <String, dynamic>{
-        'cameraId': cameraId,
-        'fileFormat': format.name,
-      },
-    );
+  Future<void> setImageFileFormat(int cameraId, ImageFileFormat format) async {
+    await _hostApi.setImageFileFormat(_pigeonImageFileFormat(format));
   }
 
   @override
@@ -536,12 +394,13 @@
     return Texture(textureId: cameraId);
   }
 
-  String _serializeFocusMode(FocusMode mode) {
+  /// Returns a [FocusMode]'s Pigeon representation.
+  PlatformFocusMode _pigeonFocusMode(FocusMode mode) {
     switch (mode) {
       case FocusMode.locked:
-        return 'locked';
+        return PlatformFocusMode.locked;
       case FocusMode.auto:
-        return 'auto';
+        return PlatformFocusMode.auto;
     }
     // The enum comes from a different package, which could get a new value at
     // any time, so provide a fallback that ensures this won't break when used
@@ -549,15 +408,16 @@
     // the switch rather than a `default` so that the linter will flag the
     // switch as needing an update.
     // ignore: dead_code
-    return 'auto';
+    return PlatformFocusMode.auto;
   }
 
-  String _serializeExposureMode(ExposureMode mode) {
+  /// Returns an [ExposureMode]'s Pigeon representation.
+  PlatformExposureMode _pigeonExposureMode(ExposureMode mode) {
     switch (mode) {
       case ExposureMode.locked:
-        return 'locked';
+        return PlatformExposureMode.locked;
       case ExposureMode.auto:
-        return 'auto';
+        return PlatformExposureMode.auto;
     }
     // The enum comes from a different package, which could get a new value at
     // any time, so provide a fallback that ensures this won't break when used
@@ -565,20 +425,20 @@
     // the switch rather than a `default` so that the linter will flag the
     // switch as needing an update.
     // ignore: dead_code
-    return 'auto';
+    return PlatformExposureMode.auto;
   }
 
-  /// Returns the flash mode as a String.
-  String _serializeFlashMode(FlashMode flashMode) {
+  /// Returns a [FlashMode]'s Pigeon representation.
+  PlatformFlashMode _pigeonFlashMode(FlashMode flashMode) {
     switch (flashMode) {
       case FlashMode.off:
-        return 'off';
+        return PlatformFlashMode.off;
       case FlashMode.auto:
-        return 'auto';
+        return PlatformFlashMode.auto;
       case FlashMode.always:
-        return 'always';
+        return PlatformFlashMode.always;
       case FlashMode.torch:
-        return 'torch';
+        return PlatformFlashMode.torch;
     }
     // The enum comes from a different package, which could get a new value at
     // any time, so provide a fallback that ensures this won't break when used
@@ -586,24 +446,30 @@
     // the switch rather than a `default` so that the linter will flag the
     // switch as needing an update.
     // ignore: dead_code
-    return 'off';
+    return PlatformFlashMode.off;
   }
 
-  /// Returns the resolution preset as a String.
-  String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+  /// Returns a [ResolutionPreset]'s Pigeon representation.
+  PlatformResolutionPreset _pigeonResolutionPreset(
+      ResolutionPreset? resolutionPreset) {
+    if (resolutionPreset == null) {
+      // Provide a default if one isn't provided, since the native side needs
+      // to set something.
+      return PlatformResolutionPreset.high;
+    }
     switch (resolutionPreset) {
       case ResolutionPreset.max:
-        return 'max';
+        return PlatformResolutionPreset.max;
       case ResolutionPreset.ultraHigh:
-        return 'ultraHigh';
+        return PlatformResolutionPreset.ultraHigh;
       case ResolutionPreset.veryHigh:
-        return 'veryHigh';
+        return PlatformResolutionPreset.veryHigh;
       case ResolutionPreset.high:
-        return 'high';
+        return PlatformResolutionPreset.high;
       case ResolutionPreset.medium:
-        return 'medium';
+        return PlatformResolutionPreset.medium;
       case ResolutionPreset.low:
-        return 'low';
+        return PlatformResolutionPreset.low;
     }
     // The enum comes from a different package, which could get a new value at
     // any time, so provide a fallback that ensures this won't break when used
@@ -611,7 +477,59 @@
     // the switch rather than a `default` so that the linter will flag the
     // switch as needing an update.
     // ignore: dead_code
-    return 'max';
+    return PlatformResolutionPreset.max;
+  }
+
+  /// Returns an [ImageFormatGroup]'s Pigeon representation.
+  PlatformImageFormatGroup _pigeonImageFormat(ImageFormatGroup format) {
+    switch (format) {
+      // "unknown" is used to indicate the default.
+      case ImageFormatGroup.unknown:
+      case ImageFormatGroup.bgra8888:
+        return PlatformImageFormatGroup.bgra8888;
+      case ImageFormatGroup.yuv420:
+        return PlatformImageFormatGroup.yuv420;
+      case ImageFormatGroup.jpeg:
+      case ImageFormatGroup.nv21:
+      // Fall through.
+    }
+    // The enum comes from a different package, which could get a new value at
+    // any time, so provide a fallback that ensures this won't break when used
+    // with a version that contains new values. This is deliberately outside
+    // the switch rather than a `default` so that the linter will flag the
+    // switch as needing an update.
+    // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of
+    // doing fallback, when a specific unsupported format is requested. This
+    // would require a breaking change at this layer and the app-facing layer.
+    return PlatformImageFormatGroup.bgra8888;
+  }
+
+  /// Returns an [ImageFileFormat]'s Pigeon representation.
+  PlatformImageFileFormat _pigeonImageFileFormat(ImageFileFormat format) {
+    switch (format) {
+      case ImageFileFormat.heif:
+        return PlatformImageFileFormat.heif;
+      case ImageFileFormat.jpeg:
+        return PlatformImageFileFormat.jpeg;
+    }
+    // The enum comes from a different package, which could get a new value at
+    // any time, so provide a fallback that ensures this won't break when used
+    // with a version that contains new values. This is deliberately outside
+    // the switch rather than a `default` so that the linter will flag the
+    // switch as needing an update.
+    // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of
+    // doing fallback, when a specific unsupported format is requested. This
+    // would require a breaking change at this layer and the app-facing layer.
+    // ignore: dead_code
+    return PlatformImageFileFormat.jpeg;
+  }
+
+  /// Returns a [Point]'s Pigeon representation.
+  PlatformPoint? _pigeonPoint(Point<double>? point) {
+    if (point == null) {
+      return null;
+    }
+    return PlatformPoint(x: point.x, y: point.y);
   }
 }
 
diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
index a4b3992..4290eb0 100644
--- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart
+++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
@@ -52,11 +52,38 @@
   locked,
 }
 
+enum PlatformFlashMode {
+  off,
+  auto,
+  always,
+  torch,
+}
+
 enum PlatformFocusMode {
   auto,
   locked,
 }
 
+/// Pigeon version of ImageFileFormat.
+enum PlatformImageFileFormat {
+  jpeg,
+  heif,
+}
+
+enum PlatformImageFormatGroup {
+  bgra8888,
+  yuv420,
+}
+
+enum PlatformResolutionPreset {
+  low,
+  medium,
+  high,
+  veryHigh,
+  ultraHigh,
+  max,
+}
+
 class PlatformCameraDescription {
   PlatformCameraDescription({
     required this.name,
@@ -131,6 +158,73 @@
   }
 }
 
+class PlatformMediaSettings {
+  PlatformMediaSettings({
+    required this.resolutionPreset,
+    this.framesPerSecond,
+    this.videoBitrate,
+    this.audioBitrate,
+    required this.enableAudio,
+  });
+
+  PlatformResolutionPreset resolutionPreset;
+
+  int? framesPerSecond;
+
+  int? videoBitrate;
+
+  int? audioBitrate;
+
+  bool enableAudio;
+
+  Object encode() {
+    return <Object?>[
+      resolutionPreset.index,
+      framesPerSecond,
+      videoBitrate,
+      audioBitrate,
+      enableAudio,
+    ];
+  }
+
+  static PlatformMediaSettings decode(Object result) {
+    result as List<Object?>;
+    return PlatformMediaSettings(
+      resolutionPreset: PlatformResolutionPreset.values[result[0]! as int],
+      framesPerSecond: result[1] as int?,
+      videoBitrate: result[2] as int?,
+      audioBitrate: result[3] as int?,
+      enableAudio: result[4]! as bool,
+    );
+  }
+}
+
+class PlatformPoint {
+  PlatformPoint({
+    required this.x,
+    required this.y,
+  });
+
+  double x;
+
+  double y;
+
+  Object encode() {
+    return <Object?>[
+      x,
+      y,
+    ];
+  }
+
+  static PlatformPoint decode(Object result) {
+    result as List<Object?>;
+    return PlatformPoint(
+      x: result[0]! as double,
+      y: result[1]! as double,
+    );
+  }
+}
+
 class PlatformSize {
   PlatformSize({
     required this.width,
@@ -164,6 +258,12 @@
     if (value is PlatformCameraDescription) {
       buffer.putUint8(128);
       writeValue(buffer, value.encode());
+    } else if (value is PlatformMediaSettings) {
+      buffer.putUint8(129);
+      writeValue(buffer, value.encode());
+    } else if (value is PlatformPoint) {
+      buffer.putUint8(130);
+      writeValue(buffer, value.encode());
     } else {
       super.writeValue(buffer, value);
     }
@@ -174,6 +274,10 @@
     switch (type) {
       case 128:
         return PlatformCameraDescription.decode(readValue(buffer)!);
+      case 129:
+        return PlatformMediaSettings.decode(readValue(buffer)!);
+      case 130:
+        return PlatformPoint.decode(readValue(buffer)!);
       default:
         return super.readValueOfType(type, buffer);
     }
@@ -225,6 +329,781 @@
           .cast<PlatformCameraDescription?>();
     }
   }
+
+  /// Create a new camera with the given settings, and returns its ID.
+  Future<int> create(String cameraName, PlatformMediaSettings settings) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.create$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList = await __pigeon_channel
+        .send(<Object?>[cameraName, settings]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as int?)!;
+    }
+  }
+
+  /// Initializes the camera with the given ID.
+  Future<void> initialize(
+      int cameraId, PlatformImageFormatGroup imageFormat) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList = await __pigeon_channel
+        .send(<Object?>[cameraId, imageFormat.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Begins streaming frames from the camera.
+  Future<void> startImageStream() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Stops streaming frames from the camera.
+  Future<void> stopImageStream() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Called by the Dart side of the plugin when it has received the last image
+  /// frame sent.
+  ///
+  /// This is used to throttle sending frames across the channel.
+  Future<void> receivedImageStreamData() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Indicates that the given camera is no longer being used on the Dart side,
+  /// and any associated resources can be cleaned up.
+  Future<void> dispose(int cameraId) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[cameraId]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Locks the camera capture to the current device orientation.
+  Future<void> lockCaptureOrientation(
+      PlatformDeviceOrientation orientation) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList = await __pigeon_channel
+        .send(<Object?>[orientation.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Unlocks camera capture orientation, allowing it to automatically adapt to
+  /// device orientation.
+  Future<void> unlockCaptureOrientation() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Takes a picture with the current settings, and returns the path to the
+  /// resulting file.
+  Future<String> takePicture() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as String?)!;
+    }
+  }
+
+  /// Does any preprocessing necessary before beginning to record video.
+  Future<void> prepareForVideoRecording() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Begins recording video, optionally enabling streaming to Dart at the same
+  /// time.
+  Future<void> startVideoRecording(bool enableStream) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[enableStream]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Stops recording video, and returns the path to the resulting file.
+  Future<String> stopVideoRecording() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as String?)!;
+    }
+  }
+
+  /// Pauses video recording.
+  Future<void> pauseVideoRecording() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Resumes a previously paused video recording.
+  Future<void> resumeVideoRecording() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Switches the camera to the given flash mode.
+  Future<void> setFlashMode(PlatformFlashMode mode) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[mode.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Switches the camera to the given exposure mode.
+  Future<void> setExposureMode(PlatformExposureMode mode) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[mode.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Anchors auto-exposure to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default exposure point.
+  Future<void> setExposurePoint(PlatformPoint? point) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[point]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Returns the minimum exposure offset supported by the camera.
+  Future<double> getMinExposureOffset() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as double?)!;
+    }
+  }
+
+  /// Returns the maximum exposure offset supported by the camera.
+  Future<double> getMaxExposureOffset() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as double?)!;
+    }
+  }
+
+  /// Sets the exposure offset manually to the given value.
+  Future<void> setExposureOffset(double offset) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[offset]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Switches the camera to the given focus mode.
+  Future<void> setFocusMode(PlatformFocusMode mode) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[mode.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Anchors auto-focus to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default focus point.
+  Future<void> setFocusPoint(PlatformPoint? point) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[point]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Returns the minimum zoom level supported by the camera.
+  Future<double> getMinZoomLevel() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as double?)!;
+    }
+  }
+
+  /// Returns the maximum zoom level supported by the camera.
+  Future<double> getMaxZoomLevel() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else if (__pigeon_replyList[0] == null) {
+      throw PlatformException(
+        code: 'null-error',
+        message: 'Host platform returned null value for non-null return value.',
+      );
+    } else {
+      return (__pigeon_replyList[0] as double?)!;
+    }
+  }
+
+  /// Sets the zoom factor.
+  Future<void> setZoomLevel(double zoom) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[zoom]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Pauses streaming of preview frames.
+  Future<void> pausePreview() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Resumes a previously paused preview stream.
+  Future<void> resumePreview() async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(null) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Changes the camera used while recording video.
+  ///
+  /// This should only be called while video recording is active.
+  Future<void> updateDescriptionWhileRecording(String cameraName) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[cameraName]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
+
+  /// Sets the file format used for taking pictures.
+  Future<void> setImageFileFormat(PlatformImageFileFormat format) async {
+    final String __pigeon_channelName =
+        'dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat$__pigeon_messageChannelSuffix';
+    final BasicMessageChannel<Object?> __pigeon_channel =
+        BasicMessageChannel<Object?>(
+      __pigeon_channelName,
+      pigeonChannelCodec,
+      binaryMessenger: __pigeon_binaryMessenger,
+    );
+    final List<Object?>? __pigeon_replyList =
+        await __pigeon_channel.send(<Object?>[format.index]) as List<Object?>?;
+    if (__pigeon_replyList == null) {
+      throw _createConnectionError(__pigeon_channelName);
+    } else if (__pigeon_replyList.length > 1) {
+      throw PlatformException(
+        code: __pigeon_replyList[0]! as String,
+        message: __pigeon_replyList[1] as String?,
+        details: __pigeon_replyList[2],
+      );
+    } else {
+      return;
+    }
+  }
 }
 
 /// Handler for native callbacks that are not tied to a specific camera ID.
diff --git a/packages/camera/camera_avfoundation/lib/src/utils.dart b/packages/camera/camera_avfoundation/lib/src/utils.dart
index 5c38f80..3fd7f59 100644
--- a/packages/camera/camera_avfoundation/lib/src/utils.dart
+++ b/packages/camera/camera_avfoundation/lib/src/utils.dart
@@ -26,17 +26,18 @@
   };
 }
 
-/// Returns the device orientation as a String.
-String serializeDeviceOrientation(DeviceOrientation orientation) {
+/// Converts the given device orientation to Pigeon.
+PlatformDeviceOrientation serializeDeviceOrientation(
+    DeviceOrientation orientation) {
   switch (orientation) {
     case DeviceOrientation.portraitUp:
-      return 'portraitUp';
+      return PlatformDeviceOrientation.portraitUp;
     case DeviceOrientation.portraitDown:
-      return 'portraitDown';
+      return PlatformDeviceOrientation.portraitDown;
     case DeviceOrientation.landscapeRight:
-      return 'landscapeRight';
+      return PlatformDeviceOrientation.landscapeRight;
     case DeviceOrientation.landscapeLeft:
-      return 'landscapeLeft';
+      return PlatformDeviceOrientation.landscapeLeft;
   }
   // The enum comes from a different package, which could get a new value at
   // any time, so provide a fallback that ensures this won't break when used
@@ -44,7 +45,7 @@
   // the switch rather than a `default` so that the linter will flag the
   // switch as needing an update.
   // ignore: dead_code
-  return 'portraitUp';
+  return PlatformDeviceOrientation.portraitUp;
 }
 
 /// Converts a Pigeon [PlatformDeviceOrientation] to a [DeviceOrientation].
diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart
index e88b9cc..f99e03f 100644
--- a/packages/camera/camera_avfoundation/pigeons/messages.dart
+++ b/packages/camera/camera_avfoundation/pigeons/messages.dart
@@ -38,12 +38,42 @@
   locked,
 }
 
+// Pigeon version of FlashMode.
+enum PlatformFlashMode {
+  off,
+  auto,
+  always,
+  torch,
+}
+
 // Pigeon version of FocusMode.
 enum PlatformFocusMode {
   auto,
   locked,
 }
 
+// Pigeon version of ImageFileFormat.
+enum PlatformImageFileFormat {
+  jpeg,
+  heif,
+}
+
+// Pigeon version of the subset of ImageFormatGroup supported on iOS.
+enum PlatformImageFormatGroup {
+  bgra8888,
+  yuv420,
+}
+
+// Pigeon version of ResolutionPreset.
+enum PlatformResolutionPreset {
+  low,
+  medium,
+  high,
+  veryHigh,
+  ultraHigh,
+  max,
+}
+
 // Pigeon version of CameraDescription.
 class PlatformCameraDescription {
   PlatformCameraDescription({
@@ -84,6 +114,31 @@
   final bool focusPointSupported;
 }
 
+// Pigeon version of MediaSettings.
+class PlatformMediaSettings {
+  PlatformMediaSettings({
+    required this.resolutionPreset,
+    required this.framesPerSecond,
+    required this.videoBitrate,
+    required this.audioBitrate,
+    required this.enableAudio,
+  });
+
+  final PlatformResolutionPreset resolutionPreset;
+  final int? framesPerSecond;
+  final int? videoBitrate;
+  final int? audioBitrate;
+  final bool enableAudio;
+}
+
+// Pigeon equivalent of CGPoint.
+class PlatformPoint {
+  PlatformPoint({required this.x, required this.y});
+
+  final double x;
+  final double y;
+}
+
 // Pigeon equivalent of CGSize.
 class PlatformSize {
   PlatformSize({required this.width, required this.height});
@@ -101,6 +156,152 @@
   @async
   @ObjCSelector('availableCamerasWithCompletion')
   List<PlatformCameraDescription?> getAvailableCameras();
+
+  /// Create a new camera with the given settings, and returns its ID.
+  @async
+  @ObjCSelector('createCameraWithName:settings:')
+  int create(String cameraName, PlatformMediaSettings settings);
+
+  /// Initializes the camera with the given ID.
+  @async
+  @ObjCSelector('initializeCamera:withImageFormat:')
+  void initialize(int cameraId, PlatformImageFormatGroup imageFormat);
+
+  /// Begins streaming frames from the camera.
+  @async
+  void startImageStream();
+
+  /// Stops streaming frames from the camera.
+  @async
+  void stopImageStream();
+
+  /// Called by the Dart side of the plugin when it has received the last image
+  /// frame sent.
+  ///
+  /// This is used to throttle sending frames across the channel.
+  @async
+  void receivedImageStreamData();
+
+  /// Indicates that the given camera is no longer being used on the Dart side,
+  /// and any associated resources can be cleaned up.
+  @async
+  @ObjCSelector('disposeCamera:')
+  void dispose(int cameraId);
+
+  /// Locks the camera capture to the current device orientation.
+  @async
+  @ObjCSelector('lockCaptureOrientation:')
+  void lockCaptureOrientation(PlatformDeviceOrientation orientation);
+
+  /// Unlocks camera capture orientation, allowing it to automatically adapt to
+  /// device orientation.
+  @async
+  void unlockCaptureOrientation();
+
+  /// Takes a picture with the current settings, and returns the path to the
+  /// resulting file.
+  @async
+  String takePicture();
+
+  /// Does any preprocessing necessary before beginning to record video.
+  @async
+  void prepareForVideoRecording();
+
+  /// Begins recording video, optionally enabling streaming to Dart at the same
+  /// time.
+  @async
+  @ObjCSelector('startVideoRecordingWithStreaming:')
+  void startVideoRecording(bool enableStream);
+
+  /// Stops recording video, and returns the path to the resulting file.
+  @async
+  String stopVideoRecording();
+
+  /// Pauses video recording.
+  @async
+  void pauseVideoRecording();
+
+  /// Resumes a previously paused video recording.
+  @async
+  void resumeVideoRecording();
+
+  /// Switches the camera to the given flash mode.
+  @async
+  @ObjCSelector('setFlashMode:')
+  void setFlashMode(PlatformFlashMode mode);
+
+  /// Switches the camera to the given exposure mode.
+  @async
+  @ObjCSelector('setExposureMode:')
+  void setExposureMode(PlatformExposureMode mode);
+
+  /// Anchors auto-exposure to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default exposure point.
+  @async
+  @ObjCSelector('setExposurePoint:')
+  void setExposurePoint(PlatformPoint? point);
+
+  /// Returns the minimum exposure offset supported by the camera.
+  @async
+  @ObjCSelector('getMinimumExposureOffset')
+  double getMinExposureOffset();
+
+  /// Returns the maximum exposure offset supported by the camera.
+  @async
+  @ObjCSelector('getMaximumExposureOffset')
+  double getMaxExposureOffset();
+
+  /// Sets the exposure offset manually to the given value.
+  @async
+  @ObjCSelector('setExposureOffset:')
+  void setExposureOffset(double offset);
+
+  /// Switches the camera to the given focus mode.
+  @async
+  @ObjCSelector('setFocusMode:')
+  void setFocusMode(PlatformFocusMode mode);
+
+  /// Anchors auto-focus to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default focus point.
+  @async
+  @ObjCSelector('setFocusPoint:')
+  void setFocusPoint(PlatformPoint? point);
+
+  /// Returns the minimum zoom level supported by the camera.
+  @async
+  @ObjCSelector('getMinimumZoomLevel')
+  double getMinZoomLevel();
+
+  /// Returns the maximum zoom level supported by the camera.
+  @async
+  @ObjCSelector('getMaximumZoomLevel')
+  double getMaxZoomLevel();
+
+  /// Sets the zoom factor.
+  @async
+  @ObjCSelector('setZoomLevel:')
+  void setZoomLevel(double zoom);
+
+  /// Pauses streaming of preview frames.
+  @async
+  void pausePreview();
+
+  /// Resumes a previously paused preview stream.
+  @async
+  void resumePreview();
+
+  /// Changes the camera used while recording video.
+  ///
+  /// This should only be called while video recording is active.
+  @async
+  void updateDescriptionWhileRecording(String cameraName);
+
+  /// Sets the file format used for taking pictures.
+  @async
+  @ObjCSelector('setImageFileFormat:')
+  void setImageFileFormat(PlatformImageFileFormat format);
 }
 
 /// Handler for native callbacks that are not tied to a specific camera ID.
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index 7cec9cd..b91fe76 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.15+4
+version: 0.9.16
 
 environment:
   sdk: ^3.2.3
diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
index 5ee2dab..df04b1a 100644
--- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
+++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
@@ -17,11 +17,8 @@
 import 'package:mockito/mockito.dart';
 
 import 'avfoundation_camera_test.mocks.dart';
-import 'method_channel_mock.dart';
 
-const String _channelName = 'plugins.flutter.io/camera_avfoundation';
-
-@GenerateMocks(<Type>[CameraApi])
+@GenerateNiceMocks(<MockSpec<Object>>[MockSpec<CameraApi>()])
 void main() {
   TestWidgetsFlutterBinding.ensureInitialized();
 
@@ -33,39 +30,28 @@
   group('Creation, Initialization & Disposal Tests', () {
     test('Should send creation data and receive back a camera id', () async {
       // Arrange
-      final MethodChannelMock cameraMockChannel = MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{
-            'create': <String, dynamic>{
-              'cameraId': 1,
-              'imageFormatGroup': 'unknown',
-            }
-          });
-      final AVFoundationCamera camera = AVFoundationCamera();
+      final MockCameraApi mockApi = MockCameraApi();
+      when(mockApi.create(any, any)).thenAnswer((_) async => 1);
+      final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
+      const String cameraName = 'Test';
 
       // Act
       final int cameraId = await camera.createCamera(
         const CameraDescription(
-            name: 'Test',
+            name: cameraName,
             lensDirection: CameraLensDirection.back,
             sensorOrientation: 0),
         ResolutionPreset.high,
       );
 
       // Assert
-      expect(cameraMockChannel.log, <Matcher>[
-        isMethodCall(
-          'create',
-          arguments: <String, Object?>{
-            'cameraName': 'Test',
-            'resolutionPreset': 'high',
-            'fps': null,
-            'videoBitrate': null,
-            'audioBitrate': null,
-            'enableAudio': false
-          },
-        ),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.create(captureAny, captureAny));
+      expect(verification.captured[0], cameraName);
+      final PlatformMediaSettings? settings =
+          verification.captured[1] as PlatformMediaSettings?;
+      expect(settings, isNotNull);
+      expect(settings?.resolutionPreset, PlatformResolutionPreset.high);
       expect(cameraId, 1);
     });
 
@@ -73,57 +59,54 @@
         'Should send creation data and receive back a camera id using createCameraWithSettings',
         () async {
       // Arrange
-      final MethodChannelMock cameraMockChannel = MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{
-            'create': <String, dynamic>{
-              'cameraId': 1,
-              'imageFormatGroup': 'unknown',
-            }
-          });
-      final AVFoundationCamera camera = AVFoundationCamera();
+      final MockCameraApi mockApi = MockCameraApi();
+      when(mockApi.create(any, any)).thenAnswer((_) async => 1);
+      final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
+      const String cameraName = 'Test';
+      const int fps = 15;
+      const int videoBitrate = 200000;
+      const int audioBitrate = 32000;
 
       // Act
       final int cameraId = await camera.createCameraWithSettings(
         const CameraDescription(
-            name: 'Test',
+            name: cameraName,
             lensDirection: CameraLensDirection.back,
             sensorOrientation: 0),
         const MediaSettings(
           resolutionPreset: ResolutionPreset.low,
-          fps: 15,
-          videoBitrate: 200000,
-          audioBitrate: 32000,
+          fps: fps,
+          videoBitrate: videoBitrate,
+          audioBitrate: audioBitrate,
+          enableAudio: true,
         ),
       );
 
       // Assert
-      expect(cameraMockChannel.log, <Matcher>[
-        isMethodCall(
-          'create',
-          arguments: <String, Object?>{
-            'cameraName': 'Test',
-            'resolutionPreset': 'low',
-            'fps': 15,
-            'videoBitrate': 200000,
-            'audioBitrate': 32000,
-            'enableAudio': false
-          },
-        ),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.create(captureAny, captureAny));
+      expect(verification.captured[0], cameraName);
+      final PlatformMediaSettings? settings =
+          verification.captured[1] as PlatformMediaSettings?;
+      expect(settings, isNotNull);
+      expect(settings?.resolutionPreset, PlatformResolutionPreset.low);
+      expect(settings?.framesPerSecond, fps);
+      expect(settings?.videoBitrate, videoBitrate);
+      expect(settings?.audioBitrate, audioBitrate);
+      expect(settings?.enableAudio, true);
       expect(cameraId, 1);
     });
 
     test('Should throw CameraException when create throws a PlatformException',
         () {
       // Arrange
-      MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
-        'create': PlatformException(
-          code: 'TESTING_ERROR_CODE',
-          message: 'Mock error message used during testing.',
-        )
+      const String exceptionCode = 'TESTING_ERROR_CODE';
+      const String exceptionMessage = 'Mock error message used during testing.';
+      final MockCameraApi mockApi = MockCameraApi();
+      when(mockApi.create(any, any)).thenAnswer((_) async {
+        throw PlatformException(code: exceptionCode, message: exceptionMessage);
       });
-      final AVFoundationCamera camera = AVFoundationCamera();
+      final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
 
       // Act
       expect(
@@ -137,41 +120,9 @@
         ),
         throwsA(
           isA<CameraException>()
-              .having(
-                  (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+              .having((CameraException e) => e.code, 'code', exceptionCode)
               .having((CameraException e) => e.description, 'description',
-                  'Mock error message used during testing.'),
-        ),
-      );
-    });
-
-    test('Should throw CameraException when create throws a PlatformException',
-        () {
-      // Arrange
-      MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
-        'create': PlatformException(
-          code: 'TESTING_ERROR_CODE',
-          message: 'Mock error message used during testing.',
-        )
-      });
-      final AVFoundationCamera camera = AVFoundationCamera();
-
-      // Act
-      expect(
-        () => camera.createCamera(
-          const CameraDescription(
-            name: 'Test',
-            lensDirection: CameraLensDirection.back,
-            sensorOrientation: 0,
-          ),
-          ResolutionPreset.high,
-        ),
-        throwsA(
-          isA<CameraException>()
-              .having(
-                  (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
-              .having((CameraException e) => e.description, 'description',
-                  'Mock error message used during testing.'),
+                  exceptionMessage),
         ),
       );
     });
@@ -180,16 +131,15 @@
       'Should throw CameraException when initialize throws a PlatformException',
       () {
         // Arrange
-        MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{
-            'initialize': PlatformException(
-              code: 'TESTING_ERROR_CODE',
-              message: 'Mock error message used during testing.',
-            )
-          },
-        );
-        final AVFoundationCamera camera = AVFoundationCamera();
+        const String exceptionCode = 'TESTING_ERROR_CODE';
+        const String exceptionMessage =
+            'Mock error message used during testing.';
+        final MockCameraApi mockApi = MockCameraApi();
+        when(mockApi.initialize(any, any)).thenAnswer((_) async {
+          throw PlatformException(
+              code: exceptionCode, message: exceptionMessage);
+        });
+        final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
 
         // Act
         expect(
@@ -210,16 +160,8 @@
 
     test('Should send initialization data', () async {
       // Arrange
-      final MethodChannelMock cameraMockChannel = MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{
-            'create': <String, dynamic>{
-              'cameraId': 1,
-              'imageFormatGroup': 'unknown',
-            },
-            'initialize': null
-          });
-      final AVFoundationCamera camera = AVFoundationCamera();
+      final MockCameraApi mockApi = MockCameraApi();
+      final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
       final int cameraId = await camera.createCamera(
         const CameraDescription(
           name: 'Test',
@@ -243,30 +185,17 @@
       await initializeFuture;
 
       // Assert
-      expect(cameraId, 1);
-      expect(cameraMockChannel.log, <Matcher>[
-        anything,
-        isMethodCall(
-          'initialize',
-          arguments: <String, Object?>{
-            'cameraId': 1,
-            'imageFormatGroup': 'unknown',
-          },
-        ),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.initialize(captureAny, captureAny));
+      expect(verification.captured[0], cameraId);
+      // The default when unspecified should be bgra8888.
+      expect(verification.captured[1], PlatformImageFormatGroup.bgra8888);
     });
 
     test('Should send a disposal call on dispose', () async {
       // Arrange
-      final MethodChannelMock cameraMockChannel = MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{
-            'create': <String, dynamic>{'cameraId': 1},
-            'initialize': null,
-            'dispose': <String, dynamic>{'cameraId': 1}
-          });
-
-      final AVFoundationCamera camera = AVFoundationCamera();
+      final MockCameraApi mockApi = MockCameraApi();
+      final AVFoundationCamera camera = AVFoundationCamera(api: mockApi);
       final int cameraId = await camera.createCamera(
         const CameraDescription(
           name: 'Test',
@@ -291,15 +220,9 @@
       await camera.dispose(cameraId);
 
       // Assert
-      expect(cameraId, 1);
-      expect(cameraMockChannel.log, <Matcher>[
-        anything,
-        anything,
-        isMethodCall(
-          'dispose',
-          arguments: <String, Object?>{'cameraId': 1},
-        ),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.dispose(captureAny));
+      expect(verification.captured[0], cameraId);
     });
   });
 
@@ -307,14 +230,9 @@
     late AVFoundationCamera camera;
     late int cameraId;
     setUp(() async {
-      MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'create': <String, dynamic>{'cameraId': 1},
-          'initialize': null
-        },
-      );
-      camera = AVFoundationCamera();
+      final MockCameraApi mockApi = MockCameraApi();
+      when(mockApi.create(any, any)).thenAnswer((_) async => 1);
+      camera = AVFoundationCamera(api: mockApi);
       cameraId = await camera.createCamera(
         const CameraDescription(
           name: 'Test',
@@ -424,13 +342,7 @@
 
     setUp(() async {
       mockApi = MockCameraApi();
-      MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'create': <String, dynamic>{'cameraId': 1},
-          'initialize': null
-        },
-      );
+      when(mockApi.create(any, any)).thenAnswer((_) async => 1);
       camera = AVFoundationCamera(api: mockApi);
       cameraId = await camera.createCamera(
         const CameraDescription(
@@ -498,634 +410,300 @@
     });
 
     test('Should take a picture and return an XFile instance', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-          channelName: _channelName,
-          methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+      const String stubPath = '/test/path.jpg';
+      when(mockApi.takePicture()).thenAnswer((_) async => stubPath);
 
-      // Act
       final XFile file = await camera.takePicture(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('takePicture', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
-      expect(file.path, '/test/path.jpg');
+      expect(file.path, stubPath);
     });
 
     test('Should prepare for video recording', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'prepareForVideoRecording': null},
-      );
-
-      // Act
       await camera.prepareForVideoRecording();
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('prepareForVideoRecording', arguments: null),
-      ]);
+      verify(mockApi.prepareForVideoRecording());
     });
 
     test('Should start recording a video', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'startVideoRecording': null},
-      );
-
-      // Act
       await camera.startVideoRecording(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('startVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'maxVideoDuration': null,
-          'enableStream': false,
-        }),
-      ]);
-    });
-
-    test('Should pass maxVideoDuration when starting recording a video',
-        () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'startVideoRecording': null},
-      );
-
-      // Act
-      await camera.startVideoRecording(
-        cameraId,
-        maxVideoDuration: const Duration(seconds: 10),
-      );
-
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('startVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'maxVideoDuration': 10000,
-          'enableStream': false,
-        }),
-      ]);
+      verify(mockApi.startVideoRecording(any));
     });
 
     test(
         'Should pass enableStream if callback is passed when starting recording a video',
         () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'startVideoRecording': null},
-      );
-
-      // Act
       await camera.startVideoCapturing(VideoCaptureOptions(cameraId,
           streamCallback: (CameraImageData imageData) {}));
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('startVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'maxVideoDuration': null,
-          'enableStream': true,
-        }),
-      ]);
+      verify(mockApi.startVideoRecording(true));
     });
 
     test('Should stop a video recording and return the file', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
-      );
+      const String stubPath = '/test/path.mp4';
+      when(mockApi.stopVideoRecording()).thenAnswer((_) async => stubPath);
 
-      // Act
       final XFile file = await camera.stopVideoRecording(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('stopVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
-      expect(file.path, '/test/path.mp4');
+      expect(file.path, stubPath);
     });
 
     test('Should pause a video recording', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'pauseVideoRecording': null},
-      );
-
-      // Act
       await camera.pauseVideoRecording(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      verify(mockApi.pauseVideoRecording());
     });
 
     test('Should resume a video recording', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'resumeVideoRecording': null},
-      );
-
-      // Act
       await camera.resumeVideoRecording(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      verify(mockApi.resumeVideoRecording());
     });
 
     test('Should set the description while recording', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setDescriptionWhileRecording': null},
-      );
       const CameraDescription camera2Description = CameraDescription(
           name: 'Test2',
           lensDirection: CameraLensDirection.front,
           sensorOrientation: 0);
 
-      // Act
       await camera.setDescriptionWhileRecording(camera2Description);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setDescriptionWhileRecording',
-            arguments: <String, Object?>{
-              'cameraName': camera2Description.name,
-            }),
-      ]);
+      verify(mockApi.updateDescriptionWhileRecording(camera2Description.name));
     });
 
-    test('Should set the flash mode', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setFlashMode': null},
-      );
-
-      // Act
+    test('Should set the flash mode to torch', () async {
       await camera.setFlashMode(cameraId, FlashMode.torch);
+
+      verify(mockApi.setFlashMode(PlatformFlashMode.torch));
+    });
+
+    test('Should set the flash mode to always', () async {
       await camera.setFlashMode(cameraId, FlashMode.always);
+
+      verify(mockApi.setFlashMode(PlatformFlashMode.always));
+    });
+
+    test('Should set the flash mode to auto', () async {
       await camera.setFlashMode(cameraId, FlashMode.auto);
+
+      verify(mockApi.setFlashMode(PlatformFlashMode.auto));
+    });
+
+    test('Should set the flash mode to off', () async {
       await camera.setFlashMode(cameraId, FlashMode.off);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setFlashMode', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'mode': 'torch'
-        }),
-        isMethodCall('setFlashMode', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'mode': 'always'
-        }),
-        isMethodCall('setFlashMode',
-            arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
-        isMethodCall('setFlashMode',
-            arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'off'}),
-      ]);
+      verify(mockApi.setFlashMode(PlatformFlashMode.off));
     });
 
-    test('Should set the exposure mode', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setExposureMode': null},
-      );
-
-      // Act
+    test('Should set the exposure mode to auto', () async {
       await camera.setExposureMode(cameraId, ExposureMode.auto);
+
+      verify(mockApi.setExposureMode(PlatformExposureMode.auto));
+    });
+
+    test('Should set the exposure mode to locked', () async {
       await camera.setExposureMode(cameraId, ExposureMode.locked);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setExposureMode',
-            arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
-        isMethodCall('setExposureMode', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'mode': 'locked'
-        }),
-      ]);
+      verify(mockApi.setExposureMode(PlatformExposureMode.locked));
     });
 
-    test('Should set the exposure point', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setExposurePoint': null},
-      );
+    test('Should set the exposure point to a value', () async {
+      const Point<double> point = Point<double>(0.4, 0.6);
+      await camera.setExposurePoint(cameraId, point);
 
-      // Act
-      await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+      final VerificationResult verification =
+          verify(mockApi.setExposurePoint(captureAny));
+      final PlatformPoint? passedPoint =
+          verification.captured[0] as PlatformPoint?;
+      expect(passedPoint?.x, point.x);
+      expect(passedPoint?.y, point.y);
+    });
+
+    test('Should set the exposure point to null for reset', () async {
       await camera.setExposurePoint(cameraId, null);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setExposurePoint', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'x': 0.5,
-          'y': 0.5,
-          'reset': false
-        }),
-        isMethodCall('setExposurePoint', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'x': null,
-          'y': null,
-          'reset': true
-        }),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.setExposurePoint(captureAny));
+      final PlatformPoint? passedPoint =
+          verification.captured[0] as PlatformPoint?;
+      expect(passedPoint, null);
     });
 
     test('Should get the min exposure offset', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'getMinExposureOffset': 2.0},
-      );
+      const double stubMinOffset = 2.0;
+      when(mockApi.getMinExposureOffset())
+          .thenAnswer((_) async => stubMinOffset);
 
-      // Act
       final double minExposureOffset =
           await camera.getMinExposureOffset(cameraId);
 
-      // Assert
-      expect(minExposureOffset, 2.0);
-      expect(channel.log, <Matcher>[
-        isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      expect(minExposureOffset, stubMinOffset);
     });
 
     test('Should get the max exposure offset', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
-      );
+      const double stubMaxOffset = 2.0;
+      when(mockApi.getMaxExposureOffset())
+          .thenAnswer((_) async => stubMaxOffset);
 
-      // Act
       final double maxExposureOffset =
           await camera.getMaxExposureOffset(cameraId);
 
-      // Assert
-      expect(maxExposureOffset, 2.0);
-      expect(channel.log, <Matcher>[
-        isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      expect(maxExposureOffset, stubMaxOffset);
     });
 
-    test('Should get the exposure offset step size', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
-      );
-
-      // Act
+    test('Exposure offset step size should always return zero', () async {
       final double stepSize = await camera.getExposureOffsetStepSize(cameraId);
 
-      // Assert
-      expect(stepSize, 0.25);
-      expect(channel.log, <Matcher>[
-        isMethodCall('getExposureOffsetStepSize', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      expect(stepSize, 0.0);
     });
 
     test('Should set the exposure offset', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setExposureOffset': 0.6},
-      );
-
-      // Act
+      const double stubOffset = 0.5;
+      final double actualOffset = await camera.setExposureOffset(cameraId, stubOffset);
 
-      // Assert
-      expect(actualOffset, 0.6);
-      expect(channel.log, <Matcher>[
-        isMethodCall('setExposureOffset', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'offset': 0.5,
-        }),
-      ]);
+      verify(mockApi.setExposureOffset(stubOffset));
+      // iOS never adjusts the offset.
+      expect(actualOffset, stubOffset);
     });
 
-    test('Should set the focus mode', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setFocusMode': null},
-      );
-
-      // Act
+    test('Should set the focus mode to auto', () async {
       await camera.setFocusMode(cameraId, FocusMode.auto);
+
+      verify(mockApi.setFocusMode(PlatformFocusMode.auto));
+    });
+
+    test('Should set the focus mode to locked', () async {
       await camera.setFocusMode(cameraId, FocusMode.locked);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setFocusMode',
-            arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
-        isMethodCall('setFocusMode', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'mode': 'locked'
-        }),
-      ]);
+      verify(mockApi.setFocusMode(PlatformFocusMode.locked));
     });
 
-    test('Should set the exposure point', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setFocusPoint': null},
-      );
+    test('Should set the focus point to a value', () async {
+      const Point<double> point = Point<double>(0.4, 0.6);
+      await camera.setFocusPoint(cameraId, point);
 
-      // Act
-      await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+      final VerificationResult verification =
+          verify(mockApi.setFocusPoint(captureAny));
+      final PlatformPoint? passedPoint =
+          verification.captured[0] as PlatformPoint?;
+      expect(passedPoint?.x, point.x);
+      expect(passedPoint?.y, point.y);
+    });
+
+    test('Should set the focus point to null for reset', () async {
       await camera.setFocusPoint(cameraId, null);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setFocusPoint', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'x': 0.5,
-          'y': 0.5,
-          'reset': false
-        }),
-        isMethodCall('setFocusPoint', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'x': null,
-          'y': null,
-          'reset': true
-        }),
-      ]);
+      final VerificationResult verification =
+          verify(mockApi.setFocusPoint(captureAny));
+      final PlatformPoint? passedPoint =
+          verification.captured[0] as PlatformPoint?;
+      expect(passedPoint, null);
     });
 
     test('Should build a texture widget as preview widget', () async {
-      // Act
       final Widget widget = camera.buildPreview(cameraId);
 
-      // Act
       expect(widget is Texture, isTrue);
       expect((widget as Texture).textureId, cameraId);
     });
 
     test('Should get the max zoom level', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
-      );
+      const double stubZoomLevel = 10.0;
+      when(mockApi.getMaxZoomLevel()).thenAnswer((_) async => stubZoomLevel);
 
-      // Act
       final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
 
-      // Assert
-      expect(maxZoomLevel, 10.0);
-      expect(channel.log, <Matcher>[
-        isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      expect(maxZoomLevel, stubZoomLevel);
     });
 
     test('Should get the min zoom level', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'getMinZoomLevel': 1.0},
-      );
+      const double stubZoomLevel = 10.0;
+      when(mockApi.getMinZoomLevel()).thenAnswer((_) async => stubZoomLevel);
 
-      // Act
-      final double maxZoomLevel = await camera.getMinZoomLevel(cameraId);
+      final double minZoomLevel = await camera.getMinZoomLevel(cameraId);
 
-      // Assert
-      expect(maxZoomLevel, 1.0);
-      expect(channel.log, <Matcher>[
-        isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
-          'cameraId': cameraId,
-        }),
-      ]);
+      expect(minZoomLevel, stubZoomLevel);
     });
 
     test('Should set the zoom level', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setZoomLevel': null},
-      );
+      const double zoom = 2.0;
 
-      // Act
-      await camera.setZoomLevel(cameraId, 2.0);
+      await camera.setZoomLevel(cameraId, zoom);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setZoomLevel',
-            arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
-      ]);
+      verify(mockApi.setZoomLevel(zoom));
     });
 
     test('Should throw CameraException when illegal zoom level is supplied',
         () async {
-      // Arrange
-      MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'setZoomLevel': PlatformException(
-            code: 'ZOOM_ERROR',
-            message: 'Illegal zoom error',
-          )
-        },
-      );
+      const String code = 'ZOOM_ERROR';
+      const String message = 'Illegal zoom error';
+      when(mockApi.setZoomLevel(any)).thenAnswer(
+          (_) async => throw PlatformException(code: code, message: message));
 
-      // Act & assert
       expect(
           () => camera.setZoomLevel(cameraId, -1.0),
           throwsA(isA<CameraException>()
-              .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+              .having((CameraException e) => e.code, 'code', code)
               .having((CameraException e) => e.description, 'description',
-                  'Illegal zoom error')));
+                  message)));
     });
 
     test('Should lock the capture orientation', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'lockCaptureOrientation': null},
-      );
-
-      // Act
       await camera.lockCaptureOrientation(
           cameraId, DeviceOrientation.portraitUp);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'orientation': 'portraitUp'
-        }),
-      ]);
+      verify(
+          mockApi.lockCaptureOrientation(PlatformDeviceOrientation.portraitUp));
     });
 
     test('Should unlock the capture orientation', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'unlockCaptureOrientation': null},
-      );
-
-      // Act
       await camera.unlockCaptureOrientation(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('unlockCaptureOrientation',
-            arguments: <String, Object?>{'cameraId': cameraId}),
-      ]);
+      verify(mockApi.unlockCaptureOrientation());
     });
 
     test('Should pause the camera preview', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'pausePreview': null},
-      );
-
-      // Act
       await camera.pausePreview(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('pausePreview',
-            arguments: <String, Object?>{'cameraId': cameraId}),
-      ]);
+      verify(mockApi.pausePreview());
     });
 
     test('Should resume the camera preview', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'resumePreview': null},
-      );
-
-      // Act
       await camera.resumePreview(cameraId);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('resumePreview',
-            arguments: <String, Object?>{'cameraId': cameraId}),
-      ]);
+      verify(mockApi.resumePreview());
     });
 
     test('Should start streaming', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'startImageStream': null,
-          'stopImageStream': null,
-        },
-      );
-
-      // Act
       final StreamSubscription<CameraImageData> subscription = camera
           .onStreamedFrameAvailable(cameraId)
           .listen((CameraImageData imageData) {});
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('startImageStream', arguments: null),
-      ]);
+      verify(mockApi.startImageStream());
 
       await subscription.cancel();
     });
 
     test('Should stop streaming', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'startImageStream': null,
-          'stopImageStream': null,
-        },
-      );
-
-      // Act
       final StreamSubscription<CameraImageData> subscription = camera
           .onStreamedFrameAvailable(cameraId)
           .listen((CameraImageData imageData) {});
       await subscription.cancel();
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('startImageStream', arguments: null),
-        isMethodCall('stopImageStream', arguments: null),
-      ]);
+      verify(mockApi.startImageStream());
+      verify(mockApi.stopImageStream());
     });
 
     test('Should set the ImageFileFormat to heif', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setImageFileFormat': 'heif'},
-      );
-
-      // Act
       await camera.setImageFileFormat(cameraId, ImageFileFormat.heif);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setImageFileFormat', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'fileFormat': 'heif',
-        }),
-      ]);
+      verify(mockApi.setImageFileFormat(PlatformImageFileFormat.heif));
     });
 
     test('Should set the ImageFileFormat to jpeg', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{
-          'setImageFileFormat': 'jpeg',
-        },
-      );
-
-      // Act
       await camera.setImageFileFormat(cameraId, ImageFileFormat.jpeg);
 
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setImageFileFormat', arguments: <String, Object?>{
-          'cameraId': cameraId,
-          'fileFormat': 'jpeg',
-        }),
-      ]);
+      verify(mockApi.setImageFileFormat(PlatformImageFileFormat.jpeg));
     });
   });
 }
diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart
index 066815c..729b39f 100644
--- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart
+++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart
@@ -7,6 +7,7 @@
 
 import 'package:camera_avfoundation/src/messages.g.dart' as _i2;
 import 'package:mockito/mockito.dart' as _i1;
+import 'package:mockito/src/dummies.dart' as _i4;
 
 // ignore_for_file: type=lint
 // ignore_for_file: avoid_redundant_argument_values
@@ -25,10 +26,6 @@
 ///
 /// See the documentation for Mockito's code generation for more information.
 class MockCameraApi extends _i1.Mock implements _i2.CameraApi {
-  MockCameraApi() {
-    _i1.throwOnMissingStub(this);
-  }
-
   @override
   _i3.Future<List<_i2.PlatformCameraDescription?>> getAvailableCameras() =>
       (super.noSuchMethod(
@@ -38,5 +35,348 @@
         ),
         returnValue: _i3.Future<List<_i2.PlatformCameraDescription?>>.value(
             <_i2.PlatformCameraDescription?>[]),
+        returnValueForMissingStub:
+            _i3.Future<List<_i2.PlatformCameraDescription?>>.value(
+                <_i2.PlatformCameraDescription?>[]),
       ) as _i3.Future<List<_i2.PlatformCameraDescription?>>);
+
+  @override
+  _i3.Future<int> create(
+    String? cameraName,
+    _i2.PlatformMediaSettings? settings,
+  ) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #create,
+          [
+            cameraName,
+            settings,
+          ],
+        ),
+        returnValue: _i3.Future<int>.value(0),
+        returnValueForMissingStub: _i3.Future<int>.value(0),
+      ) as _i3.Future<int>);
+
+  @override
+  _i3.Future<void> initialize(
+    int? cameraId,
+    _i2.PlatformImageFormatGroup? imageFormat,
+  ) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #initialize,
+          [
+            cameraId,
+            imageFormat,
+          ],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> startImageStream() => (super.noSuchMethod(
+        Invocation.method(
+          #startImageStream,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> stopImageStream() => (super.noSuchMethod(
+        Invocation.method(
+          #stopImageStream,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> receivedImageStreamData() => (super.noSuchMethod(
+        Invocation.method(
+          #receivedImageStreamData,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> dispose(int? cameraId) => (super.noSuchMethod(
+        Invocation.method(
+          #dispose,
+          [cameraId],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> lockCaptureOrientation(
+          _i2.PlatformDeviceOrientation? orientation) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #lockCaptureOrientation,
+          [orientation],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> unlockCaptureOrientation() => (super.noSuchMethod(
+        Invocation.method(
+          #unlockCaptureOrientation,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<String> takePicture() => (super.noSuchMethod(
+        Invocation.method(
+          #takePicture,
+          [],
+        ),
+        returnValue: _i3.Future<String>.value(_i4.dummyValue<String>(
+          this,
+          Invocation.method(
+            #takePicture,
+            [],
+          ),
+        )),
+        returnValueForMissingStub:
+            _i3.Future<String>.value(_i4.dummyValue<String>(
+          this,
+          Invocation.method(
+            #takePicture,
+            [],
+          ),
+        )),
+      ) as _i3.Future<String>);
+
+  @override
+  _i3.Future<void> prepareForVideoRecording() => (super.noSuchMethod(
+        Invocation.method(
+          #prepareForVideoRecording,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> startVideoRecording(bool? enableStream) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #startVideoRecording,
+          [enableStream],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<String> stopVideoRecording() => (super.noSuchMethod(
+        Invocation.method(
+          #stopVideoRecording,
+          [],
+        ),
+        returnValue: _i3.Future<String>.value(_i4.dummyValue<String>(
+          this,
+          Invocation.method(
+            #stopVideoRecording,
+            [],
+          ),
+        )),
+        returnValueForMissingStub:
+            _i3.Future<String>.value(_i4.dummyValue<String>(
+          this,
+          Invocation.method(
+            #stopVideoRecording,
+            [],
+          ),
+        )),
+      ) as _i3.Future<String>);
+
+  @override
+  _i3.Future<void> pauseVideoRecording() => (super.noSuchMethod(
+        Invocation.method(
+          #pauseVideoRecording,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> resumeVideoRecording() => (super.noSuchMethod(
+        Invocation.method(
+          #resumeVideoRecording,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setFlashMode(_i2.PlatformFlashMode? mode) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setFlashMode,
+          [mode],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setExposureMode(_i2.PlatformExposureMode? mode) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setExposureMode,
+          [mode],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setExposurePoint(_i2.PlatformPoint? point) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setExposurePoint,
+          [point],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<double> getMinExposureOffset() => (super.noSuchMethod(
+        Invocation.method(
+          #getMinExposureOffset,
+          [],
+        ),
+        returnValue: _i3.Future<double>.value(0.0),
+        returnValueForMissingStub: _i3.Future<double>.value(0.0),
+      ) as _i3.Future<double>);
+
+  @override
+  _i3.Future<double> getMaxExposureOffset() => (super.noSuchMethod(
+        Invocation.method(
+          #getMaxExposureOffset,
+          [],
+        ),
+        returnValue: _i3.Future<double>.value(0.0),
+        returnValueForMissingStub: _i3.Future<double>.value(0.0),
+      ) as _i3.Future<double>);
+
+  @override
+  _i3.Future<void> setExposureOffset(double? offset) => (super.noSuchMethod(
+        Invocation.method(
+          #setExposureOffset,
+          [offset],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setFocusMode(_i2.PlatformFocusMode? mode) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setFocusMode,
+          [mode],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setFocusPoint(_i2.PlatformPoint? point) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setFocusPoint,
+          [point],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<double> getMinZoomLevel() => (super.noSuchMethod(
+        Invocation.method(
+          #getMinZoomLevel,
+          [],
+        ),
+        returnValue: _i3.Future<double>.value(0.0),
+        returnValueForMissingStub: _i3.Future<double>.value(0.0),
+      ) as _i3.Future<double>);
+
+  @override
+  _i3.Future<double> getMaxZoomLevel() => (super.noSuchMethod(
+        Invocation.method(
+          #getMaxZoomLevel,
+          [],
+        ),
+        returnValue: _i3.Future<double>.value(0.0),
+        returnValueForMissingStub: _i3.Future<double>.value(0.0),
+      ) as _i3.Future<double>);
+
+  @override
+  _i3.Future<void> setZoomLevel(double? zoom) => (super.noSuchMethod(
+        Invocation.method(
+          #setZoomLevel,
+          [zoom],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> pausePreview() => (super.noSuchMethod(
+        Invocation.method(
+          #pausePreview,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> resumePreview() => (super.noSuchMethod(
+        Invocation.method(
+          #resumePreview,
+          [],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> updateDescriptionWhileRecording(String? cameraName) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #updateDescriptionWhileRecording,
+          [cameraName],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
+
+  @override
+  _i3.Future<void> setImageFileFormat(_i2.PlatformImageFileFormat? format) =>
+      (super.noSuchMethod(
+        Invocation.method(
+          #setImageFileFormat,
+          [format],
+        ),
+        returnValue: _i3.Future<void>.value(),
+        returnValueForMissingStub: _i3.Future<void>.value(),
+      ) as _i3.Future<void>);
 }
diff --git a/packages/camera/camera_avfoundation/test/method_channel_mock.dart b/packages/camera/camera_avfoundation/test/method_channel_mock.dart
deleted file mode 100644
index a7362d0..0000000
--- a/packages/camera/camera_avfoundation/test/method_channel_mock.dart
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-import 'package:flutter/services.dart';
-import 'package:flutter_test/flutter_test.dart';
-
-class MethodChannelMock {
-  MethodChannelMock({
-    required String channelName,
-    this.delay,
-    required this.methods,
-  }) : methodChannel = MethodChannel(channelName) {
-    TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger
-        .setMockMethodCallHandler(methodChannel, _handler);
-  }
-
-  final Duration? delay;
-  final MethodChannel methodChannel;
-  final Map<String, dynamic> methods;
-  final List<MethodCall> log = <MethodCall>[];
-
-  Future<dynamic> _handler(MethodCall methodCall) async {
-    log.add(methodCall);
-
-    if (!methods.containsKey(methodCall.method)) {
-      throw MissingPluginException('No implementation found for method '
-          '${methodCall.method} on channel ${methodChannel.name}');
-    }
-
-    return Future<dynamic>.delayed(delay ?? Duration.zero, () {
-      final dynamic result = methods[methodCall.method];
-      if (result is Exception) {
-        throw result;
-      }
-
-      return Future<dynamic>.value(result);
-    });
-  }
-}
diff --git a/packages/camera/camera_avfoundation/test/utils_test.dart b/packages/camera/camera_avfoundation/test/utils_test.dart
index 53fc72b..0e45e39 100644
--- a/packages/camera/camera_avfoundation/test/utils_test.dart
+++ b/packages/camera/camera_avfoundation/test/utils_test.dart
@@ -27,13 +27,13 @@
 
     test('serializeDeviceOrientation() should serialize correctly', () {
       expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
-          'portraitUp');
+          PlatformDeviceOrientation.portraitUp);
       expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
-          'portraitDown');
+          PlatformDeviceOrientation.portraitDown);
       expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
-          'landscapeRight');
+          PlatformDeviceOrientation.landscapeRight);
       expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
-          'landscapeLeft');
+          PlatformDeviceOrientation.landscapeLeft);
     });
 
     test('deviceOrientationFromPlatform() should convert correctly', () {