[camera] Refactor FLTCam out of CameraPlugin (#4708)

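Moves the FLTCam class, along with FLTSavePhotoDelegate and
FLTImageStreamHandler, out of CameraPlugin.m into a new FLTCam.h/FLTCam.m
pair, and adds FLTCam.h to the plugin's explicit Test submodule. Unit tests
now import the real interface instead of redeclaring fragments of it locally.
The pattern used across the updated tests (a sketch drawn from the
CameraFocusTests.m changes below):

    @import camera;
    @import camera.Test;  // exposes CameraPlugin_Test.h, CameraProperties.h, FLTCam.h

    // The hand-written FocusMode enum and partial FLTCam interface are gone;
    // tests use the shared FLTFocusMode values directly:
    [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
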
diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index dd4042e..ea27720 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,5 +1,6 @@
 ## NEXT
 
+* Minor iOS internal code cleanup related to camera class and its delegate.
 * Minor iOS internal code cleanup related to resolution preset, video format, focus mode, exposure mode and device orientation.
 * Minor iOS internal code cleanup related to flash mode.
 
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraFocusTests.m
index fdc2be9..e0f5fda 100644
--- a/packages/camera/camera/example/ios/RunnerTests/CameraFocusTests.m
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraFocusTests.m
@@ -3,25 +3,11 @@
 // found in the LICENSE file.
 
 @import camera;
+@import camera.Test;
 @import XCTest;
 @import AVFoundation;
 #import <OCMock/OCMock.h>
 
-// Mirrors FocusMode in camera.dart
-typedef enum {
-  FocusModeAuto,
-  FocusModeLocked,
-} FocusMode;
-
-@interface FLTCam : NSObject <FlutterTexture,
-                              AVCaptureVideoDataOutputSampleBufferDelegate,
-                              AVCaptureAudioDataOutputSampleBufferDelegate>
-
-- (void)applyFocusMode;
-- (void)applyFocusMode:(FocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
-- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
-@end
-
 @interface CameraFocusTests : XCTestCase
 @property(readonly, nonatomic) FLTCam *camera;
 @property(readonly, nonatomic) id mockDevice;
@@ -51,7 +37,7 @@
   [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
 
   // Run test
-  [_camera applyFocusMode:FocusModeAuto onDevice:_mockDevice];
+  [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
 
   // Expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus
   OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]);
@@ -68,7 +54,7 @@
   [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
 
   // Run test
-  [_camera applyFocusMode:FocusModeAuto onDevice:_mockDevice];
+  [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
 
   // Expect setFocusMode:AVCaptureFocusModeAutoFocus
   OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]);
@@ -86,7 +72,7 @@
   [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
 
   // Run test
-  [_camera applyFocusMode:FocusModeAuto onDevice:_mockDevice];
+  [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
 }
 
 - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus {
@@ -99,7 +85,7 @@
   [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
 
   // Run test
-  [_camera applyFocusMode:FocusModeLocked onDevice:_mockDevice];
+  [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice];
 
   // Expect setFocusMode:AVCaptureFocusModeAutoFocus
   OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]);
@@ -116,7 +102,7 @@
   [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
 
   // Run test
-  [_camera applyFocusMode:FocusModeLocked onDevice:_mockDevice];
+  [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice];
 }
 
 - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest {
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m
index eb6c007..2c1adbe 100644
--- a/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m
@@ -3,21 +3,12 @@
 // found in the LICENSE file.
 
 @import camera;
+@import camera.Test;
 @import XCTest;
 @import AVFoundation;
 #import <OCMock/OCMock.h>
 #import "MockFLTThreadSafeFlutterResult.h"
 
-@interface FLTCam : NSObject <FlutterTexture,
-                              AVCaptureVideoDataOutputSampleBufferDelegate,
-                              AVCaptureAudioDataOutputSampleBufferDelegate>
-@property(assign, nonatomic) BOOL isPreviewPaused;
-
-- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
-
-- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
-@end
-
 @interface CameraPreviewPauseTests : XCTestCase
 @end
 
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m
index ae55188..a0adb70 100644
--- a/packages/camera/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.m
@@ -5,1121 +5,15 @@
 #import "CameraPlugin.h"
 #import "CameraPlugin_Test.h"
 
-#import <AVFoundation/AVFoundation.h>
-#import <Accelerate/Accelerate.h>
-#import <CoreMotion/CoreMotion.h>
-#import <libkern/OSAtomic.h>
-#import <uuid/uuid.h>
+@import AVFoundation;
+
 #import "CameraProperties.h"
+#import "FLTCam.h"
 #import "FLTThreadSafeEventChannel.h"
 #import "FLTThreadSafeFlutterResult.h"
 #import "FLTThreadSafeMethodChannel.h"
 #import "FLTThreadSafeTextureRegistry.h"
 
-@interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
-@property(readonly, nonatomic) NSString *path;
-@property(readonly, nonatomic) FLTThreadSafeFlutterResult *result;
-@end
-
-@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
-// The queue on which `eventSink` property should be accessed
-@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
-// `eventSink` property should be accessed on `captureSessionQueue`.
-// The block itself should be invoked on the main queue.
-@property FlutterEventSink eventSink;
-@end
-
-@implementation FLTImageStreamHandler
-
-- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
-  self = [super init];
-  NSAssert(self, @"super init cannot be nil");
-  _captureSessionQueue = captureSessionQueue;
-  return self;
-}
-
-- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
-  dispatch_async(self.captureSessionQueue, ^{
-    self.eventSink = nil;
-  });
-  return nil;
-}
-
-- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
-                                       eventSink:(nonnull FlutterEventSink)events {
-  dispatch_async(self.captureSessionQueue, ^{
-    self.eventSink = events;
-  });
-  return nil;
-}
-@end
-
-@implementation FLTSavePhotoDelegate {
-  /// Used to keep the delegate alive until didFinishProcessingPhotoSampleBuffer.
-  FLTSavePhotoDelegate *selfReference;
-}
-
-- initWithPath:(NSString *)path result:(FLTThreadSafeFlutterResult *)result {
-  self = [super init];
-  NSAssert(self, @"super init cannot be nil");
-  _path = path;
-  selfReference = self;
-  _result = result;
-  return self;
-}
-
-- (void)captureOutput:(AVCapturePhotoOutput *)output
-    didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
-                previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
-                        resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
-                         bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
-                                   error:(NSError *)error API_AVAILABLE(ios(10)) {
-  selfReference = nil;
-  if (error) {
-    [_result sendError:error];
-    return;
-  }
-
-  NSData *data = [AVCapturePhotoOutput
-      JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
-                            previewPhotoSampleBuffer:previewPhotoSampleBuffer];
-
-  // TODO(sigurdm): Consider writing file asynchronously.
-  bool success = [data writeToFile:_path atomically:YES];
-
-  if (!success) {
-    [_result sendErrorWithCode:@"IOError" message:@"Unable to write file" details:nil];
-    return;
-  }
-  [_result sendSuccessWithData:_path];
-}
-
-- (void)captureOutput:(AVCapturePhotoOutput *)output
-    didFinishProcessingPhoto:(AVCapturePhoto *)photo
-                       error:(NSError *)error API_AVAILABLE(ios(11.0)) {
-  selfReference = nil;
-  if (error) {
-    [_result sendError:error];
-    return;
-  }
-
-  NSData *photoData = [photo fileDataRepresentation];
-
-  bool success = [photoData writeToFile:_path atomically:YES];
-  if (!success) {
-    [_result sendErrorWithCode:@"IOError" message:@"Unable to write file" details:nil];
-    return;
-  }
-  [_result sendSuccessWithData:_path];
-}
-@end
-
-@interface FLTCam : NSObject <FlutterTexture,
-                              AVCaptureVideoDataOutputSampleBufferDelegate,
-                              AVCaptureAudioDataOutputSampleBufferDelegate>
-@property(readonly, nonatomic) int64_t textureId;
-@property(nonatomic, copy) void (^onFrameAvailable)(void);
-@property BOOL enableAudio;
-@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
-@property(nonatomic) FLTThreadSafeMethodChannel *methodChannel;
-@property(readonly, nonatomic) AVCaptureSession *captureSession;
-@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
-@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
-@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
-@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
-@property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
-@property(readonly, nonatomic) CGSize previewSize;
-@property(readonly, nonatomic) CGSize captureSize;
-@property(strong, nonatomic) AVAssetWriter *videoWriter;
-@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput;
-@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput;
-@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
-@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
-@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
-@property(strong, nonatomic) NSString *videoRecordingPath;
-@property(assign, nonatomic) BOOL isRecording;
-@property(assign, nonatomic) BOOL isRecordingPaused;
-@property(assign, nonatomic) BOOL videoIsDisconnected;
-@property(assign, nonatomic) BOOL audioIsDisconnected;
-@property(assign, nonatomic) BOOL isAudioSetup;
-@property(assign, nonatomic) BOOL isStreamingImages;
-@property(assign, nonatomic) BOOL isPreviewPaused;
-@property(assign, nonatomic) FLTResolutionPreset resolutionPreset;
-@property(assign, nonatomic) FLTExposureMode exposureMode;
-@property(assign, nonatomic) FLTFocusMode focusMode;
-@property(assign, nonatomic) FLTFlashMode flashMode;
-@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
-@property(assign, nonatomic) CMTime lastVideoSampleTime;
-@property(assign, nonatomic) CMTime lastAudioSampleTime;
-@property(assign, nonatomic) CMTime videoTimeOffset;
-@property(assign, nonatomic) CMTime audioTimeOffset;
-// Format used for video and image streaming.
-@property(assign, nonatomic) FourCharCode videoFormat;
-@property(nonatomic) CMMotionManager *motionManager;
-@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
-@end
-
-@implementation FLTCam {
-  // All FLTCam's state access and capture session related operations should be on run on this
-  // queue.
-  dispatch_queue_t _captureSessionQueue;
-  UIDeviceOrientation _deviceOrientation;
-}
-NSString *const errorMethod = @"error";
-
-- (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                       enableAudio:(BOOL)enableAudio
-                       orientation:(UIDeviceOrientation)orientation
-               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
-                             error:(NSError **)error {
-  self = [super init];
-  NSAssert(self, @"super init cannot be nil");
-  @try {
-    _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
-  } @catch (NSError *e) {
-    *error = e;
-  }
-  _enableAudio = enableAudio;
-  _captureSessionQueue = captureSessionQueue;
-  _captureSession = [[AVCaptureSession alloc] init];
-  _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
-  _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
-  _exposureMode = FLTExposureModeAuto;
-  _focusMode = FLTFocusModeAuto;
-  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
-  _deviceOrientation = orientation;
-  _videoFormat = kCVPixelFormatType_32BGRA;
-
-  NSError *localError = nil;
-  _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
-                                                             error:&localError];
-
-  if (localError) {
-    *error = localError;
-    return nil;
-  }
-
-  _captureVideoOutput = [AVCaptureVideoDataOutput new];
-  _captureVideoOutput.videoSettings =
-      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)};
-  [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
-  [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
-
-  AVCaptureConnection *connection =
-      [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
-                                             output:_captureVideoOutput];
-
-  if ([_captureDevice position] == AVCaptureDevicePositionFront) {
-    connection.videoMirrored = YES;
-  }
-
-  [_captureSession addInputWithNoConnections:_captureVideoInput];
-  [_captureSession addOutputWithNoConnections:_captureVideoOutput];
-  [_captureSession addConnection:connection];
-
-  if (@available(iOS 10.0, *)) {
-    _capturePhotoOutput = [AVCapturePhotoOutput new];
-    [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
-    [_captureSession addOutput:_capturePhotoOutput];
-  }
-  _motionManager = [[CMMotionManager alloc] init];
-  [_motionManager startAccelerometerUpdates];
-
-  [self setCaptureSessionPreset:_resolutionPreset];
-  [self updateOrientation];
-
-  return self;
-}
-
-- (void)start {
-  [_captureSession startRunning];
-}
-
-- (void)stop {
-  [_captureSession stopRunning];
-}
-
-- (void)setVideoFormat:(OSType)videoFormat {
-  _videoFormat = videoFormat;
-  _captureVideoOutput.videoSettings =
-      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
-}
-
-- (void)setDeviceOrientation:(UIDeviceOrientation)orientation {
-  if (_deviceOrientation == orientation) {
-    return;
-  }
-
-  _deviceOrientation = orientation;
-  [self updateOrientation];
-}
-
-- (void)updateOrientation {
-  if (_isRecording) {
-    return;
-  }
-
-  UIDeviceOrientation orientation = (_lockedCaptureOrientation != UIDeviceOrientationUnknown)
-                                        ? _lockedCaptureOrientation
-                                        : _deviceOrientation;
-
-  [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput];
-  [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput];
-}
-
-- (void)updateOrientation:(UIDeviceOrientation)orientation
-         forCaptureOutput:(AVCaptureOutput *)captureOutput {
-  if (!captureOutput) {
-    return;
-  }
-
-  AVCaptureConnection *connection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
-  if (connection && connection.isVideoOrientationSupported) {
-    connection.videoOrientation = [self getVideoOrientationForDeviceOrientation:orientation];
-  }
-}
-
-- (void)captureToFile:(FLTThreadSafeFlutterResult *)result API_AVAILABLE(ios(10)) {
-  AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
-  if (_resolutionPreset == FLTResolutionPresetMax) {
-    [settings setHighResolutionPhotoEnabled:YES];
-  }
-
-  AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
-  if (avFlashMode != -1) {
-    [settings setFlashMode:avFlashMode];
-  }
-  NSError *error;
-  NSString *path = [self getTemporaryFilePathWithExtension:@"jpg"
-                                                 subfolder:@"pictures"
-                                                    prefix:@"CAP_"
-                                                     error:error];
-  if (error) {
-    [result sendError:error];
-    return;
-  }
-
-  [_capturePhotoOutput capturePhotoWithSettings:settings
-                                       delegate:[[FLTSavePhotoDelegate alloc] initWithPath:path
-                                                                                    result:result]];
-}
-
-- (AVCaptureVideoOrientation)getVideoOrientationForDeviceOrientation:
-    (UIDeviceOrientation)deviceOrientation {
-  if (deviceOrientation == UIDeviceOrientationPortrait) {
-    return AVCaptureVideoOrientationPortrait;
-  } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
-    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
-    // is landscape left the video orientation should be landscape right.
-    return AVCaptureVideoOrientationLandscapeRight;
-  } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
-    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
-    // is landscape right the video orientation should be landscape left.
-    return AVCaptureVideoOrientationLandscapeLeft;
-  } else if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) {
-    return AVCaptureVideoOrientationPortraitUpsideDown;
-  } else {
-    return AVCaptureVideoOrientationPortrait;
-  }
-}
-
-- (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
-                                      subfolder:(NSString *)subfolder
-                                         prefix:(NSString *)prefix
-                                          error:(NSError *)error {
-  NSString *docDir =
-      NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
-  NSString *fileDir =
-      [[docDir stringByAppendingPathComponent:@"camera"] stringByAppendingPathComponent:subfolder];
-  NSString *fileName = [prefix stringByAppendingString:[[NSUUID UUID] UUIDString]];
-  NSString *file =
-      [[fileDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:extension];
-
-  NSFileManager *fm = [NSFileManager defaultManager];
-  if (![fm fileExistsAtPath:fileDir]) {
-    [[NSFileManager defaultManager] createDirectoryAtPath:fileDir
-                              withIntermediateDirectories:true
-                                               attributes:nil
-                                                    error:&error];
-    if (error) {
-      return nil;
-    }
-  }
-
-  return file;
-}
-
-- (void)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset {
-  switch (resolutionPreset) {
-    case FLTResolutionPresetMax:
-    case FLTResolutionPresetUltraHigh:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
-        _previewSize = CGSizeMake(3840, 2160);
-        break;
-      }
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
-        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
-        _previewSize =
-            CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
-                       _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
-        break;
-      }
-    case FLTResolutionPresetVeryHigh:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
-        _previewSize = CGSizeMake(1920, 1080);
-        break;
-      }
-    case FLTResolutionPresetHigh:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
-        _previewSize = CGSizeMake(1280, 720);
-        break;
-      }
-    case FLTResolutionPresetMedium:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
-        _previewSize = CGSizeMake(640, 480);
-        break;
-      }
-    case FLTResolutionPresetLow:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset352x288;
-        _previewSize = CGSizeMake(352, 288);
-        break;
-      }
-    default:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
-        _captureSession.sessionPreset = AVCaptureSessionPresetLow;
-        _previewSize = CGSizeMake(352, 288);
-      } else {
-        NSError *error =
-            [NSError errorWithDomain:NSCocoaErrorDomain
-                                code:NSURLErrorUnknown
-                            userInfo:@{
-                              NSLocalizedDescriptionKey :
-                                  @"No capture session available for current capture session."
-                            }];
-        @throw error;
-      }
-  }
-}
-
-- (void)captureOutput:(AVCaptureOutput *)output
-    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-           fromConnection:(AVCaptureConnection *)connection {
-  if (output == _captureVideoOutput) {
-    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    CFRetain(newBuffer);
-    CVPixelBufferRef old = _latestPixelBuffer;
-    while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
-      old = _latestPixelBuffer;
-    }
-    if (old != nil) {
-      CFRelease(old);
-    }
-    if (_onFrameAvailable) {
-      _onFrameAvailable();
-    }
-  }
-  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
-    [_methodChannel invokeMethod:errorMethod
-                       arguments:@"sample buffer is not ready. Skipping sample"];
-    return;
-  }
-  if (_isStreamingImages) {
-    FlutterEventSink eventSink = _imageStreamHandler.eventSink;
-    if (eventSink) {
-      CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-      // Must lock base address before accessing the pixel data
-      CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
-
-      size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
-      size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
-
-      NSMutableArray *planes = [NSMutableArray array];
-
-      const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer);
-      size_t planeCount;
-      if (isPlanar) {
-        planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
-      } else {
-        planeCount = 1;
-      }
-
-      for (int i = 0; i < planeCount; i++) {
-        void *planeAddress;
-        size_t bytesPerRow;
-        size_t height;
-        size_t width;
-
-        if (isPlanar) {
-          planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
-          bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
-          height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
-          width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
-        } else {
-          planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
-          bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
-          height = CVPixelBufferGetHeight(pixelBuffer);
-          width = CVPixelBufferGetWidth(pixelBuffer);
-        }
-
-        NSNumber *length = @(bytesPerRow * height);
-        NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];
-
-        NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
-        planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
-        planeBuffer[@"width"] = @(width);
-        planeBuffer[@"height"] = @(height);
-        planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];
-
-        [planes addObject:planeBuffer];
-      }
-      // Before accessing pixel data, we should lock the base address, and unlock it afterwards.
-      // Done accessing the `pixelBuffer` at this point.
-      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
-
-      NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
-      imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
-      imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
-      imageBuffer[@"format"] = @(_videoFormat);
-      imageBuffer[@"planes"] = planes;
-      imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]];
-      Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]);
-      Float64 nsExposureDuration = 1000000000 * exposureDuration;
-      imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithInt:nsExposureDuration];
-      imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]];
-
-      dispatch_async(dispatch_get_main_queue(), ^{
-        eventSink(imageBuffer);
-      });
-    }
-  }
-  if (_isRecording && !_isRecordingPaused) {
-    if (_videoWriter.status == AVAssetWriterStatusFailed) {
-      [_methodChannel invokeMethod:errorMethod
-                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
-      return;
-    }
-
-    CFRetain(sampleBuffer);
-    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-
-    if (_videoWriter.status != AVAssetWriterStatusWriting) {
-      [_videoWriter startWriting];
-      [_videoWriter startSessionAtSourceTime:currentSampleTime];
-    }
-
-    if (output == _captureVideoOutput) {
-      if (_videoIsDisconnected) {
-        _videoIsDisconnected = NO;
-
-        if (_videoTimeOffset.value == 0) {
-          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
-        } else {
-          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
-          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
-        }
-
-        return;
-      }
-
-      _lastVideoSampleTime = currentSampleTime;
-
-      CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
-      [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
-    } else {
-      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
-
-      if (dur.value > 0) {
-        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
-      }
-
-      if (_audioIsDisconnected) {
-        _audioIsDisconnected = NO;
-
-        if (_audioTimeOffset.value == 0) {
-          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
-        } else {
-          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
-          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
-        }
-
-        return;
-      }
-
-      _lastAudioSampleTime = currentSampleTime;
-
-      if (_audioTimeOffset.value != 0) {
-        CFRelease(sampleBuffer);
-        sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
-      }
-
-      [self newAudioSample:sampleBuffer];
-    }
-
-    CFRelease(sampleBuffer);
-  }
-}
-
-- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
-  CMItemCount count;
-  CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
-  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
-  CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
-  for (CMItemCount i = 0; i < count; i++) {
-    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
-    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
-  }
-  CMSampleBufferRef sout;
-  CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
-  free(pInfo);
-  return sout;
-}
-
-- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
-  if (_videoWriter.status != AVAssetWriterStatusWriting) {
-    if (_videoWriter.status == AVAssetWriterStatusFailed) {
-      [_methodChannel invokeMethod:errorMethod
-                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
-    }
-    return;
-  }
-  if (_videoWriterInput.readyForMoreMediaData) {
-    if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
-      [_methodChannel
-          invokeMethod:errorMethod
-             arguments:[NSString stringWithFormat:@"%@", @"Unable to write to video input"]];
-    }
-  }
-}
-
-- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
-  if (_videoWriter.status != AVAssetWriterStatusWriting) {
-    if (_videoWriter.status == AVAssetWriterStatusFailed) {
-      [_methodChannel invokeMethod:errorMethod
-                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
-    }
-    return;
-  }
-  if (_audioWriterInput.readyForMoreMediaData) {
-    if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
-      [_methodChannel
-          invokeMethod:errorMethod
-             arguments:[NSString stringWithFormat:@"%@", @"Unable to write to audio input"]];
-    }
-  }
-}
-
-- (void)close {
-  [_captureSession stopRunning];
-  for (AVCaptureInput *input in [_captureSession inputs]) {
-    [_captureSession removeInput:input];
-  }
-  for (AVCaptureOutput *output in [_captureSession outputs]) {
-    [_captureSession removeOutput:output];
-  }
-}
-
-- (void)dealloc {
-  if (_latestPixelBuffer) {
-    CFRelease(_latestPixelBuffer);
-  }
-  [_motionManager stopAccelerometerUpdates];
-}
-
-- (CVPixelBufferRef)copyPixelBuffer {
-  CVPixelBufferRef pixelBuffer = _latestPixelBuffer;
-  while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
-    pixelBuffer = _latestPixelBuffer;
-  }
-
-  return pixelBuffer;
-}
-
-- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
-  if (!_isRecording) {
-    NSError *error;
-    _videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4"
-                                                        subfolder:@"videos"
-                                                           prefix:@"REC_"
-                                                            error:error];
-    if (error) {
-      [result sendError:error];
-      return;
-    }
-    if (![self setupWriterForPath:_videoRecordingPath]) {
-      [result sendErrorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil];
-      return;
-    }
-    _isRecording = YES;
-    _isRecordingPaused = NO;
-    _videoTimeOffset = CMTimeMake(0, 1);
-    _audioTimeOffset = CMTimeMake(0, 1);
-    _videoIsDisconnected = NO;
-    _audioIsDisconnected = NO;
-    [result sendSuccess];
-  } else {
-    [result sendErrorWithCode:@"Error" message:@"Video is already recording" details:nil];
-  }
-}
-
-- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
-  if (_isRecording) {
-    _isRecording = NO;
-
-    if (_videoWriter.status != AVAssetWriterStatusUnknown) {
-      [_videoWriter finishWritingWithCompletionHandler:^{
-        if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
-          [self updateOrientation];
-          [result sendSuccessWithData:self->_videoRecordingPath];
-          self->_videoRecordingPath = nil;
-        } else {
-          [result sendErrorWithCode:@"IOError"
-                            message:@"AVAssetWriter could not finish writing!"
-                            details:nil];
-        }
-      }];
-    }
-  } else {
-    NSError *error =
-        [NSError errorWithDomain:NSCocoaErrorDomain
-                            code:NSURLErrorResourceUnavailable
-                        userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
-    [result sendError:error];
-  }
-}
-
-- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
-  _isRecordingPaused = YES;
-  _videoIsDisconnected = YES;
-  _audioIsDisconnected = YES;
-  [result sendSuccess];
-}
-
-- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
-  _isRecordingPaused = NO;
-  [result sendSuccess];
-}
-
-- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
-                             orientation:(NSString *)orientationStr {
-  UIDeviceOrientation orientation;
-  @try {
-    orientation = FLTGetUIDeviceOrientationForString(orientationStr);
-  } @catch (NSError *e) {
-    [result sendError:e];
-    return;
-  }
-
-  if (_lockedCaptureOrientation != orientation) {
-    _lockedCaptureOrientation = orientation;
-    [self updateOrientation];
-  }
-
-  [result sendSuccess];
-}
-
-- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result {
-  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
-  [self updateOrientation];
-  [result sendSuccess];
-}
-
-- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
-  FLTFlashMode mode;
-  @try {
-    mode = FLTGetFLTFlashModeForString(modeStr);
-  } @catch (NSError *e) {
-    [result sendError:e];
-    return;
-  }
-  if (mode == FLTFlashModeTorch) {
-    if (!_captureDevice.hasTorch) {
-      [result sendErrorWithCode:@"setFlashModeFailed"
-                        message:@"Device does not support torch mode"
-                        details:nil];
-      return;
-    }
-    if (!_captureDevice.isTorchAvailable) {
-      [result sendErrorWithCode:@"setFlashModeFailed"
-                        message:@"Torch mode is currently not available"
-                        details:nil];
-      return;
-    }
-    if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
-      [_captureDevice lockForConfiguration:nil];
-      [_captureDevice setTorchMode:AVCaptureTorchModeOn];
-      [_captureDevice unlockForConfiguration];
-    }
-  } else {
-    if (!_captureDevice.hasFlash) {
-      [result sendErrorWithCode:@"setFlashModeFailed"
-                        message:@"Device does not have flash capabilities"
-                        details:nil];
-      return;
-    }
-    AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
-    if (![_capturePhotoOutput.supportedFlashModes
-            containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
-      [result sendErrorWithCode:@"setFlashModeFailed"
-                        message:@"Device does not support this specific flash mode"
-                        details:nil];
-      return;
-    }
-    if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
-      [_captureDevice lockForConfiguration:nil];
-      [_captureDevice setTorchMode:AVCaptureTorchModeOff];
-      [_captureDevice unlockForConfiguration];
-    }
-  }
-  _flashMode = mode;
-  [result sendSuccess];
-}
-
-- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
-  FLTExposureMode mode;
-  @try {
-    mode = FLTGetFLTExposureModeForString(modeStr);
-  } @catch (NSError *e) {
-    [result sendError:e];
-    return;
-  }
-  _exposureMode = mode;
-  [self applyExposureMode];
-  [result sendSuccess];
-}
-
-- (void)applyExposureMode {
-  [_captureDevice lockForConfiguration:nil];
-  switch (_exposureMode) {
-    case FLTExposureModeLocked:
-      [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
-      break;
-    case FLTExposureModeAuto:
-      if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
-        [_captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
-      } else {
-        [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
-      }
-      break;
-  }
-  [_captureDevice unlockForConfiguration];
-}
-
-- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
-  FLTFocusMode mode;
-  @try {
-    mode = FLTGetFLTFocusModeForString(modeStr);
-  } @catch (NSError *e) {
-    [result sendError:e];
-    return;
-  }
-  _focusMode = mode;
-  [self applyFocusMode];
-  [result sendSuccess];
-}
-
-- (void)applyFocusMode {
-  [self applyFocusMode:_focusMode onDevice:_captureDevice];
-}
-
-/**
- * Applies FocusMode on the AVCaptureDevice.
- *
- * If the @c focusMode is set to FocusModeAuto the AVCaptureDevice is configured to use
- * AVCaptureFocusModeContinuousModeAutoFocus when supported, otherwise it is set to
- * AVCaptureFocusModeAutoFocus. If neither AVCaptureFocusModeContinuousModeAutoFocus nor
- * AVCaptureFocusModeAutoFocus are supported focus mode will not be set.
- * If @c focusMode is set to FocusModeLocked the AVCaptureDevice is configured to use
- * AVCaptureFocusModeAutoFocus. If AVCaptureFocusModeAutoFocus is not supported focus mode will not
- * be set.
- *
- * @param focusMode The focus mode that should be applied to the @captureDevice instance.
- * @param captureDevice The AVCaptureDevice to which the @focusMode will be applied.
- */
-- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice {
-  [captureDevice lockForConfiguration:nil];
-  switch (focusMode) {
-    case FLTFocusModeLocked:
-      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
-        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
-      }
-      break;
-    case FLTFocusModeAuto:
-      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
-        [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
-      } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
-        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
-      }
-      break;
-  }
-  [captureDevice unlockForConfiguration];
-}
-
-- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
-  _isPreviewPaused = true;
-  [result sendSuccess];
-}
-
-- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
-  _isPreviewPaused = false;
-  [result sendSuccess];
-}
-
-- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
-                                            x:(double)x
-                                            y:(double)y {
-  double oldX = x, oldY = y;
-  switch (orientation) {
-    case UIDeviceOrientationPortrait:  // 90 ccw
-      y = 1 - oldX;
-      x = oldY;
-      break;
-    case UIDeviceOrientationPortraitUpsideDown:  // 90 cw
-      x = 1 - oldY;
-      y = oldX;
-      break;
-    case UIDeviceOrientationLandscapeRight:  // 180
-      x = 1 - x;
-      y = 1 - y;
-      break;
-    case UIDeviceOrientationLandscapeLeft:
-    default:
-      // No rotation required
-      break;
-  }
-  return CGPointMake(x, y);
-}
-
-- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
-  if (!_captureDevice.isExposurePointOfInterestSupported) {
-    [result sendErrorWithCode:@"setExposurePointFailed"
-                      message:@"Device does not have exposure point capabilities"
-                      details:nil];
-    return;
-  }
-  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
-  [_captureDevice lockForConfiguration:nil];
-  [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
-                                                                                    x:x
-                                                                                    y:y]];
-  [_captureDevice unlockForConfiguration];
-  // Retrigger auto exposure
-  [self applyExposureMode];
-  [result sendSuccess];
-}
-
-- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
-  if (!_captureDevice.isFocusPointOfInterestSupported) {
-    [result sendErrorWithCode:@"setFocusPointFailed"
-                      message:@"Device does not have focus point capabilities"
-                      details:nil];
-    return;
-  }
-  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
-  [_captureDevice lockForConfiguration:nil];
-
-  [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
-                                                                                 x:x
-                                                                                 y:y]];
-  [_captureDevice unlockForConfiguration];
-  // Retrigger auto focus
-  [self applyFocusMode];
-  [result sendSuccess];
-}
-
-- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset {
-  [_captureDevice lockForConfiguration:nil];
-  [_captureDevice setExposureTargetBias:offset completionHandler:nil];
-  [_captureDevice unlockForConfiguration];
-  [result sendSuccessWithData:@(offset)];
-}
-
-- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
-  if (!_isStreamingImages) {
-    FlutterEventChannel *eventChannel =
-        [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
-                                  binaryMessenger:messenger];
-    FLTThreadSafeEventChannel *threadSafeEventChannel =
-        [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
-
-    _imageStreamHandler =
-        [[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
-    [threadSafeEventChannel setStreamHandler:_imageStreamHandler
-                                  completion:^{
-                                    dispatch_async(self->_captureSessionQueue, ^{
-                                      self.isStreamingImages = YES;
-                                    });
-                                  }];
-  } else {
-    [_methodChannel invokeMethod:errorMethod
-                       arguments:@"Images from camera are already streaming!"];
-  }
-}
-
-- (void)stopImageStream {
-  if (_isStreamingImages) {
-    _isStreamingImages = NO;
-    _imageStreamHandler = nil;
-  } else {
-    [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are not streaming!"];
-  }
-}
-
-- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
-  CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
-
-  [result sendSuccessWithData:[NSNumber numberWithFloat:maxZoomFactor]];
-}
-
-- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
-  CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
-  [result sendSuccessWithData:[NSNumber numberWithFloat:minZoomFactor]];
-}
-
-- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result {
-  CGFloat maxAvailableZoomFactor = [self getMaxAvailableZoomFactor];
-  CGFloat minAvailableZoomFactor = [self getMinAvailableZoomFactor];
-
-  if (maxAvailableZoomFactor < zoom || minAvailableZoomFactor > zoom) {
-    NSString *errorMessage = [NSString
-        stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
-                         minAvailableZoomFactor, maxAvailableZoomFactor];
-
-    [result sendErrorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil];
-    return;
-  }
-
-  NSError *error = nil;
-  if (![_captureDevice lockForConfiguration:&error]) {
-    [result sendError:error];
-    return;
-  }
-  _captureDevice.videoZoomFactor = zoom;
-  [_captureDevice unlockForConfiguration];
-
-  [result sendSuccess];
-}
-
-- (CGFloat)getMinAvailableZoomFactor {
-  if (@available(iOS 11.0, *)) {
-    return _captureDevice.minAvailableVideoZoomFactor;
-  } else {
-    return 1.0;
-  }
-}
-
-- (CGFloat)getMaxAvailableZoomFactor {
-  if (@available(iOS 11.0, *)) {
-    return _captureDevice.maxAvailableVideoZoomFactor;
-  } else {
-    return _captureDevice.activeFormat.videoMaxZoomFactor;
-  }
-}
-
-- (BOOL)setupWriterForPath:(NSString *)path {
-  NSError *error = nil;
-  NSURL *outputURL;
-  if (path != nil) {
-    outputURL = [NSURL fileURLWithPath:path];
-  } else {
-    return NO;
-  }
-  if (_enableAudio && !_isAudioSetup) {
-    [self setUpCaptureSessionForAudio];
-  }
-
-  _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
-                                           fileType:AVFileTypeMPEG4
-                                              error:&error];
-  NSParameterAssert(_videoWriter);
-  if (error) {
-    [_methodChannel invokeMethod:errorMethod arguments:error.description];
-    return NO;
-  }
-
-  NSDictionary *videoSettings = [_captureVideoOutput
-      recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
-  _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
-                                                         outputSettings:videoSettings];
-
-  _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
-      assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
-                                 sourcePixelBufferAttributes:@{
-                                   (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)
-                                 }];
-
-  NSParameterAssert(_videoWriterInput);
-
-  _videoWriterInput.expectsMediaDataInRealTime = YES;
-
-  // Add the audio input
-  if (_enableAudio) {
-    AudioChannelLayout acl;
-    bzero(&acl, sizeof(acl));
-    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
-    NSDictionary *audioOutputSettings = nil;
-    // Both type of audio inputs causes output video file to be corrupted.
-    audioOutputSettings = @{
-      AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
-      AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
-      AVNumberOfChannelsKey : [NSNumber numberWithInt:1],
-      AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)],
-    };
-    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
-                                                           outputSettings:audioOutputSettings];
-    _audioWriterInput.expectsMediaDataInRealTime = YES;
-
-    [_videoWriter addInput:_audioWriterInput];
-    [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
-  }
-
-  if (_flashMode == FLTFlashModeTorch) {
-    [self.captureDevice lockForConfiguration:nil];
-    [self.captureDevice setTorchMode:AVCaptureTorchModeOn];
-    [self.captureDevice unlockForConfiguration];
-  }
-
-  [_videoWriter addInput:_videoWriterInput];
-
-  [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
-
-  return YES;
-}
-
-- (void)setUpCaptureSessionForAudio {
-  NSError *error = nil;
-  // Create a device input with the device and add it to the session.
-  // Setup the audio input.
-  AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
-  AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice
-                                                                           error:&error];
-  if (error) {
-    [_methodChannel invokeMethod:errorMethod arguments:error.description];
-  }
-  // Setup the audio output.
-  _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
-
-  if ([_captureSession canAddInput:audioInput]) {
-    [_captureSession addInput:audioInput];
-
-    if ([_captureSession canAddOutput:_audioOutput]) {
-      [_captureSession addOutput:_audioOutput];
-      _isAudioSetup = YES;
-    } else {
-      [_methodChannel invokeMethod:errorMethod
-                         arguments:@"Unable to add Audio input/output to session capture"];
-      _isAudioSetup = NO;
-    }
-  }
-}
-@end
-
 @interface CameraPlugin ()
 @property(readonly, nonatomic) FLTThreadSafeTextureRegistry *registry;
 @property(readonly, nonatomic) NSObject<FlutterBinaryMessenger> *messenger;
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera/ios/Classes/CameraPlugin.modulemap
index 0593741..1ad3437 100644
--- a/packages/camera/camera/ios/Classes/CameraPlugin.modulemap
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.modulemap
@@ -7,5 +7,6 @@
   explicit module Test {
     header "CameraPlugin_Test.h"
     header "CameraProperties.h"
+    header "FLTCam.h"
   }
 }
diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h
new file mode 100644
index 0000000..417a1d7
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/FLTCam.h
@@ -0,0 +1,86 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+@import Foundation;
+@import Flutter;
+
+#import "CameraProperties.h"
+#import "FLTThreadSafeEventChannel.h"
+#import "FLTThreadSafeFlutterResult.h"
+#import "FLTThreadSafeMethodChannel.h"
+#import "FLTThreadSafeTextureRegistry.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A class that manages the camera's state and performs camera operations.
+ */
+@interface FLTCam : NSObject <FlutterTexture>
+
+@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
+@property(readonly, nonatomic) CGSize previewSize;
+@property(assign, nonatomic) BOOL isPreviewPaused;
+@property(nonatomic, copy) void (^onFrameAvailable)(void);
+@property(nonatomic) FLTThreadSafeMethodChannel *methodChannel;
+@property(assign, nonatomic) FLTResolutionPreset resolutionPreset;
+@property(assign, nonatomic) FLTExposureMode exposureMode;
+@property(assign, nonatomic) FLTFocusMode focusMode;
+@property(assign, nonatomic) FLTFlashMode flashMode;
+// Format used for video and image streaming.
+@property(assign, nonatomic) FourCharCode videoFormat;
+
+- (instancetype)initWithCameraName:(NSString *)cameraName
+                  resolutionPreset:(NSString *)resolutionPreset
+                       enableAudio:(BOOL)enableAudio
+                       orientation:(UIDeviceOrientation)orientation
+               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                             error:(NSError **)error;
+- (void)start;
+- (void)stop;
+- (void)setDeviceOrientation:(UIDeviceOrientation)orientation;
+- (void)captureToFile:(FLTThreadSafeFlutterResult *)result API_AVAILABLE(ios(10));
+- (void)close;
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
+                             orientation:(NSString *)orientationStr;
+- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)applyFocusMode;
+
+/**
+ * Applies the focus mode to the AVCaptureDevice.
+ *
+ * If @c focusMode is FLTFocusModeAuto, the AVCaptureDevice is configured to use
+ * AVCaptureFocusModeContinuousAutoFocus when supported, otherwise it is set to
+ * AVCaptureFocusModeAutoFocus. If neither AVCaptureFocusModeContinuousAutoFocus
+ * nor AVCaptureFocusModeAutoFocus is supported, the focus mode is left
+ * unchanged. If @c focusMode is FLTFocusModeLocked, the AVCaptureDevice is
+ * configured to use AVCaptureFocusModeAutoFocus; if that is not supported, the
+ * focus mode is left unchanged.
+ *
+ * @param focusMode The focus mode to apply to @c captureDevice.
+ * @param captureDevice The AVCaptureDevice to which @c focusMode will be applied.
+ */
+- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
+- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
+- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
+- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset;
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopImageStream;
+- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result;
+- (void)setUpCaptureSessionForAudio;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m
new file mode 100644
index 0000000..11be4ad
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/FLTCam.m
@@ -0,0 +1,1087 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTCam.h"
+
+@import CoreMotion;
+#import <libkern/OSAtomic.h>
+
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+// The queue on which the `eventSink` property should be accessed.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+// The `eventSink` property should be accessed on `captureSessionQueue`.
+// The block itself should be invoked on the main queue.
+@property FlutterEventSink eventSink;
+@end
+
+@implementation FLTImageStreamHandler
+
+- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  _captureSessionQueue = captureSessionQueue;
+  return self;
+}
+
+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  dispatch_async(self.captureSessionQueue, ^{
+    self.eventSink = nil;
+  });
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)events {
+  dispatch_async(self.captureSessionQueue, ^{
+    self.eventSink = events;
+  });
+  return nil;
+}
+@end
+
+@interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
+@property(readonly, nonatomic) NSString *path;
+@property(readonly, nonatomic) FLTThreadSafeFlutterResult *result;
+@end
+
+@implementation FLTSavePhotoDelegate {
+  /// Used to keep the delegate alive until didFinishProcessingPhotoSampleBuffer.
+  FLTSavePhotoDelegate *selfReference;
+}
+
+- (instancetype)initWithPath:(NSString *)path result:(FLTThreadSafeFlutterResult *)result {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  _path = path;
+  selfReference = self;
+  _result = result;
+  return self;
+}
+
+- (void)captureOutput:(AVCapturePhotoOutput *)output
+    didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
+                previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
+                        resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
+                         bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
+                                   error:(NSError *)error API_AVAILABLE(ios(10)) {
+  selfReference = nil;
+  if (error) {
+    [_result sendError:error];
+    return;
+  }
+
+  NSData *data = [AVCapturePhotoOutput
+      JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
+                            previewPhotoSampleBuffer:previewPhotoSampleBuffer];
+
+  // TODO(sigurdm): Consider writing file asynchronously.
+  bool success = [data writeToFile:_path atomically:YES];
+
+  if (!success) {
+    [_result sendErrorWithCode:@"IOError" message:@"Unable to write file" details:nil];
+    return;
+  }
+  [_result sendSuccessWithData:_path];
+}
+
+- (void)captureOutput:(AVCapturePhotoOutput *)output
+    didFinishProcessingPhoto:(AVCapturePhoto *)photo
+                       error:(NSError *)error API_AVAILABLE(ios(11.0)) {
+  selfReference = nil;
+  if (error) {
+    [_result sendError:error];
+    return;
+  }
+
+  NSData *photoData = [photo fileDataRepresentation];
+
+  bool success = [photoData writeToFile:_path atomically:YES];
+  if (!success) {
+    [_result sendErrorWithCode:@"IOError" message:@"Unable to write file" details:nil];
+    return;
+  }
+  [_result sendSuccessWithData:_path];
+}
+@end
+
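+// Private class extension. FLTCam's internal state, previously declared inside
+// CameraPlugin.m, stays out of the public header in FLTCam.h.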
+@interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
+                      AVCaptureAudioDataOutputSampleBufferDelegate>
+
+@property(readonly, nonatomic) int64_t textureId;
+@property BOOL enableAudio;
+@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
+@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
+@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
+@property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
+@property(readonly, nonatomic) CGSize captureSize;
+@property(strong, nonatomic) AVAssetWriter *videoWriter;
+@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput;
+@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput;
+@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
+@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
+@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
+@property(strong, nonatomic) NSString *videoRecordingPath;
+@property(assign, nonatomic) BOOL isRecording;
+@property(assign, nonatomic) BOOL isRecordingPaused;
+@property(assign, nonatomic) BOOL videoIsDisconnected;
+@property(assign, nonatomic) BOOL audioIsDisconnected;
+@property(assign, nonatomic) BOOL isAudioSetup;
+@property(assign, nonatomic) BOOL isStreamingImages;
+@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
+@property(assign, nonatomic) CMTime lastVideoSampleTime;
+@property(assign, nonatomic) CMTime lastAudioSampleTime;
+@property(assign, nonatomic) CMTime videoTimeOffset;
+@property(assign, nonatomic) CMTime audioTimeOffset;
+@property(nonatomic) CMMotionManager *motionManager;
+@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
+// All of FLTCam's state access and capture-session-related operations should run on this queue.
+@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
+@property(assign, nonatomic) UIDeviceOrientation deviceOrientation;
+@end
+
+@implementation FLTCam
+
+static NSString *const errorMethod = @"error";
+
+- (instancetype)initWithCameraName:(NSString *)cameraName
+                  resolutionPreset:(NSString *)resolutionPreset
+                       enableAudio:(BOOL)enableAudio
+                       orientation:(UIDeviceOrientation)orientation
+               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                             error:(NSError **)error {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  @try {
+    _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
+  } @catch (NSError *e) {
+    // Fail initialization on an unrecognized preset instead of continuing with an
+    // undefined `_resolutionPreset`.
+    *error = e;
+    return nil;
+  }
+  _enableAudio = enableAudio;
+  _captureSessionQueue = captureSessionQueue;
+  _captureSession = [[AVCaptureSession alloc] init];
+  _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
+  _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
+  _exposureMode = FLTExposureModeAuto;
+  _focusMode = FLTFocusModeAuto;
+  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
+  _deviceOrientation = orientation;
+  _videoFormat = kCVPixelFormatType_32BGRA;
+
+  NSError *localError = nil;
+  _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
+                                                             error:&localError];
+
+  if (localError) {
+    *error = localError;
+    return nil;
+  }
+
+  _captureVideoOutput = [AVCaptureVideoDataOutput new];
+  _captureVideoOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)};
+  [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
+  [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
+
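+  // Build the connection manually (note the ...WithNoConnections: calls below) so it can be
+  // configured, e.g. mirroring the front camera, before being added to the session.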
+  AVCaptureConnection *connection =
+      [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
+                                             output:_captureVideoOutput];
+
+  if ([_captureDevice position] == AVCaptureDevicePositionFront) {
+    connection.videoMirrored = YES;
+  }
+
+  [_captureSession addInputWithNoConnections:_captureVideoInput];
+  [_captureSession addOutputWithNoConnections:_captureVideoOutput];
+  [_captureSession addConnection:connection];
+
+  if (@available(iOS 10.0, *)) {
+    _capturePhotoOutput = [AVCapturePhotoOutput new];
+    [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
+    [_captureSession addOutput:_capturePhotoOutput];
+  }
+  _motionManager = [[CMMotionManager alloc] init];
+  [_motionManager startAccelerometerUpdates];
+
+  [self setCaptureSessionPreset:_resolutionPreset];
+  [self updateOrientation];
+
+  return self;
+}
+
+- (void)start {
+  [_captureSession startRunning];
+}
+
+- (void)stop {
+  [_captureSession stopRunning];
+}
+
+- (void)setVideoFormat:(OSType)videoFormat {
+  _videoFormat = videoFormat;
+  _captureVideoOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
+}
+
+- (void)setDeviceOrientation:(UIDeviceOrientation)orientation {
+  if (_deviceOrientation == orientation) {
+    return;
+  }
+
+  _deviceOrientation = orientation;
+  [self updateOrientation];
+}
+
+- (void)updateOrientation {
+  if (_isRecording) {
+    return;
+  }
+
+  UIDeviceOrientation orientation = (_lockedCaptureOrientation != UIDeviceOrientationUnknown)
+                                        ? _lockedCaptureOrientation
+                                        : _deviceOrientation;
+
+  [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput];
+  [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput];
+}
+
+- (void)updateOrientation:(UIDeviceOrientation)orientation
+         forCaptureOutput:(AVCaptureOutput *)captureOutput {
+  if (!captureOutput) {
+    return;
+  }
+
+  AVCaptureConnection *connection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
+  if (connection && connection.isVideoOrientationSupported) {
+    connection.videoOrientation = [self getVideoOrientationForDeviceOrientation:orientation];
+  }
+}
+
+- (void)captureToFile:(FLTThreadSafeFlutterResult *)result API_AVAILABLE(ios(10)) {
+  AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+  if (_resolutionPreset == FLTResolutionPresetMax) {
+    [settings setHighResolutionPhotoEnabled:YES];
+  }
+
+  AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
+  if (avFlashMode != -1) {
+    [settings setFlashMode:avFlashMode];
+  }
+  NSError *error;
+  NSString *path = [self getTemporaryFilePathWithExtension:@"jpg"
+                                                 subfolder:@"pictures"
+                                                    prefix:@"CAP_"
+                                                     error:&error];
+  if (error) {
+    [result sendError:error];
+    return;
+  }
+
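+  // The delegate keeps itself alive via its `selfReference` ivar until one of its capture
+  // callbacks fires and the result has been sent.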
+  [_capturePhotoOutput capturePhotoWithSettings:settings
+                                       delegate:[[FLTSavePhotoDelegate alloc] initWithPath:path
+                                                                                    result:result]];
+}
+
+- (AVCaptureVideoOrientation)getVideoOrientationForDeviceOrientation:
+    (UIDeviceOrientation)deviceOrientation {
+  if (deviceOrientation == UIDeviceOrientationPortrait) {
+    return AVCaptureVideoOrientationPortrait;
+  } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
+    // is landscape left the video orientation should be landscape right.
+    return AVCaptureVideoOrientationLandscapeRight;
+  } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
+    // is landscape right the video orientation should be landscape left.
+    return AVCaptureVideoOrientationLandscapeLeft;
+  } else if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) {
+    return AVCaptureVideoOrientationPortraitUpsideDown;
+  } else {
+    return AVCaptureVideoOrientationPortrait;
+  }
+}
+
+- (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
+                                      subfolder:(NSString *)subfolder
+                                         prefix:(NSString *)prefix
+                                          error:(NSError **)error {
+  NSString *docDir =
+      NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
+  NSString *fileDir =
+      [[docDir stringByAppendingPathComponent:@"camera"] stringByAppendingPathComponent:subfolder];
+  NSString *fileName = [prefix stringByAppendingString:[[NSUUID UUID] UUIDString]];
+  NSString *file =
+      [[fileDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:extension];
+
+  NSFileManager *fm = [NSFileManager defaultManager];
+  if (![fm fileExistsAtPath:fileDir]) {
+    // Forward the caller's out-parameter directly so failures actually propagate.
+    [fm createDirectoryAtPath:fileDir
+        withIntermediateDirectories:YES
+                         attributes:nil
+                              error:error];
+    if (error && *error) {
+      return nil;
+    }
+  }
+
+  return file;
+}
+
+- (void)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset {
+  switch (resolutionPreset) {
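+    // Note: the fall-through between cases is deliberate. When a preset is not supported by
+    // the session, the next lower preset is tried instead.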
+    case FLTResolutionPresetMax:
+    case FLTResolutionPresetUltraHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
+        _previewSize = CGSizeMake(3840, 2160);
+        break;
+      }
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
+        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
+        _previewSize =
+            CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
+                       _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
+        break;
+      }
+    case FLTResolutionPresetVeryHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
+        _previewSize = CGSizeMake(1920, 1080);
+        break;
+      }
+    case FLTResolutionPresetHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
+        _previewSize = CGSizeMake(1280, 720);
+        break;
+      }
+    case FLTResolutionPresetMedium:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
+        _previewSize = CGSizeMake(640, 480);
+        break;
+      }
+    case FLTResolutionPresetLow:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset352x288;
+        _previewSize = CGSizeMake(352, 288);
+        break;
+      }
+    default:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
+        _captureSession.sessionPreset = AVCaptureSessionPresetLow;
+        _previewSize = CGSizeMake(352, 288);
+      } else {
+        NSError *error =
+            [NSError errorWithDomain:NSCocoaErrorDomain
+                                code:NSURLErrorUnknown
+                            userInfo:@{
+                              NSLocalizedDescriptionKey :
+                                  @"Unable to set any capture session preset for the current device."
+                            }];
+        @throw error;
+      }
+  }
+}
+
+- (void)captureOutput:(AVCaptureOutput *)output
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection {
+  if (output == _captureVideoOutput) {
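+    // Atomically publish the newest frame for the Flutter texture, releasing whichever
+    // buffer it replaces; `copyPixelBuffer` hands ownership back with the same CAS pattern.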
+    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    CFRetain(newBuffer);
+    CVPixelBufferRef old = _latestPixelBuffer;
+    while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
+      old = _latestPixelBuffer;
+    }
+    if (old != nil) {
+      CFRelease(old);
+    }
+    if (_onFrameAvailable) {
+      _onFrameAvailable();
+    }
+  }
+  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+    [_methodChannel invokeMethod:errorMethod
+                       arguments:@"sample buffer is not ready. Skipping sample"];
+    return;
+  }
+  if (_isStreamingImages) {
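+    // Serialize the frame into the map that the Dart side decodes into a CameraImage:
+    // overall dimensions, pixel format, and one bytes/stride entry per plane.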
+    FlutterEventSink eventSink = _imageStreamHandler.eventSink;
+    if (eventSink) {
+      CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+      // Must lock base address before accessing the pixel data
+      CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+      size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
+      size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+      NSMutableArray *planes = [NSMutableArray array];
+
+      const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer);
+      size_t planeCount;
+      if (isPlanar) {
+        planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
+      } else {
+        planeCount = 1;
+      }
+
+      for (size_t i = 0; i < planeCount; i++) {
+        void *planeAddress;
+        size_t bytesPerRow;
+        size_t height;
+        size_t width;
+
+        if (isPlanar) {
+          planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
+          bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
+          height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
+          width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
+        } else {
+          planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
+          bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+          height = CVPixelBufferGetHeight(pixelBuffer);
+          width = CVPixelBufferGetWidth(pixelBuffer);
+        }
+
+        size_t length = bytesPerRow * height;
+        NSData *bytes = [NSData dataWithBytes:planeAddress length:length];
+
+        NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
+        planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
+        planeBuffer[@"width"] = @(width);
+        planeBuffer[@"height"] = @(height);
+        planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];
+
+        [planes addObject:planeBuffer];
+      }
+      // Done accessing the `pixelBuffer` at this point; unlock the base address that was
+      // locked above before reading the pixel data.
+      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+      NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
+      imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
+      imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
+      imageBuffer[@"format"] = @(_videoFormat);
+      imageBuffer[@"planes"] = planes;
+      imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]];
+      Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]);
+      Float64 nsExposureDuration = 1000000000 * exposureDuration;
+      imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithLong:(long)nsExposureDuration];
+      imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]];
+
+      dispatch_async(dispatch_get_main_queue(), ^{
+        eventSink(imageBuffer);
+      });
+    }
+  }
+  if (_isRecording && !_isRecordingPaused) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+      return;
+    }
+
+    CFRetain(sampleBuffer);
+    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    if (_videoWriter.status != AVAssetWriterStatusWriting) {
+      [_videoWriter startWriting];
+      [_videoWriter startSessionAtSourceTime:currentSampleTime];
+    }
+
+    if (output == _captureVideoOutput) {
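+      // After a pause, the first video frame is dropped and the length of the gap is
+      // accumulated into `_videoTimeOffset`, keeping output timestamps continuous.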
+      if (_videoIsDisconnected) {
+        _videoIsDisconnected = NO;
+
+        if (_videoTimeOffset.value == 0) {
+          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastVideoSampleTime = currentSampleTime;
+
+      CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
+      [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
+    } else {
+      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
+
+      if (dur.value > 0) {
+        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
+      }
+
+      if (_audioIsDisconnected) {
+        _audioIsDisconnected = NO;
+
+        if (_audioTimeOffset.value == 0) {
+          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastAudioSampleTime = currentSampleTime;
+
+      if (_audioTimeOffset.value != 0) {
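+        // Balance the CFRetain above before swapping in the retimed copy; the callback still
+        // owns the original buffer, and adjustTime:by: returns a new retained buffer
+        // (CF_RETURNS_RETAINED) that the CFRelease at the end of this method pairs with.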
+        CFRelease(sampleBuffer);
+        sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
+      }
+
+      [self newAudioSample:sampleBuffer];
+    }
+
+    CFRelease(sampleBuffer);
+  }
+}
+
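+// Returns a retained copy of `sample` whose timing info is shifted back by `offset`, used to
+// remove recording-pause gaps from audio sample timestamps.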
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
+  CMItemCount count;
+  CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
+  CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
+  for (CMItemCount i = 0; i < count; i++) {
+    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
+    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
+  }
+  CMSampleBufferRef sout;
+  CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
+  free(pInfo);
+  return sout;
+}
+
+- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
+  if (_videoWriter.status != AVAssetWriterStatusWriting) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+    }
+    return;
+  }
+  if (_videoWriterInput.readyForMoreMediaData) {
+    if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:@"Unable to write to video input"];
+    }
+  }
+}
+
+- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
+  if (_videoWriter.status != AVAssetWriterStatusWriting) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+    }
+    return;
+  }
+  if (_audioWriterInput.readyForMoreMediaData) {
+    if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:@"Unable to write to audio input"];
+    }
+  }
+}
+
+- (void)close {
+  [_captureSession stopRunning];
+  for (AVCaptureInput *input in [_captureSession inputs]) {
+    [_captureSession removeInput:input];
+  }
+  for (AVCaptureOutput *output in [_captureSession outputs]) {
+    [_captureSession removeOutput:output];
+  }
+}
+
+- (void)dealloc {
+  if (_latestPixelBuffer) {
+    CFRelease(_latestPixelBuffer);
+  }
+  [_motionManager stopAccelerometerUpdates];
+}
+
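+// Implements FlutterTexture. Transfers ownership of the latest frame to the engine, which
+// releases the returned buffer after copying it.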
+- (CVPixelBufferRef)copyPixelBuffer {
+  CVPixelBufferRef pixelBuffer = _latestPixelBuffer;
+  while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
+    pixelBuffer = _latestPixelBuffer;
+  }
+
+  return pixelBuffer;
+}
+
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  if (!_isRecording) {
+    NSError *error;
+    _videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4"
+                                                        subfolder:@"videos"
+                                                           prefix:@"REC_"
+                                                            error:&error];
+    if (error) {
+      [result sendError:error];
+      return;
+    }
+    if (![self setupWriterForPath:_videoRecordingPath]) {
+      [result sendErrorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil];
+      return;
+    }
+    _isRecording = YES;
+    _isRecordingPaused = NO;
+    _videoTimeOffset = CMTimeMake(0, 1);
+    _audioTimeOffset = CMTimeMake(0, 1);
+    _videoIsDisconnected = NO;
+    _audioIsDisconnected = NO;
+    [result sendSuccess];
+  } else {
+    [result sendErrorWithCode:@"Error" message:@"Video is already recording" details:nil];
+  }
+}
+
+- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  if (_isRecording) {
+    _isRecording = NO;
+
+    if (_videoWriter.status != AVAssetWriterStatusUnknown) {
+      [_videoWriter finishWritingWithCompletionHandler:^{
+        if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
+          [self updateOrientation];
+          [result sendSuccessWithData:self->_videoRecordingPath];
+          self->_videoRecordingPath = nil;
+        } else {
+          [result sendErrorWithCode:@"IOError"
+                            message:@"AVAssetWriter could not finish writing!"
+                            details:nil];
+        }
+      }];
+    }
+  } else {
+    NSError *error =
+        [NSError errorWithDomain:NSCocoaErrorDomain
+                            code:NSURLErrorResourceUnavailable
+                        userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
+    [result sendError:error];
+  }
+}
+
+- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isRecordingPaused = YES;
+  _videoIsDisconnected = YES;
+  _audioIsDisconnected = YES;
+  [result sendSuccess];
+}
+
+- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isRecordingPaused = NO;
+  [result sendSuccess];
+}
+
+- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
+                             orientation:(NSString *)orientationStr {
+  UIDeviceOrientation orientation;
+  @try {
+    orientation = FLTGetUIDeviceOrientationForString(orientationStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+
+  if (_lockedCaptureOrientation != orientation) {
+    _lockedCaptureOrientation = orientation;
+    [self updateOrientation];
+  }
+
+  [result sendSuccess];
+}
+
+- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result {
+  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
+  [self updateOrientation];
+  [result sendSuccess];
+}
+
+- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+  FLTFlashMode mode;
+  @try {
+    mode = FLTGetFLTFlashModeForString(modeStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+  if (mode == FLTFlashModeTorch) {
+    if (!_captureDevice.hasTorch) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not support torch mode"
+                        details:nil];
+      return;
+    }
+    if (!_captureDevice.isTorchAvailable) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Torch mode is currently not available"
+                        details:nil];
+      return;
+    }
+    if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
+      [_captureDevice lockForConfiguration:nil];
+      [_captureDevice setTorchMode:AVCaptureTorchModeOn];
+      [_captureDevice unlockForConfiguration];
+    }
+  } else {
+    if (!_captureDevice.hasFlash) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not have flash capabilities"
+                        details:nil];
+      return;
+    }
+    AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
+    if (![_capturePhotoOutput.supportedFlashModes
+            containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not support this specific flash mode"
+                        details:nil];
+      return;
+    }
+    if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
+      [_captureDevice lockForConfiguration:nil];
+      [_captureDevice setTorchMode:AVCaptureTorchModeOff];
+      [_captureDevice unlockForConfiguration];
+    }
+  }
+  _flashMode = mode;
+  [result sendSuccess];
+}
+
+- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+  FLTExposureMode mode;
+  @try {
+    mode = FLTGetFLTExposureModeForString(modeStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+  _exposureMode = mode;
+  [self applyExposureMode];
+  [result sendSuccess];
+}
+
+- (void)applyExposureMode {
+  [_captureDevice lockForConfiguration:nil];
+  switch (_exposureMode) {
+    case FLTExposureModeLocked:
+      [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+      break;
+    case FLTExposureModeAuto:
+      if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+        [_captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+      } else {
+        [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+      }
+      break;
+  }
+  [_captureDevice unlockForConfiguration];
+}
+
+- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+  FLTFocusMode mode;
+  @try {
+    mode = FLTGetFLTFocusModeForString(modeStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+  _focusMode = mode;
+  [self applyFocusMode];
+  [result sendSuccess];
+}
+
+- (void)applyFocusMode {
+  [self applyFocusMode:_focusMode onDevice:_captureDevice];
+}
+
+- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice {
+  [captureDevice lockForConfiguration:nil];
+  switch (focusMode) {
+    case FLTFocusModeLocked:
+      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+      }
+      break;
+    case FLTFocusModeAuto:
+      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+        [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+      } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+      }
+      break;
+  }
+  [captureDevice unlockForConfiguration];
+}
+
+- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isPreviewPaused = YES;
+  [result sendSuccess];
+}
+
+- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isPreviewPaused = NO;
+  [result sendSuccess];
+}
+
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+                                            x:(double)x
+                                            y:(double)y {
+  double oldX = x, oldY = y;
+  switch (orientation) {
+    case UIDeviceOrientationPortrait:  // 90 ccw
+      y = 1 - oldX;
+      x = oldY;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown:  // 90 cw
+      x = 1 - oldY;
+      y = oldX;
+      break;
+    case UIDeviceOrientationLandscapeRight:  // 180
+      x = 1 - x;
+      y = 1 - y;
+      break;
+    case UIDeviceOrientationLandscapeLeft:
+    default:
+      // No rotation required
+      break;
+  }
+  return CGPointMake(x, y);
+}
+
+- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
+  if (!_captureDevice.isExposurePointOfInterestSupported) {
+    [result sendErrorWithCode:@"setExposurePointFailed"
+                      message:@"Device does not have exposure point capabilities"
+                      details:nil];
+    return;
+  }
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [_captureDevice lockForConfiguration:nil];
+  [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                                    x:x
+                                                                                    y:y]];
+  [_captureDevice unlockForConfiguration];
+  // Retrigger auto exposure
+  [self applyExposureMode];
+  [result sendSuccess];
+}
+
+- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
+  if (!_captureDevice.isFocusPointOfInterestSupported) {
+    [result sendErrorWithCode:@"setFocusPointFailed"
+                      message:@"Device does not have focus point capabilities"
+                      details:nil];
+    return;
+  }
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [_captureDevice lockForConfiguration:nil];
+
+  [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                                 x:x
+                                                                                 y:y]];
+  [_captureDevice unlockForConfiguration];
+  // Retrigger auto focus
+  [self applyFocusMode];
+  [result sendSuccess];
+}
+
+- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset {
+  [_captureDevice lockForConfiguration:nil];
+  [_captureDevice setExposureTargetBias:offset completionHandler:nil];
+  [_captureDevice unlockForConfiguration];
+  [result sendSuccessWithData:@(offset)];
+}
+
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+  if (!_isStreamingImages) {
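+    // `setStreamHandler:` must run on the main thread; the thread-safe wrapper takes care of
+    // that, as this method may be invoked on the capture session queue.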
+    FlutterEventChannel *eventChannel =
+        [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
+                                  binaryMessenger:messenger];
+    FLTThreadSafeEventChannel *threadSafeEventChannel =
+        [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
+
+    _imageStreamHandler =
+        [[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
+    [threadSafeEventChannel setStreamHandler:_imageStreamHandler
+                                  completion:^{
+                                    dispatch_async(self->_captureSessionQueue, ^{
+                                      self.isStreamingImages = YES;
+                                    });
+                                  }];
+  } else {
+    [_methodChannel invokeMethod:errorMethod
+                       arguments:@"Images from camera are already streaming!"];
+  }
+}
+
+- (void)stopImageStream {
+  if (_isStreamingImages) {
+    _isStreamingImages = NO;
+    _imageStreamHandler = nil;
+  } else {
+    [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are not streaming!"];
+  }
+}
+
+- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
+  CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
+
+  [result sendSuccessWithData:[NSNumber numberWithFloat:maxZoomFactor]];
+}
+
+- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
+  CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
+  [result sendSuccessWithData:[NSNumber numberWithFloat:minZoomFactor]];
+}
+
+- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result {
+  CGFloat maxAvailableZoomFactor = [self getMaxAvailableZoomFactor];
+  CGFloat minAvailableZoomFactor = [self getMinAvailableZoomFactor];
+
+  if (maxAvailableZoomFactor < zoom || minAvailableZoomFactor > zoom) {
+    NSString *errorMessage = [NSString
+        stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
+                         minAvailableZoomFactor, maxAvailableZoomFactor];
+
+    [result sendErrorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil];
+    return;
+  }
+
+  NSError *error = nil;
+  if (![_captureDevice lockForConfiguration:&error]) {
+    [result sendError:error];
+    return;
+  }
+  _captureDevice.videoZoomFactor = zoom;
+  [_captureDevice unlockForConfiguration];
+
+  [result sendSuccess];
+}
+
+- (CGFloat)getMinAvailableZoomFactor {
+  if (@available(iOS 11.0, *)) {
+    return _captureDevice.minAvailableVideoZoomFactor;
+  } else {
+    return 1.0;
+  }
+}
+
+- (CGFloat)getMaxAvailableZoomFactor {
+  if (@available(iOS 11.0, *)) {
+    return _captureDevice.maxAvailableVideoZoomFactor;
+  } else {
+    return _captureDevice.activeFormat.videoMaxZoomFactor;
+  }
+}
+
+- (BOOL)setupWriterForPath:(NSString *)path {
+  NSError *error = nil;
+  if (path == nil) {
+    return NO;
+  }
+  NSURL *outputURL = [NSURL fileURLWithPath:path];
+  if (_enableAudio && !_isAudioSetup) {
+    [self setUpCaptureSessionForAudio];
+  }
+
+  _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
+                                           fileType:AVFileTypeMPEG4
+                                              error:&error];
+  if (error) {
+    [_methodChannel invokeMethod:errorMethod arguments:error.description];
+    return NO;
+  }
+  NSParameterAssert(_videoWriter);
+
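+  // Let AVFoundation recommend writer settings for the current output configuration rather
+  // than hard-coding dimensions and bitrate.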
+  NSDictionary *videoSettings = [_captureVideoOutput
+      recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
+  _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
+                                                         outputSettings:videoSettings];
+
+  _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
+      assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
+                                 sourcePixelBufferAttributes:@{
+                                   (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)
+                                 }];
+
+  NSParameterAssert(_videoWriterInput);
+
+  _videoWriterInput.expectsMediaDataInRealTime = YES;
+
+  // Add the audio input
+  if (_enableAudio) {
+    AudioChannelLayout acl;
+    bzero(&acl, sizeof(acl));
+    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+    // Both types of audio inputs caused the output video file to be corrupted.
+    NSDictionary *audioOutputSettings = @{
+      AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
+      AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
+      AVNumberOfChannelsKey : [NSNumber numberWithInt:1],
+      AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)],
+    };
+    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
+                                                           outputSettings:audioOutputSettings];
+    _audioWriterInput.expectsMediaDataInRealTime = YES;
+
+    [_videoWriter addInput:_audioWriterInput];
+    [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
+  }
+
+  if (_flashMode == FLTFlashModeTorch) {
+    [self.captureDevice lockForConfiguration:nil];
+    [self.captureDevice setTorchMode:AVCaptureTorchModeOn];
+    [self.captureDevice unlockForConfiguration];
+  }
+
+  [_videoWriter addInput:_videoWriterInput];
+
+  [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
+
+  return YES;
+}
+
+- (void)setUpCaptureSessionForAudio {
+  NSError *error = nil;
+  // Set up the audio input.
+  AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
+  AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice
+                                                                           error:&error];
+  if (error) {
+    [_methodChannel invokeMethod:errorMethod arguments:error.description];
+    return;
+  }
+  // Set up the audio output.
+  _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+
+  if ([_captureSession canAddInput:audioInput]) {
+    [_captureSession addInput:audioInput];
+
+    if ([_captureSession canAddOutput:_audioOutput]) {
+      [_captureSession addOutput:_audioOutput];
+      _isAudioSetup = YES;
+    } else {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:@"Unable to add audio input/output to the capture session"];
+      _isAudioSetup = NO;
+    }
+  }
+}
+@end