Fix unguarded availability in Camera plugin (#2928)

* Fix unguarded availability in Camera plugin

  Guard the iOS 10+ AVFoundation APIs (AVCapturePhotoOutput,
  AVCaptureDeviceDiscoverySession, the AVCapturePhotoCaptureDelegate callback)
  with API_AVAILABLE/@available and answer with FlutterMethodNotImplemented on
  older iOS versions, put the ultraHigh (3840x2160) session preset behind an
  iOS 9 check, avoid retaining self in the onFrameAvailable block, and clean up
  camera.podspec so the podspec lint no longer needs
  --no-analyze/--ignore-warnings.
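
  A minimal sketch of the guard pattern this change applies, assuming a
  hypothetical `DemoCaptureController` class (the real code lives in `FLTCam`
  and `CameraPlugin` in CameraPlugin.m): iOS 10-only declarations are annotated
  with `API_AVAILABLE(ios(10))`, call sites are wrapped in
  `if (@available(iOS 10.0, *))`, and unsupported calls report
  `FlutterMethodNotImplemented` instead of crashing.

```objc
// Illustrative sketch only; DemoCaptureController and its method names are
// made up for this example and do not appear in the plugin.
#import <AVFoundation/AVFoundation.h>
#import <Flutter/Flutter.h>

@interface DemoCaptureController : NSObject
// Declaration-level guard: the property only exists on iOS 10 and later.
@property(readonly, nonatomic) AVCapturePhotoOutput *photoOutput API_AVAILABLE(ios(10));
@end

@implementation DemoCaptureController

- (void)setUpPhotoOutputForSession:(AVCaptureSession *)session {
  // Call-site guard: only touch AVCapturePhotoOutput when the OS supports it.
  if (@available(iOS 10.0, *)) {
    _photoOutput = [AVCapturePhotoOutput new];
    [_photoOutput setHighResolutionCaptureEnabled:YES];
    [session addOutput:_photoOutput];
  }
}

- (void)takePictureAtPath:(NSString *)path result:(FlutterResult)result {
  if (@available(iOS 10.0, *)) {
    // ... capture with AVCapturePhotoSettings and the photo output here ...
  } else {
    // Report the capability as unavailable instead of crashing on iOS 8/9.
    result(FlutterMethodNotImplemented);
  }
}

@end
```

  Annotating the declarations lets the compiler's availability checker flag any
  new unguarded use, while the runtime checks keep the plugin loadable on the
  iOS 8.0 deployment target declared in camera.podspec.
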
diff --git a/.cirrus.yml b/.cirrus.yml
index 8120b79..d27ce68 100644
--- a/.cirrus.yml
+++ b/.cirrus.yml
@@ -157,7 +157,7 @@
         - find . -name "*.podspec" | xargs grep -l "osx" | xargs rm
         # Skip the dummy podspecs used to placate the tool.
         - find . -name "*_web*.podspec" -o -name "*_mac*.podspec" | xargs rm
-        - ./script/incremental_build.sh podspecs --no-analyze camera --ignore-warnings camera
+        - ./script/incremental_build.sh podspecs
     - name: build-ipas+drive-examples
       env:
         PATH: $PATH:/usr/local/bin
diff --git a/packages/camera/CHANGELOG.md b/packages/camera/CHANGELOG.md
index 88c0ed8..2cf34dd 100644
--- a/packages/camera/CHANGELOG.md
+++ b/packages/camera/CHANGELOG.md
@@ -1,10 +1,14 @@
+## 0.5.8+5
+
+* Fix compilation/availability issues on iOS.
+
 ## 0.5.8+4
 
 * Fixed bug caused by casting a `CameraAccessException` on Android.
 
 ## 0.5.8+3
 
-* Fix bug in usage example in README.md 
+* Fix bug in usage example in README.md
 
 ## 0.5.8+2
 
diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m
index 42cdb6d..525c128 100644
--- a/packages/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/ios/Classes/CameraPlugin.m
@@ -19,11 +19,6 @@
 @property(readonly, nonatomic) FlutterResult result;
 @property(readonly, nonatomic) CMMotionManager *motionManager;
 @property(readonly, nonatomic) AVCaptureDevicePosition cameraPosition;
-
-- initWithPath:(NSString *)filename
-            result:(FlutterResult)result
-     motionManager:(CMMotionManager *)motionManager
-    cameraPosition:(AVCaptureDevicePosition)cameraPosition;
 @end
 
 @interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
@@ -68,7 +63,7 @@
                 previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
                         resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
                          bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
-                                   error:(NSError *)error {
+                                   error:(NSError *)error API_AVAILABLE(ios(10)) {
   selfReference = nil;
   if (error) {
     _result(getFlutterError(error));
@@ -160,14 +155,14 @@
                               AVCaptureAudioDataOutputSampleBufferDelegate,
                               FlutterStreamHandler>
 @property(readonly, nonatomic) int64_t textureId;
-@property(nonatomic, copy) void (^onFrameAvailable)();
+@property(nonatomic, copy) void (^onFrameAvailable)(void);
 @property BOOL enableAudio;
 @property(nonatomic) FlutterEventChannel *eventChannel;
 @property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
 @property(nonatomic) FlutterEventSink eventSink;
 @property(readonly, nonatomic) AVCaptureSession *captureSession;
 @property(readonly, nonatomic) AVCaptureDevice *captureDevice;
-@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput;
+@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
 @property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
 @property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
 @property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
@@ -192,19 +187,6 @@
 @property(assign, nonatomic) CMTime audioTimeOffset;
 @property(nonatomic) CMMotionManager *motionManager;
 @property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
-- (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                       enableAudio:(BOOL)enableAudio
-                     dispatchQueue:(dispatch_queue_t)dispatchQueue
-                             error:(NSError **)error;
-
-- (void)start;
-- (void)stop;
-- (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result;
-- (void)stopVideoRecordingWithResult:(FlutterResult)result;
-- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
-- (void)stopImageStream;
-- (void)captureToFile:(NSString *)filename result:(FlutterResult)result;
 @end
 
 @implementation FLTCam {
@@ -254,9 +236,12 @@
   [_captureSession addInputWithNoConnections:_captureVideoInput];
   [_captureSession addOutputWithNoConnections:_captureVideoOutput];
   [_captureSession addConnection:connection];
-  _capturePhotoOutput = [AVCapturePhotoOutput new];
-  [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
-  [_captureSession addOutput:_capturePhotoOutput];
+
+  if (@available(iOS 10.0, *)) {
+    _capturePhotoOutput = [AVCapturePhotoOutput new];
+    [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
+    [_captureSession addOutput:_capturePhotoOutput];
+  }
   _motionManager = [[CMMotionManager alloc] init];
   [_motionManager startAccelerometerUpdates];
 
@@ -272,7 +257,7 @@
   [_captureSession stopRunning];
 }
 
-- (void)captureToFile:(NSString *)path result:(FlutterResult)result {
+- (void)captureToFile:(NSString *)path result:(FlutterResult)result API_AVAILABLE(ios(10)) {
   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
   if (_resolutionPreset == max) {
     [settings setHighResolutionPhotoEnabled:YES];
@@ -288,6 +273,14 @@
 - (void)setCaptureSessionPreset:(ResolutionPreset)resolutionPreset {
   switch (resolutionPreset) {
     case max:
+    case ultraHigh:
+      if (@available(iOS 9.0, *)) {
+        if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
+          _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
+          _previewSize = CGSizeMake(3840, 2160);
+          break;
+        }
+      }
       if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
         _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
         _previewSize =
@@ -295,12 +288,6 @@
                        _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
         break;
       }
-    case ultraHigh:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
-        _previewSize = CGSizeMake(3840, 2160);
-        break;
-      }
     case veryHigh:
       if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
         _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
@@ -495,7 +482,7 @@
   }
 }
 
-- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
   CMItemCount count;
   CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
   CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
@@ -801,33 +788,37 @@
 
 - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
   if ([@"availableCameras" isEqualToString:call.method]) {
-    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
-        discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
-                              mediaType:AVMediaTypeVideo
-                               position:AVCaptureDevicePositionUnspecified];
-    NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
-    NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
-        [[NSMutableArray alloc] initWithCapacity:devices.count];
-    for (AVCaptureDevice *device in devices) {
-      NSString *lensFacing;
-      switch ([device position]) {
-        case AVCaptureDevicePositionBack:
-          lensFacing = @"back";
-          break;
-        case AVCaptureDevicePositionFront:
-          lensFacing = @"front";
-          break;
-        case AVCaptureDevicePositionUnspecified:
-          lensFacing = @"external";
-          break;
+    if (@available(iOS 10.0, *)) {
+      AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+          discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
+                                mediaType:AVMediaTypeVideo
+                                 position:AVCaptureDevicePositionUnspecified];
+      NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
+      NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
+          [[NSMutableArray alloc] initWithCapacity:devices.count];
+      for (AVCaptureDevice *device in devices) {
+        NSString *lensFacing;
+        switch ([device position]) {
+          case AVCaptureDevicePositionBack:
+            lensFacing = @"back";
+            break;
+          case AVCaptureDevicePositionFront:
+            lensFacing = @"front";
+            break;
+          case AVCaptureDevicePositionUnspecified:
+            lensFacing = @"external";
+            break;
+        }
+        [reply addObject:@{
+          @"name" : [device uniqueID],
+          @"lensFacing" : lensFacing,
+          @"sensorOrientation" : @90,
+        }];
       }
-      [reply addObject:@{
-        @"name" : [device uniqueID],
-        @"lensFacing" : lensFacing,
-        @"sensorOrientation" : @90,
-      }];
+      result(reply);
+    } else {
+      result(FlutterMethodNotImplemented);
     }
-    result(reply);
   } else if ([@"initialize" isEqualToString:call.method]) {
     NSString *cameraName = call.arguments[@"cameraName"];
     NSString *resolutionPreset = call.arguments[@"resolutionPreset"];
@@ -846,8 +837,9 @@
       }
       int64_t textureId = [_registry registerTexture:cam];
       _camera = cam;
+      __weak CameraPlugin *weakSelf = self;
       cam.onFrameAvailable = ^{
-        [_registry textureFrameAvailable:textureId];
+        [weakSelf.registry textureFrameAvailable:textureId];
       };
       FlutterEventChannel *eventChannel = [FlutterEventChannel
           eventChannelWithName:[NSString
@@ -880,9 +872,12 @@
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;
-
     if ([@"takePicture" isEqualToString:call.method]) {
-      [_camera captureToFile:call.arguments[@"path"] result:result];
+      if (@available(iOS 10.0, *)) {
+        [_camera captureToFile:call.arguments[@"path"] result:result];
+      } else {
+        result(FlutterMethodNotImplemented);
+      }
     } else if ([@"dispose" isEqualToString:call.method]) {
       [_registry unregisterTexture:textureId];
       [_camera close];
diff --git a/packages/camera/ios/camera.podspec b/packages/camera/ios/camera.podspec
index dfe566c..960f102 100644
--- a/packages/camera/ios/camera.podspec
+++ b/packages/camera/ios/camera.podspec
@@ -4,19 +4,21 @@
 Pod::Spec.new do |s|
   s.name             = 'camera'
   s.version          = '0.0.1'
-  s.summary          = 'A new flutter plugin project.'
+  s.summary          = 'Flutter Camera'
   s.description      = <<-DESC
-A new flutter plugin project.
+A Flutter plugin to use the camera from your Flutter app.
                        DESC
-  s.homepage         = 'http://example.com'
-  s.license          = { :file => '../LICENSE' }
-  s.author           = { 'Your Company' => 'email@example.com' }
-  s.source           = { :path => '.' }
+  s.homepage         = 'https://github.com/flutter/plugins'
+  s.license          = { :type => 'BSD', :file => '../LICENSE' }
+  s.author           = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
+  s.source           = { :http => 'https://github.com/flutter/plugins/tree/master/packages/camera' }
+  s.documentation_url = 'https://pub.dev/packages/camera'
   s.source_files = 'Classes/**/*'
   s.public_header_files = 'Classes/**/*.h'
   s.dependency 'Flutter'
+
   s.platform = :ios, '8.0'
-  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS' => 'armv7 arm64 x86_64' }
+  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
 
   s.test_spec 'Tests' do |test_spec|
     test_spec.source_files = 'Tests/**/*'
diff --git a/packages/camera/pubspec.yaml b/packages/camera/pubspec.yaml
index a93c016..01c7907 100644
--- a/packages/camera/pubspec.yaml
+++ b/packages/camera/pubspec.yaml
@@ -2,7 +2,7 @@
 description: A Flutter plugin for getting information about and controlling the
   camera on Android and iOS. Supports previewing the camera feed, capturing images, capturing video,
   and streaming image buffers to dart.
-version: 0.5.8+4
+version: 0.5.8+5
 
 homepage: https://github.com/flutter/plugins/tree/master/packages/camera