[camera] Revert Android part of #3272 (#3405)

Reverts the Android part of https://github.com/flutter/packages/pull/3272 (commit d311478b6d3697bd957626b6c1515e8386533736), which introduced significant crash flakiness in the tests.
diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md
index 53bf1ac..8b0b6c1 100644
--- a/packages/camera/camera_android/CHANGELOG.md
+++ b/packages/camera/camera_android/CHANGELOG.md
@@ -1,7 +1,3 @@
-## 0.10.5
-
-* Allows camera to be switched while video recording.
-
 ## 0.10.4+2
 
 * Aligns Dart and Flutter SDK constraints.
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
index afdc383..264ab72 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -96,28 +96,13 @@
    * Holds all of the camera features/settings and will be used to update the request builder when
    * one changes.
    */
-  private CameraFeatures cameraFeatures;
-
-  private String imageFormatGroup;
-
-  /**
-   * Takes an input/output surface and orients the recording correctly. This is needed because
-   * switching cameras while recording causes the wrong orientation.
-   */
-  private VideoRenderer videoRenderer;
-
-  /**
-   * Whether or not the camera aligns with the initial way the camera was facing if the camera was
-   * flipped.
-   */
-  private int initialCameraFacing;
+  private final CameraFeatures cameraFeatures;
 
   private final SurfaceTextureEntry flutterTexture;
-  private final ResolutionPreset resolutionPreset;
   private final boolean enableAudio;
   private final Context applicationContext;
   private final DartMessenger dartMessenger;
-  private CameraProperties cameraProperties;
+  private final CameraProperties cameraProperties;
   private final CameraFeatureFactory cameraFeatureFactory;
   private final Activity activity;
   /** A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. */
@@ -207,7 +192,6 @@
     this.applicationContext = activity.getApplicationContext();
     this.cameraProperties = cameraProperties;
     this.cameraFeatureFactory = cameraFeatureFactory;
-    this.resolutionPreset = resolutionPreset;
     this.cameraFeatures =
         CameraFeatures.init(
             cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
@@ -248,7 +232,6 @@
     if (mediaRecorder != null) {
       mediaRecorder.release();
     }
-    closeRenderer();
 
     final PlatformChannel.DeviceOrientation lockedOrientation =
         cameraFeatures.getSensorOrientation().getLockedCaptureOrientation();
@@ -276,7 +259,6 @@
 
   @SuppressLint("MissingPermission")
   public void open(String imageFormatGroup) throws CameraAccessException {
-    this.imageFormatGroup = imageFormatGroup;
     final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
 
     if (!resolutionFeature.checkIsSupported()) {
@@ -321,16 +303,14 @@
             cameraDevice = new DefaultCameraDeviceWrapper(device);
             try {
               startPreview();
-              if (!recordingVideo) // only send initialization if we werent already recording and switching cameras
               dartMessenger.sendCameraInitializedEvent(
-                    resolutionFeature.getPreviewSize().getWidth(),
-                    resolutionFeature.getPreviewSize().getHeight(),
-                    cameraFeatures.getExposureLock().getValue(),
-                    cameraFeatures.getAutoFocus().getValue(),
-                    cameraFeatures.getExposurePoint().checkIsSupported(),
-                    cameraFeatures.getFocusPoint().checkIsSupported());
-
-            } catch (CameraAccessException | InterruptedException e) {
+                  resolutionFeature.getPreviewSize().getWidth(),
+                  resolutionFeature.getPreviewSize().getHeight(),
+                  cameraFeatures.getExposureLock().getValue(),
+                  cameraFeatures.getAutoFocus().getValue(),
+                  cameraFeatures.getExposurePoint().checkIsSupported(),
+                  cameraFeatures.getFocusPoint().checkIsSupported());
+            } catch (CameraAccessException e) {
               dartMessenger.sendCameraErrorEvent(e.getMessage());
               close();
             }
@@ -340,8 +320,7 @@
           public void onClosed(@NonNull CameraDevice camera) {
             Log.i(TAG, "open | onClosed");
 
-            // Prevents calls to methods that would otherwise result in IllegalStateException
-            // exceptions.
+            // Prevents calls to methods that would otherwise result in IllegalStateException exceptions.
             cameraDevice = null;
             closeCaptureSession();
             dartMessenger.sendCameraClosingEvent();
@@ -756,7 +735,7 @@
     if (imageStreamChannel != null) {
       setStreamHandler(imageStreamChannel);
     }
-    initialCameraFacing = cameraProperties.getLensFacing();
+
     recordingVideo = true;
     try {
       startCapture(true, imageStreamChannel != null);
@@ -768,13 +747,6 @@
     }
   }
 
-  private void closeRenderer() {
-    if (videoRenderer != null) {
-      videoRenderer.close();
-      videoRenderer = null;
-    }
-  }
-
   public void stopVideoRecording(@NonNull final Result result) {
     if (!recordingVideo) {
       result.success(null);
@@ -785,7 +757,6 @@
         cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
     recordingVideo = false;
     try {
-      closeRenderer();
       captureSession.abortCaptures();
       mediaRecorder.stop();
     } catch (CameraAccessException | IllegalStateException e) {
@@ -794,7 +765,7 @@
     mediaRecorder.reset();
     try {
       startPreview();
-    } catch (CameraAccessException | IllegalStateException | InterruptedException e) {
+    } catch (CameraAccessException | IllegalStateException e) {
       result.error("videoRecordingFailed", e.getMessage(), null);
       return;
     }
@@ -1078,50 +1049,13 @@
         null, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
   }
 
-  public void startPreview() throws CameraAccessException, InterruptedException {
-    // If recording is already in progress, the camera is being flipped, so send it through the VideoRenderer to keep the correct orientation.
-    if (recordingVideo) {
-      startPreviewWithVideoRendererStream();
-    } else {
-      startRegularPreview();
-    }
-  }
-
-  private void startRegularPreview() throws CameraAccessException {
+  public void startPreview() throws CameraAccessException {
     if (pictureImageReader == null || pictureImageReader.getSurface() == null) return;
     Log.i(TAG, "startPreview");
+
     createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
   }
 
-  private void startPreviewWithVideoRendererStream()
-      throws CameraAccessException, InterruptedException {
-    if (videoRenderer == null) return;
-
-    // get rotation for rendered video
-    final PlatformChannel.DeviceOrientation lockedOrientation =
-        cameraFeatures.getSensorOrientation().getLockedCaptureOrientation();
-    DeviceOrientationManager orientationManager =
-        cameraFeatures.getSensorOrientation().getDeviceOrientationManager();
-
-    int rotation = 0;
-    if (orientationManager != null) {
-      rotation =
-          lockedOrientation == null
-              ? orientationManager.getVideoOrientation()
-              : orientationManager.getVideoOrientation(lockedOrientation);
-    }
-
-    if (cameraProperties.getLensFacing() != initialCameraFacing) {
-
-      // If the new camera is facing the opposite way than the initial recording,
-      // the rotation should be flipped 180 degrees.
-      rotation = (rotation + 180) % 360;
-    }
-    videoRenderer.setRotation(rotation);
-
-    createCaptureSession(CameraDevice.TEMPLATE_RECORD, videoRenderer.getInputSurface());
-  }
-
   public void startPreviewWithImageStream(EventChannel imageStreamChannel)
       throws CameraAccessException {
     setStreamHandler(imageStreamChannel);
@@ -1245,7 +1179,17 @@
   public void close() {
     Log.i(TAG, "close");
 
-    stopAndReleaseCamera();
+    if (cameraDevice != null) {
+      cameraDevice.close();
+      cameraDevice = null;
+
+      // Closing the CameraDevice without closing the CameraCaptureSession is recommended
+      // for quickly closing the camera:
+      // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
+      captureSession = null;
+    } else {
+      closeCaptureSession();
+    }
 
     if (pictureImageReader != null) {
       pictureImageReader.close();
@@ -1264,75 +1208,6 @@
     stopBackgroundThread();
   }
 
-  private void stopAndReleaseCamera() {
-    if (cameraDevice != null) {
-      cameraDevice.close();
-      cameraDevice = null;
-
-      // Closing the CameraDevice without closing the CameraCaptureSession is recommended
-      // for quickly closing the camera:
-      // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
-      captureSession = null;
-    } else {
-      closeCaptureSession();
-    }
-  }
-
-  private void prepareVideoRenderer() {
-    if (videoRenderer != null) return;
-    final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
-
-    // handle videoRenderer errors
-    Thread.UncaughtExceptionHandler videoRendererUncaughtExceptionHandler =
-        new Thread.UncaughtExceptionHandler() {
-          @Override
-          public void uncaughtException(Thread thread, Throwable ex) {
-            dartMessenger.sendCameraErrorEvent(
-                "Failed to process frames after camera was flipped.");
-          }
-        };
-
-    videoRenderer =
-        new VideoRenderer(
-            mediaRecorder.getSurface(),
-            resolutionFeature.getCaptureSize().getWidth(),
-            resolutionFeature.getCaptureSize().getHeight(),
-            videoRendererUncaughtExceptionHandler);
-  }
-
-  public void setDescriptionWhileRecording(
-      @NonNull final Result result, CameraProperties properties) {
-
-    if (!recordingVideo) {
-      result.error("setDescriptionWhileRecordingFailed", "Device was not recording", null);
-      return;
-    }
-
-    // See VideoRenderer.java requires API 26 to switch camera while recording
-    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) {
-      result.error(
-          "setDescriptionWhileRecordingFailed",
-          "Device does not support switching the camera while recording",
-          null);
-      return;
-    }
-
-    stopAndReleaseCamera();
-    prepareVideoRenderer();
-    cameraProperties = properties;
-    cameraFeatures =
-        CameraFeatures.init(
-            cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
-    cameraFeatures.setAutoFocus(
-        cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
-    try {
-      open(imageFormatGroup);
-    } catch (CameraAccessException e) {
-      result.error("setDescriptionWhileRecordingFailed", e.getMessage(), null);
-    }
-    result.success(null);
-  }
-
   public void dispose() {
     Log.i(TAG, "dispose");
 
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
index aad62bb..432344a 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -354,18 +354,6 @@
           result.success(null);
           break;
         }
-      case "setDescriptionWhileRecording":
-        {
-          try {
-            String cameraName = call.argument("cameraName");
-            CameraProperties cameraProperties =
-                new CameraPropertiesImpl(cameraName, CameraUtils.getCameraManager(activity));
-            camera.setDescriptionWhileRecording(result, cameraProperties);
-          } catch (Exception e) {
-            handleException(e, result);
-          }
-          break;
-        }
       case "dispose":
         {
           if (camera != null) {
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
deleted file mode 100644
index 62a7064..0000000
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
+++ /dev/null
@@ -1,364 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package io.flutter.plugins.camera;
-
-import static android.os.SystemClock.uptimeMillis;
-
-import android.graphics.SurfaceTexture;
-import android.opengl.EGL14;
-import android.opengl.EGLConfig;
-import android.opengl.EGLContext;
-import android.opengl.EGLDisplay;
-import android.opengl.EGLExt;
-import android.opengl.EGLSurface;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
-import android.opengl.GLUtils;
-import android.opengl.Matrix;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.util.Log;
-import android.view.Surface;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
-/**
- * Renders video onto texture after performing a matrix rotation on each frame.
- *
- * <p>VideoRenderer is needed because when switching between cameras mid recording, the orientation
- * of the recording from the new camera usually becomes flipped. MediaRecorder has
- * setOrientationHint, but that cannot be called mid recording and therefore isn't useful. Android
- * Camera2 has no setDisplayOrientation on the camera itself as it is supposed to 'just work' (see
- * https://stackoverflow.com/questions/33479004/what-is-the-camera2-api-equivalent-of-setdisplayorientation).
- * Therefore it cannot be used to set the camera's orientation either.
- *
- * <p>This leaves the solution to be routing the recording through a surface texture and performing
- * a matrix transformation on it manually to get the correct orientation. This only happens when
- * setDescription is called mid video recording.
- */
-public class VideoRenderer {
-
-  private static String TAG = "VideoRenderer";
-
-  private static final String vertexShaderCode =
-      "  precision highp float;\n"
-          + "            attribute vec3 vertexPosition;\n"
-          + "            attribute vec2 uvs;\n"
-          + "            varying vec2 varUvs;\n"
-          + "            uniform mat4 texMatrix;\n"
-          + "            uniform mat4 mvp;\n"
-          + "\n"
-          + "            void main()\n"
-          + "            {\n"
-          + "                varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;\n"
-          + "                gl_Position = mvp * vec4(vertexPosition, 1.0);\n"
-          + "            }";
-
-  private static final String fragmentShaderCode =
-      " #extension GL_OES_EGL_image_external : require\n"
-          + "            precision mediump float;\n"
-          + "\n"
-          + "            varying vec2 varUvs;\n"
-          + "            uniform samplerExternalOES texSampler;\n"
-          + "\n"
-          + "            void main()\n"
-          + "            {\n"
-          + "                vec4 c = texture2D(texSampler, varUvs);\n"
-          + "                gl_FragColor = vec4(c.r, c.g, c.b, c.a);\n"
-          + "            }";
-
-  private final int[] textureHandles = new int[1];
-
-  private final float[] vertices =
-      new float[] {
-        -1.0f, -1.0f, 0.0f, 0f, 0f, -1.0f, 1.0f, 0.0f, 0f, 1f, 1.0f, 1.0f, 0.0f, 1f, 1f, 1.0f,
-        -1.0f, 0.0f, 1f, 0f
-      };
-
-  private final int[] indices = new int[] {2, 1, 0, 0, 3, 2};
-
-  private int program;
-  private int vertexHandle = 0;
-  private final int[] bufferHandles = new int[2];
-  private int uvsHandle = 0;
-  private int texMatrixHandle = 0;
-  private int mvpHandle = 0;
-
-  EGLDisplay display;
-  EGLContext context;
-  EGLSurface surface;
-  private Thread thread;
-  private final Surface outputSurface;
-  private SurfaceTexture inputSurfaceTexture;
-  private Surface inputSurface;
-
-  private HandlerThread surfaceTextureFrameAvailableHandler;
-  private final Object surfaceTextureAvailableFrameLock = new Object();
-  private Boolean surfaceTextureFrameAvailable = false;
-
-  private final int recordingWidth;
-  private final int recordingHeight;
-  private int rotation = 0;
-
-  private final Object lock = new Object();
-
-  private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler;
-
-  /** Gets surface for input. Blocks until surface is ready. */
-  public Surface getInputSurface() throws InterruptedException {
-    synchronized (lock) {
-      while (inputSurface == null) {
-        lock.wait();
-      }
-    }
-    return inputSurface;
-  }
-
-  public VideoRenderer(
-      Surface outputSurface,
-      int recordingWidth,
-      int recordingHeight,
-      Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
-    this.outputSurface = outputSurface;
-    this.recordingHeight = recordingHeight;
-    this.recordingWidth = recordingWidth;
-    this.uncaughtExceptionHandler = uncaughtExceptionHandler;
-    startOpenGL();
-    Log.d(TAG, "VideoRenderer setup complete");
-  }
-
-  /** Stop rendering and cleanup resources. */
-  public void close() {
-    thread.interrupt();
-    surfaceTextureFrameAvailableHandler.quitSafely();
-    cleanupOpenGL();
-    inputSurfaceTexture.release();
-  }
-
-  private void cleanupOpenGL() {
-    GLES20.glDeleteBuffers(2, bufferHandles, 0);
-    GLES20.glDeleteTextures(1, textureHandles, 0);
-    EGL14.eglDestroyContext(display, context);
-    EGL14.eglDestroySurface(display, surface);
-    GLES20.glDeleteProgram(program);
-  }
-
-  /** Configures openGL. Must be called in same thread as draw is called. */
-  private void configureOpenGL() {
-    synchronized (lock) {
-      display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
-      if (display == EGL14.EGL_NO_DISPLAY)
-        throw new RuntimeException(
-            "eglDisplay == EGL14.EGL_NO_DISPLAY: "
-                + GLUtils.getEGLErrorString(EGL14.eglGetError()));
-
-      int[] version = new int[2];
-      if (!EGL14.eglInitialize(display, version, 0, version, 1))
-        throw new RuntimeException(
-            "eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
-
-      String eglExtensions = EGL14.eglQueryString(display, EGL14.EGL_EXTENSIONS);
-      if (!eglExtensions.contains("EGL_ANDROID_presentation_time"))
-        throw new RuntimeException(
-            "cannot configure OpenGL. missing EGL_ANDROID_presentation_time");
-
-      int[] attribList =
-          new int[] {
-            EGL14.EGL_RED_SIZE, 8,
-            EGL14.EGL_GREEN_SIZE, 8,
-            EGL14.EGL_BLUE_SIZE, 8,
-            EGL14.EGL_ALPHA_SIZE, 8,
-            EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
-            EGLExt.EGL_RECORDABLE_ANDROID, 1,
-            EGL14.EGL_NONE
-          };
-
-      EGLConfig[] configs = new EGLConfig[1];
-      int[] numConfigs = new int[1];
-      if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, configs.length, numConfigs, 0))
-        throw new RuntimeException(GLUtils.getEGLErrorString(EGL14.eglGetError()));
-
-      int err = EGL14.eglGetError();
-      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
-
-      int[] ctxAttribs = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
-      context = EGL14.eglCreateContext(display, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0);
-
-      err = EGL14.eglGetError();
-      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
-
-      int[] surfaceAttribs = new int[] {EGL14.EGL_NONE};
-
-      surface = EGL14.eglCreateWindowSurface(display, configs[0], outputSurface, surfaceAttribs, 0);
-
-      err = EGL14.eglGetError();
-      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
-
-      if (!EGL14.eglMakeCurrent(display, surface, surface, context))
-        throw new RuntimeException(
-            "eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
-
-      ByteBuffer vertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
-      vertexBuffer.order(ByteOrder.nativeOrder());
-      vertexBuffer.asFloatBuffer().put(vertices);
-      vertexBuffer.asFloatBuffer().position(0);
-
-      ByteBuffer indexBuffer = ByteBuffer.allocateDirect(indices.length * 4);
-      indexBuffer.order(ByteOrder.nativeOrder());
-      indexBuffer.asIntBuffer().put(indices);
-      indexBuffer.position(0);
-
-      int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
-      int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
-
-      program = GLES20.glCreateProgram();
-
-      GLES20.glAttachShader(program, vertexShader);
-      GLES20.glAttachShader(program, fragmentShader);
-      GLES20.glLinkProgram(program);
-
-      deleteShader(vertexShader);
-      deleteShader(fragmentShader);
-
-      vertexHandle = GLES20.glGetAttribLocation(program, "vertexPosition");
-      uvsHandle = GLES20.glGetAttribLocation(program, "uvs");
-      texMatrixHandle = GLES20.glGetUniformLocation(program, "texMatrix");
-      mvpHandle = GLES20.glGetUniformLocation(program, "mvp");
-
-      // Initialize buffers
-      GLES20.glGenBuffers(2, bufferHandles, 0);
-
-      GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
-      GLES20.glBufferData(
-          GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW);
-
-      GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
-      GLES20.glBufferData(
-          GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.length * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW);
-
-      // Init texture that will receive decoded frames
-      GLES20.glGenTextures(1, textureHandles, 0);
-      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureHandles[0]);
-
-      inputSurfaceTexture = new SurfaceTexture(getTexId());
-      inputSurfaceTexture.setDefaultBufferSize(recordingWidth, recordingHeight);
-      surfaceTextureFrameAvailableHandler = new HandlerThread("FrameHandlerThread");
-      surfaceTextureFrameAvailableHandler.start();
-      inputSurface = new Surface(inputSurfaceTexture);
-
-      inputSurfaceTexture.setOnFrameAvailableListener(
-          new SurfaceTexture.OnFrameAvailableListener() {
-            @Override
-            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
-              synchronized (surfaceTextureAvailableFrameLock) {
-                if (surfaceTextureFrameAvailable)
-                  Log.w(TAG, "Frame available before processing other frames. dropping frames");
-                surfaceTextureFrameAvailable = true;
-                surfaceTextureAvailableFrameLock.notifyAll();
-              }
-            }
-          },
-          new Handler(surfaceTextureFrameAvailableHandler.getLooper()));
-      lock.notifyAll();
-    }
-  }
-
-  /** Starts and configures Video Renderer. */
-  private void startOpenGL() {
-    Log.d(TAG, "Starting OpenGL Thread");
-    thread =
-        new Thread() {
-          @Override
-          public void run() {
-
-            configureOpenGL();
-
-            try {
-              // Continuously pull frames from input surface texture and use videoRenderer to modify
-              // to correct rotation.
-              while (!Thread.interrupted()) {
-
-                synchronized (surfaceTextureAvailableFrameLock) {
-                  while (!surfaceTextureFrameAvailable) {
-                    surfaceTextureAvailableFrameLock.wait(500);
-                  }
-                  surfaceTextureFrameAvailable = false;
-                }
-
-                inputSurfaceTexture.updateTexImage();
-
-                float[] surfaceTextureMatrix = new float[16];
-                inputSurfaceTexture.getTransformMatrix(surfaceTextureMatrix);
-
-                draw(recordingWidth, recordingHeight, surfaceTextureMatrix);
-              }
-            } catch (InterruptedException e) {
-              Log.d(TAG, "thread interrupted while waiting for frames");
-            }
-          }
-        };
-    thread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
-    thread.start();
-  }
-
-  public int getTexId() {
-    return textureHandles[0];
-  }
-
-  public float[] moveMatrix() {
-    float[] m = new float[16];
-    Matrix.setIdentityM(m, 0);
-    Matrix.rotateM(m, 0, rotation, 0, 0, 1);
-    return m;
-  }
-
-  public void setRotation(int rotation) {
-    this.rotation = rotation;
-  }
-
-  private int loadShader(int type, String code) {
-
-    int shader = GLES20.glCreateShader(type);
-
-    GLES20.glShaderSource(shader, code);
-    GLES20.glCompileShader(shader);
-    return shader;
-  }
-
-  private void deleteShader(int shader) {
-    GLES20.glDeleteShader(shader);
-  }
-
-  public void draw(int viewportWidth, int viewportHeight, float[] texMatrix) {
-
-    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
-    GLES20.glClearColor(0f, 0f, 0f, 0f);
-
-    GLES20.glViewport(0, 0, viewportWidth, viewportHeight);
-
-    GLES20.glUseProgram(program);
-
-    // Pass transformations to shader
-    GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, texMatrix, 0);
-    GLES20.glUniformMatrix4fv(mvpHandle, 1, false, moveMatrix(), 0);
-
-    // Prepare buffers with vertices and indices & draw
-    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
-    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
-
-    GLES20.glEnableVertexAttribArray(vertexHandle);
-    GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0);
-
-    GLES20.glEnableVertexAttribArray(uvsHandle);
-    GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4);
-
-    GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0);
-
-    EGLExt.eglPresentationTimeANDROID(display, surface, uptimeMillis() * 1000000);
-    if (!EGL14.eglSwapBuffers(display, surface)) {
-      Log.w(TAG, "eglSwapBuffers() " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
-    }
-  }
-}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
index 9de33e3..9a67901 100644
--- a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
@@ -603,123 +603,6 @@
   }
 
   @Test
-  public void setDescriptionWhileRecording() {
-    MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
-    MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
-    VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
-    TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
-    TestUtils.setPrivateField(camera, "recordingVideo", true);
-    TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
-
-    final CameraProperties newCameraProperties = mock(CameraProperties.class);
-    camera.setDescriptionWhileRecording(mockResult, newCameraProperties);
-
-    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) {
-      verify(mockResult, times(1))
-          .error(
-              eq("setDescriptionWhileRecordingFailed"),
-              eq("Device does not support switching the camera while recording"),
-              eq(null));
-    } else {
-      verify(mockResult, times(1)).success(null);
-      verify(mockResult, never()).error(any(), any(), any());
-    }
-  }
-
-  @Test
-  public void startPreview_shouldPullStreamFromVideoRenderer()
-      throws InterruptedException, CameraAccessException {
-    VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
-    ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
-    mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
-    SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
-    Size mockSize = mock(Size.class);
-    TestUtils.setPrivateField(camera, "recordingVideo", true);
-    TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
-    CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
-    TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
-
-    TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
-        (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
-    ResolutionFeature resolutionFeature =
-        (ResolutionFeature)
-            TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
-
-    when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
-    when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
-
-    camera.startPreview();
-    verify(mockVideoRenderer, times(1))
-        .getInputSurface(); // stream pulled from videoRenderer's surface.
-  }
-
-  @Test
-  public void startPreview_shouldPullStreamFromImageReader()
-      throws InterruptedException, CameraAccessException {
-    ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
-    mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
-    SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
-    Size mockSize = mock(Size.class);
-    ImageReader mockImageReader = mock(ImageReader.class);
-    TestUtils.setPrivateField(camera, "recordingVideo", false);
-    TestUtils.setPrivateField(camera, "pictureImageReader", mockImageReader);
-    CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
-    TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
-
-    TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
-        (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
-    ResolutionFeature resolutionFeature =
-        (ResolutionFeature)
-            TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
-
-    when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
-    when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
-
-    camera.startPreview();
-    verify(mockImageReader, times(1))
-        .getSurface(); // stream pulled from regular imageReader's surface.
-  }
-
-  @Test
-  public void startPreview_shouldFlipRotation() throws InterruptedException, CameraAccessException {
-    VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
-    ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
-    mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
-    SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
-    Size mockSize = mock(Size.class);
-    TestUtils.setPrivateField(camera, "recordingVideo", true);
-    TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
-    TestUtils.setPrivateField(camera, "initialCameraFacing", CameraMetadata.LENS_FACING_BACK);
-    CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
-    TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
-
-    TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
-        (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
-    ResolutionFeature resolutionFeature =
-        (ResolutionFeature)
-            TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
-
-    when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
-    when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
-    when(mockCameraProperties.getLensFacing()).thenReturn(CameraMetadata.LENS_FACING_FRONT);
-
-    camera.startPreview();
-    verify(mockVideoRenderer, times(1)).setRotation(180);
-  }
-
-  @Test
-  public void setDescriptionWhileRecording_shouldErrorWhenNotRecording() {
-    MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
-    TestUtils.setPrivateField(camera, "recordingVideo", false);
-    final CameraProperties newCameraProperties = mock(CameraProperties.class);
-    camera.setDescriptionWhileRecording(mockResult, newCameraProperties);
-
-    verify(mockResult, times(1))
-        .error("setDescriptionWhileRecordingFailed", "Device was not recording", null);
-    verify(mockResult, never()).success(any());
-  }
-
-  @Test
   public void
       resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() {
     TestUtils.setPrivateField(camera, "recordingVideo", true);
diff --git a/packages/camera/camera_android/example/integration_test/camera_test.dart b/packages/camera/camera_android/example/integration_test/camera_test.dart
index 8d66307..e499872 100644
--- a/packages/camera/camera_android/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_android/example/integration_test/camera_test.dart
@@ -9,7 +9,6 @@
 import 'package:camera_example/camera_controller.dart';
 import 'package:camera_platform_interface/camera_platform_interface.dart';
 import 'package:flutter/painting.dart';
-import 'package:flutter/services.dart';
 import 'package:flutter_test/flutter_test.dart';
 import 'package:integration_test/integration_test.dart';
 import 'package:path_provider/path_provider.dart';
@@ -206,66 +205,6 @@
     expect(duration, lessThan(recordingTime - timePaused));
   });
 
-  testWidgets('Set description while recording', (WidgetTester tester) async {
-    final List<CameraDescription> cameras =
-        await CameraPlatform.instance.availableCameras();
-    if (cameras.length < 2) {
-      return;
-    }
-
-    final CameraController controller = CameraController(
-      cameras[0],
-      ResolutionPreset.low,
-      enableAudio: false,
-    );
-
-    await controller.initialize();
-    await controller.prepareForVideoRecording();
-
-    await controller.startVideoRecording();
-
-    // SDK < 26 will throw a platform error when trying to switch and keep the same camera
-    // we accept either outcome here, while the native unit tests check the outcome based on the current Android SDK
-    bool failed = false;
-    try {
-      await controller.setDescription(cameras[1]);
-    } catch (err) {
-      expect(err, isA<PlatformException>());
-      expect(
-          (err as PlatformException).message,
-          equals(
-              'Device does not support switching the camera while recording'));
-      failed = true;
-    }
-
-    if (failed) {
-      // cameras did not switch
-      expect(controller.description, cameras[0]);
-    } else {
-      // cameras switched
-      expect(controller.description, cameras[1]);
-    }
-  });
-
-  testWidgets('Set description', (WidgetTester tester) async {
-    final List<CameraDescription> cameras =
-        await CameraPlatform.instance.availableCameras();
-    if (cameras.length < 2) {
-      return;
-    }
-
-    final CameraController controller = CameraController(
-      cameras[0],
-      ResolutionPreset.low,
-      enableAudio: false,
-    );
-
-    await controller.initialize();
-    await controller.setDescription(cameras[1]);
-
-    expect(controller.description, cameras[1]);
-  });
-
   testWidgets(
     'image streaming',
     (WidgetTester tester) async {
diff --git a/packages/camera/camera_android/example/lib/camera_controller.dart b/packages/camera/camera_android/example/lib/camera_controller.dart
index fd4f09a..8139dcd 100644
--- a/packages/camera/camera_android/example/lib/camera_controller.dart
+++ b/packages/camera/camera_android/example/lib/camera_controller.dart
@@ -24,7 +24,6 @@
     required this.exposureMode,
     required this.focusMode,
     required this.deviceOrientation,
-    required this.description,
     this.lockedCaptureOrientation,
     this.recordingOrientation,
     this.isPreviewPaused = false,
@@ -32,7 +31,7 @@
   });
 
   /// Creates a new camera controller state for an uninitialized controller.
-  const CameraValue.uninitialized(CameraDescription description)
+  const CameraValue.uninitialized()
       : this(
           isInitialized: false,
           isRecordingVideo: false,
@@ -44,7 +43,6 @@
           focusMode: FocusMode.auto,
           deviceOrientation: DeviceOrientation.portraitUp,
           isPreviewPaused: false,
-          description: description,
         );
 
   /// True after [CameraController.initialize] has completed successfully.
@@ -94,9 +92,6 @@
   /// The orientation of the currently running video recording.
   final DeviceOrientation? recordingOrientation;
 
-  /// The properties of the camera device controlled by this controller.
-  final CameraDescription description;
-
   /// Creates a modified copy of the object.
   ///
   /// Explicitly specified fields get the specified value, all other fields get
@@ -117,7 +112,6 @@
     Optional<DeviceOrientation>? lockedCaptureOrientation,
     Optional<DeviceOrientation>? recordingOrientation,
     bool? isPreviewPaused,
-    CameraDescription? description,
     Optional<DeviceOrientation>? previewPauseOrientation,
   }) {
     return CameraValue(
@@ -138,7 +132,6 @@
           ? this.recordingOrientation
           : recordingOrientation.orNull,
       isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
-      description: description ?? this.description,
       previewPauseOrientation: previewPauseOrientation == null
           ? this.previewPauseOrientation
           : previewPauseOrientation.orNull,
@@ -172,14 +165,14 @@
 class CameraController extends ValueNotifier<CameraValue> {
   /// Creates a new camera controller in an uninitialized state.
   CameraController(
-    CameraDescription cameraDescription,
+    this.description,
     this.resolutionPreset, {
     this.enableAudio = true,
     this.imageFormatGroup,
-  }) : super(CameraValue.uninitialized(cameraDescription));
+  }) : super(const CameraValue.uninitialized());
 
   /// The properties of the camera device controlled by this controller.
-  CameraDescription get description => value.description;
+  final CameraDescription description;
 
   /// The resolution this controller is targeting.
   ///
@@ -209,9 +202,7 @@
   int get cameraId => _cameraId;
 
   /// Initializes the camera on the device.
-  Future<void> initialize() => _initializeWithDescription(description);
-
-  Future<void> _initializeWithDescription(CameraDescription description) async {
+  Future<void> initialize() async {
     final Completer<CameraInitializedEvent> initializeCompleter =
         Completer<CameraInitializedEvent>();
 
@@ -243,7 +234,6 @@
 
     value = value.copyWith(
       isInitialized: true,
-      description: description,
       previewSize: await initializeCompleter.future
           .then((CameraInitializedEvent event) => Size(
                 event.previewWidth,
@@ -284,16 +274,6 @@
         previewPauseOrientation: const Optional<DeviceOrientation>.absent());
   }
 
-  /// Sets the description of the camera.
-  Future<void> setDescription(CameraDescription description) async {
-    if (value.isRecordingVideo) {
-      await CameraPlatform.instance.setDescriptionWhileRecording(description);
-      value = value.copyWith(description: description);
-    } else {
-      await _initializeWithDescription(description);
-    }
-  }
-
   /// Captures an image and returns the file where it was saved.
   ///
   /// Throws a [CameraException] if the capture fails.
diff --git a/packages/camera/camera_android/example/lib/main.dart b/packages/camera/camera_android/example/lib/main.dart
index dd02be3..79c2138 100644
--- a/packages/camera/camera_android/example/lib/main.dart
+++ b/packages/camera/camera_android/example/lib/main.dart
@@ -123,7 +123,7 @@
     if (state == AppLifecycleState.inactive) {
       cameraController.dispose();
     } else if (state == AppLifecycleState.resumed) {
-      _initializeCameraController(cameraController.description);
+      onNewCameraSelected(cameraController.description);
     }
   }
 
@@ -603,7 +603,10 @@
               title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
               groupValue: controller?.description,
               value: cameraDescription,
-              onChanged: onChanged,
+              onChanged:
+                  controller != null && controller!.value.isRecordingVideo
+                      ? null
+                      : onChanged,
             ),
           ),
         );
@@ -636,15 +639,17 @@
   }
 
   Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
-    if (controller != null) {
-      return controller!.setDescription(cameraDescription);
-    } else {
-      return _initializeCameraController(cameraDescription);
+    final CameraController? oldController = controller;
+    if (oldController != null) {
+      // `controller` needs to be set to null before getting disposed,
+      // to avoid a race condition when we use the controller that is being
+      // disposed. This happens when camera permission dialog shows up,
+      // which triggers `didChangeAppLifecycleState`, which disposes and
+      // re-creates the controller.
+      controller = null;
+      await oldController.dispose();
     }
-  }
 
-  Future<void> _initializeCameraController(
-      CameraDescription cameraDescription) async {
     final CameraController cameraController = CameraController(
       cameraDescription,
       kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
diff --git a/packages/camera/camera_android/example/pubspec.yaml b/packages/camera/camera_android/example/pubspec.yaml
index 8218273..aacd47f 100644
--- a/packages/camera/camera_android/example/pubspec.yaml
+++ b/packages/camera/camera_android/example/pubspec.yaml
@@ -14,7 +14,7 @@
     # The example app is bundled with the plugin so we use a path dependency on
     # the parent directory to use the current plugin's version.
     path: ../
-  camera_platform_interface: ^2.4.0
+  camera_platform_interface: ^2.3.1
   flutter:
     sdk: flutter
   path_provider: ^2.0.0
@@ -32,4 +32,3 @@
 
 flutter:
   uses-material-design: true
-
diff --git a/packages/camera/camera_android/lib/src/android_camera.dart b/packages/camera/camera_android/lib/src/android_camera.dart
index eca1003..9ab9b57 100644
--- a/packages/camera/camera_android/lib/src/android_camera.dart
+++ b/packages/camera/camera_android/lib/src/android_camera.dart
@@ -506,17 +506,6 @@
   }
 
   @override
-  Future<void> setDescriptionWhileRecording(
-      CameraDescription description) async {
-    await _channel.invokeMethod<double>(
-      'setDescriptionWhileRecording',
-      <String, dynamic>{
-        'cameraName': description.name,
-      },
-    );
-  }
-
-  @override
   Widget buildPreview(int cameraId) {
     return Texture(textureId: cameraId);
   }
diff --git a/packages/camera/camera_android/pubspec.yaml b/packages/camera/camera_android/pubspec.yaml
index 04890f2..5aab2cd 100644
--- a/packages/camera/camera_android/pubspec.yaml
+++ b/packages/camera/camera_android/pubspec.yaml
@@ -2,7 +2,7 @@
 description: Android implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_android
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.10.5
+version: 0.10.4+2
 
 environment:
   sdk: ">=2.17.0 <3.0.0"
@@ -18,7 +18,7 @@
         dartPluginClass: AndroidCamera
 
 dependencies:
-  camera_platform_interface: ^2.4.0
+  camera_platform_interface: ^2.3.1
   flutter:
     sdk: flutter
   flutter_plugin_android_lifecycle: ^2.0.2
diff --git a/packages/camera/camera_android/test/android_camera_test.dart b/packages/camera/camera_android/test/android_camera_test.dart
index b56aa4e..d80bd9c 100644
--- a/packages/camera/camera_android/test/android_camera_test.dart
+++ b/packages/camera/camera_android/test/android_camera_test.dart
@@ -700,29 +700,6 @@
       ]);
     });
 
-    test('Should set the description while recording', () async {
-      // Arrange
-      final MethodChannelMock channel = MethodChannelMock(
-        channelName: _channelName,
-        methods: <String, dynamic>{'setDescriptionWhileRecording': null},
-      );
-      const CameraDescription camera2Description = CameraDescription(
-          name: 'Test2',
-          lensDirection: CameraLensDirection.front,
-          sensorOrientation: 0);
-
-      // Act
-      await camera.setDescriptionWhileRecording(camera2Description);
-
-      // Assert
-      expect(channel.log, <Matcher>[
-        isMethodCall('setDescriptionWhileRecording',
-            arguments: <String, Object?>{
-              'cameraName': camera2Description.name,
-            }),
-      ]);
-    });
-
     test('Should set the flash mode', () async {
       // Arrange
       final MethodChannelMock channel = MethodChannelMock(