From d1ae26ef4502edd65309a78187d78b7d8f5c204f Mon Sep 17 00:00:00 2001 From: Stuart Morgan Date: Tue, 7 Mar 2023 14:44:52 -0500 Subject: [PATCH 1/2] Revert "[camera] Reland implementations of flip/change camera while recording (#3272)" This reverts commit d311478b6d3697bd957626b6c1515e8386533736. --- packages/camera/camera_android/CHANGELOG.md | 4 - .../io/flutter/plugins/camera/Camera.java | 175 ++------- .../plugins/camera/MethodCallHandlerImpl.java | 12 - .../flutter/plugins/camera/VideoRenderer.java | 364 ------------------ .../io/flutter/plugins/camera/CameraTest.java | 117 ------ .../example/integration_test/camera_test.dart | 61 --- .../example/lib/camera_controller.dart | 30 +- .../camera_android/example/lib/main.dart | 23 +- .../camera_android/example/pubspec.yaml | 3 +- .../lib/src/android_camera.dart | 11 - packages/camera/camera_android/pubspec.yaml | 4 +- .../test/android_camera_test.dart | 23 -- .../camera/camera_avfoundation/CHANGELOG.md | 3 +- .../example/integration_test/camera_test.dart | 41 -- .../ios/Runner.xcodeproj/project.pbxproj | 9 +- .../example/ios/Runner/Info.plist | 2 - .../example/ios/RunnerTests/CameraTestUtils.m | 13 +- .../example/lib/camera_controller.dart | 30 +- .../camera_avfoundation/example/lib/main.dart | 23 +- .../camera_avfoundation/example/pubspec.yaml | 2 +- .../ios/Classes/CameraPlugin.m | 2 - .../camera_avfoundation/ios/Classes/FLTCam.h | 2 - .../camera_avfoundation/ios/Classes/FLTCam.m | 175 +++------ .../ios/Classes/FLTCam_Test.h | 3 +- .../lib/src/avfoundation_camera.dart | 11 - .../camera/camera_avfoundation/pubspec.yaml | 4 +- .../test/avfoundation_camera_test.dart | 23 -- 27 files changed, 128 insertions(+), 1042 deletions(-) delete mode 100644 packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md index 53bf1ac1e94..8b0b6c19f28 100644 --- a/packages/camera/camera_android/CHANGELOG.md +++ b/packages/camera/camera_android/CHANGELOG.md @@ -1,7 +1,3 @@ -## 0.10.5 - -* Allows camera to be switched while video recording. - ## 0.10.4+2 * Aligns Dart and Flutter SDK constraints. diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java index afdc3831f03..264ab72f524 100644 --- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java +++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java @@ -96,28 +96,13 @@ class Camera * Holds all of the camera features/settings and will be used to update the request builder when * one changes. */ - private CameraFeatures cameraFeatures; - - private String imageFormatGroup; - - /** - * Takes an input/output surface and orients the recording correctly. This is needed because - * switching cameras while recording causes the wrong orientation. - */ - private VideoRenderer videoRenderer; - - /** - * Whether or not the camera aligns with the initial way the camera was facing if the camera was - * flipped. 
- */ - private int initialCameraFacing; + private final CameraFeatures cameraFeatures; private final SurfaceTextureEntry flutterTexture; - private final ResolutionPreset resolutionPreset; private final boolean enableAudio; private final Context applicationContext; private final DartMessenger dartMessenger; - private CameraProperties cameraProperties; + private final CameraProperties cameraProperties; private final CameraFeatureFactory cameraFeatureFactory; private final Activity activity; /** A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. */ @@ -207,7 +192,6 @@ public Camera( this.applicationContext = activity.getApplicationContext(); this.cameraProperties = cameraProperties; this.cameraFeatureFactory = cameraFeatureFactory; - this.resolutionPreset = resolutionPreset; this.cameraFeatures = CameraFeatures.init( cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset); @@ -248,7 +232,6 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException { if (mediaRecorder != null) { mediaRecorder.release(); } - closeRenderer(); final PlatformChannel.DeviceOrientation lockedOrientation = cameraFeatures.getSensorOrientation().getLockedCaptureOrientation(); @@ -276,7 +259,6 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException { @SuppressLint("MissingPermission") public void open(String imageFormatGroup) throws CameraAccessException { - this.imageFormatGroup = imageFormatGroup; final ResolutionFeature resolutionFeature = cameraFeatures.getResolution(); if (!resolutionFeature.checkIsSupported()) { @@ -321,16 +303,14 @@ public void onOpened(@NonNull CameraDevice device) { cameraDevice = new DefaultCameraDeviceWrapper(device); try { startPreview(); - if (!recordingVideo) // only send initialization if we werent already recording and switching cameras dartMessenger.sendCameraInitializedEvent( - resolutionFeature.getPreviewSize().getWidth(), - resolutionFeature.getPreviewSize().getHeight(), - cameraFeatures.getExposureLock().getValue(), - cameraFeatures.getAutoFocus().getValue(), - cameraFeatures.getExposurePoint().checkIsSupported(), - cameraFeatures.getFocusPoint().checkIsSupported()); - - } catch (CameraAccessException | InterruptedException e) { + resolutionFeature.getPreviewSize().getWidth(), + resolutionFeature.getPreviewSize().getHeight(), + cameraFeatures.getExposureLock().getValue(), + cameraFeatures.getAutoFocus().getValue(), + cameraFeatures.getExposurePoint().checkIsSupported(), + cameraFeatures.getFocusPoint().checkIsSupported()); + } catch (CameraAccessException e) { dartMessenger.sendCameraErrorEvent(e.getMessage()); close(); } @@ -340,8 +320,7 @@ public void onOpened(@NonNull CameraDevice device) { public void onClosed(@NonNull CameraDevice camera) { Log.i(TAG, "open | onClosed"); - // Prevents calls to methods that would otherwise result in IllegalStateException - // exceptions. + // Prevents calls to methods that would otherwise result in IllegalStateException exceptions. 
cameraDevice = null; closeCaptureSession(); dartMessenger.sendCameraClosingEvent(); @@ -756,7 +735,7 @@ public void startVideoRecording( if (imageStreamChannel != null) { setStreamHandler(imageStreamChannel); } - initialCameraFacing = cameraProperties.getLensFacing(); + recordingVideo = true; try { startCapture(true, imageStreamChannel != null); @@ -768,13 +747,6 @@ public void startVideoRecording( } } - private void closeRenderer() { - if (videoRenderer != null) { - videoRenderer.close(); - videoRenderer = null; - } - } - public void stopVideoRecording(@NonNull final Result result) { if (!recordingVideo) { result.success(null); @@ -785,7 +757,6 @@ public void stopVideoRecording(@NonNull final Result result) { cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false)); recordingVideo = false; try { - closeRenderer(); captureSession.abortCaptures(); mediaRecorder.stop(); } catch (CameraAccessException | IllegalStateException e) { @@ -794,7 +765,7 @@ public void stopVideoRecording(@NonNull final Result result) { mediaRecorder.reset(); try { startPreview(); - } catch (CameraAccessException | IllegalStateException | InterruptedException e) { + } catch (CameraAccessException | IllegalStateException e) { result.error("videoRecordingFailed", e.getMessage(), null); return; } @@ -1078,48 +1049,11 @@ public void resumePreview() { null, (code, message) -> dartMessenger.sendCameraErrorEvent(message)); } - public void startPreview() throws CameraAccessException, InterruptedException { - // If recording is already in progress, the camera is being flipped, so send it through the VideoRenderer to keep the correct orientation. - if (recordingVideo) { - startPreviewWithVideoRendererStream(); - } else { - startRegularPreview(); - } - } - - private void startRegularPreview() throws CameraAccessException { + public void startPreview() throws CameraAccessException { if (pictureImageReader == null || pictureImageReader.getSurface() == null) return; Log.i(TAG, "startPreview"); - createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface()); - } - - private void startPreviewWithVideoRendererStream() - throws CameraAccessException, InterruptedException { - if (videoRenderer == null) return; - - // get rotation for rendered video - final PlatformChannel.DeviceOrientation lockedOrientation = - cameraFeatures.getSensorOrientation().getLockedCaptureOrientation(); - DeviceOrientationManager orientationManager = - cameraFeatures.getSensorOrientation().getDeviceOrientationManager(); - - int rotation = 0; - if (orientationManager != null) { - rotation = - lockedOrientation == null - ? orientationManager.getVideoOrientation() - : orientationManager.getVideoOrientation(lockedOrientation); - } - - if (cameraProperties.getLensFacing() != initialCameraFacing) { - - // If the new camera is facing the opposite way than the initial recording, - // the rotation should be flipped 180 degrees. 
- rotation = (rotation + 180) % 360; - } - videoRenderer.setRotation(rotation); - createCaptureSession(CameraDevice.TEMPLATE_RECORD, videoRenderer.getInputSurface()); + createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface()); } public void startPreviewWithImageStream(EventChannel imageStreamChannel) @@ -1245,7 +1179,17 @@ private void closeCaptureSession() { public void close() { Log.i(TAG, "close"); - stopAndReleaseCamera(); + if (cameraDevice != null) { + cameraDevice.close(); + cameraDevice = null; + + // Closing the CameraDevice without closing the CameraCaptureSession is recommended + // for quickly closing the camera: + // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close() + captureSession = null; + } else { + closeCaptureSession(); + } if (pictureImageReader != null) { pictureImageReader.close(); @@ -1264,75 +1208,6 @@ public void close() { stopBackgroundThread(); } - private void stopAndReleaseCamera() { - if (cameraDevice != null) { - cameraDevice.close(); - cameraDevice = null; - - // Closing the CameraDevice without closing the CameraCaptureSession is recommended - // for quickly closing the camera: - // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close() - captureSession = null; - } else { - closeCaptureSession(); - } - } - - private void prepareVideoRenderer() { - if (videoRenderer != null) return; - final ResolutionFeature resolutionFeature = cameraFeatures.getResolution(); - - // handle videoRenderer errors - Thread.UncaughtExceptionHandler videoRendererUncaughtExceptionHandler = - new Thread.UncaughtExceptionHandler() { - @Override - public void uncaughtException(Thread thread, Throwable ex) { - dartMessenger.sendCameraErrorEvent( - "Failed to process frames after camera was flipped."); - } - }; - - videoRenderer = - new VideoRenderer( - mediaRecorder.getSurface(), - resolutionFeature.getCaptureSize().getWidth(), - resolutionFeature.getCaptureSize().getHeight(), - videoRendererUncaughtExceptionHandler); - } - - public void setDescriptionWhileRecording( - @NonNull final Result result, CameraProperties properties) { - - if (!recordingVideo) { - result.error("setDescriptionWhileRecordingFailed", "Device was not recording", null); - return; - } - - // See VideoRenderer.java requires API 26 to switch camera while recording - if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { - result.error( - "setDescriptionWhileRecordingFailed", - "Device does not support switching the camera while recording", - null); - return; - } - - stopAndReleaseCamera(); - prepareVideoRenderer(); - cameraProperties = properties; - cameraFeatures = - CameraFeatures.init( - cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset); - cameraFeatures.setAutoFocus( - cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true)); - try { - open(imageFormatGroup); - } catch (CameraAccessException e) { - result.error("setDescriptionWhileRecordingFailed", e.getMessage(), null); - } - result.success(null); - } - public void dispose() { Log.i(TAG, "dispose"); diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java index aad62bbaba8..432344ade8c 100644 --- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java +++ 
b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java @@ -354,18 +354,6 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result) result.success(null); break; } - case "setDescriptionWhileRecording": - { - try { - String cameraName = call.argument("cameraName"); - CameraProperties cameraProperties = - new CameraPropertiesImpl(cameraName, CameraUtils.getCameraManager(activity)); - camera.setDescriptionWhileRecording(result, cameraProperties); - } catch (Exception e) { - handleException(e, result); - } - break; - } case "dispose": { if (camera != null) { diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java deleted file mode 100644 index 62a70640961..00000000000 --- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java +++ /dev/null @@ -1,364 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -package io.flutter.plugins.camera; - -import static android.os.SystemClock.uptimeMillis; - -import android.graphics.SurfaceTexture; -import android.opengl.EGL14; -import android.opengl.EGLConfig; -import android.opengl.EGLContext; -import android.opengl.EGLDisplay; -import android.opengl.EGLExt; -import android.opengl.EGLSurface; -import android.opengl.GLES11Ext; -import android.opengl.GLES20; -import android.opengl.GLUtils; -import android.opengl.Matrix; -import android.os.Handler; -import android.os.HandlerThread; -import android.util.Log; -import android.view.Surface; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; - -/** - * Renders video onto texture after performing a matrix rotation on each frame. - * - *
<p>
VideoRenderer is needed because when switching between cameras mid recording, the orientation - * of the recording from the new camera usually becomes flipped. MediaRecorder has - * setOrientationHint, but that cannot be called mid recording and therefore isn't useful. Android - * Camera2 has no setDisplayOrientation on the camera itself as it is supposed to 'just work' (see - * https://stackoverflow.com/questions/33479004/what-is-the-camera2-api-equivalent-of-setdisplayorientation). - * Therefore it cannot be used to set the camera's orientation either. - * - *
<p>
This leaves the solution to be routing the recording through a surface texture and performing - * a matrix transformation on it manually to get the correct orientation. This only happens when - * setDescription is called mid video recording. - */ -public class VideoRenderer { - - private static String TAG = "VideoRenderer"; - - private static final String vertexShaderCode = - " precision highp float;\n" - + " attribute vec3 vertexPosition;\n" - + " attribute vec2 uvs;\n" - + " varying vec2 varUvs;\n" - + " uniform mat4 texMatrix;\n" - + " uniform mat4 mvp;\n" - + "\n" - + " void main()\n" - + " {\n" - + " varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;\n" - + " gl_Position = mvp * vec4(vertexPosition, 1.0);\n" - + " }"; - - private static final String fragmentShaderCode = - " #extension GL_OES_EGL_image_external : require\n" - + " precision mediump float;\n" - + "\n" - + " varying vec2 varUvs;\n" - + " uniform samplerExternalOES texSampler;\n" - + "\n" - + " void main()\n" - + " {\n" - + " vec4 c = texture2D(texSampler, varUvs);\n" - + " gl_FragColor = vec4(c.r, c.g, c.b, c.a);\n" - + " }"; - - private final int[] textureHandles = new int[1]; - - private final float[] vertices = - new float[] { - -1.0f, -1.0f, 0.0f, 0f, 0f, -1.0f, 1.0f, 0.0f, 0f, 1f, 1.0f, 1.0f, 0.0f, 1f, 1f, 1.0f, - -1.0f, 0.0f, 1f, 0f - }; - - private final int[] indices = new int[] {2, 1, 0, 0, 3, 2}; - - private int program; - private int vertexHandle = 0; - private final int[] bufferHandles = new int[2]; - private int uvsHandle = 0; - private int texMatrixHandle = 0; - private int mvpHandle = 0; - - EGLDisplay display; - EGLContext context; - EGLSurface surface; - private Thread thread; - private final Surface outputSurface; - private SurfaceTexture inputSurfaceTexture; - private Surface inputSurface; - - private HandlerThread surfaceTextureFrameAvailableHandler; - private final Object surfaceTextureAvailableFrameLock = new Object(); - private Boolean surfaceTextureFrameAvailable = false; - - private final int recordingWidth; - private final int recordingHeight; - private int rotation = 0; - - private final Object lock = new Object(); - - private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler; - - /** Gets surface for input. Blocks until surface is ready. */ - public Surface getInputSurface() throws InterruptedException { - synchronized (lock) { - while (inputSurface == null) { - lock.wait(); - } - } - return inputSurface; - } - - public VideoRenderer( - Surface outputSurface, - int recordingWidth, - int recordingHeight, - Thread.UncaughtExceptionHandler uncaughtExceptionHandler) { - this.outputSurface = outputSurface; - this.recordingHeight = recordingHeight; - this.recordingWidth = recordingWidth; - this.uncaughtExceptionHandler = uncaughtExceptionHandler; - startOpenGL(); - Log.d(TAG, "VideoRenderer setup complete"); - } - - /** Stop rendering and cleanup resources. */ - public void close() { - thread.interrupt(); - surfaceTextureFrameAvailableHandler.quitSafely(); - cleanupOpenGL(); - inputSurfaceTexture.release(); - } - - private void cleanupOpenGL() { - GLES20.glDeleteBuffers(2, bufferHandles, 0); - GLES20.glDeleteTextures(1, textureHandles, 0); - EGL14.eglDestroyContext(display, context); - EGL14.eglDestroySurface(display, surface); - GLES20.glDeleteProgram(program); - } - - /** Configures openGL. Must be called in same thread as draw is called. 
*/ - private void configureOpenGL() { - synchronized (lock) { - display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); - if (display == EGL14.EGL_NO_DISPLAY) - throw new RuntimeException( - "eglDisplay == EGL14.EGL_NO_DISPLAY: " - + GLUtils.getEGLErrorString(EGL14.eglGetError())); - - int[] version = new int[2]; - if (!EGL14.eglInitialize(display, version, 0, version, 1)) - throw new RuntimeException( - "eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError())); - - String eglExtensions = EGL14.eglQueryString(display, EGL14.EGL_EXTENSIONS); - if (!eglExtensions.contains("EGL_ANDROID_presentation_time")) - throw new RuntimeException( - "cannot configure OpenGL. missing EGL_ANDROID_presentation_time"); - - int[] attribList = - new int[] { - EGL14.EGL_RED_SIZE, 8, - EGL14.EGL_GREEN_SIZE, 8, - EGL14.EGL_BLUE_SIZE, 8, - EGL14.EGL_ALPHA_SIZE, 8, - EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, - EGLExt.EGL_RECORDABLE_ANDROID, 1, - EGL14.EGL_NONE - }; - - EGLConfig[] configs = new EGLConfig[1]; - int[] numConfigs = new int[1]; - if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, configs.length, numConfigs, 0)) - throw new RuntimeException(GLUtils.getEGLErrorString(EGL14.eglGetError())); - - int err = EGL14.eglGetError(); - if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err)); - - int[] ctxAttribs = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}; - context = EGL14.eglCreateContext(display, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0); - - err = EGL14.eglGetError(); - if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err)); - - int[] surfaceAttribs = new int[] {EGL14.EGL_NONE}; - - surface = EGL14.eglCreateWindowSurface(display, configs[0], outputSurface, surfaceAttribs, 0); - - err = EGL14.eglGetError(); - if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err)); - - if (!EGL14.eglMakeCurrent(display, surface, surface, context)) - throw new RuntimeException( - "eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError())); - - ByteBuffer vertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4); - vertexBuffer.order(ByteOrder.nativeOrder()); - vertexBuffer.asFloatBuffer().put(vertices); - vertexBuffer.asFloatBuffer().position(0); - - ByteBuffer indexBuffer = ByteBuffer.allocateDirect(indices.length * 4); - indexBuffer.order(ByteOrder.nativeOrder()); - indexBuffer.asIntBuffer().put(indices); - indexBuffer.position(0); - - int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode); - int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode); - - program = GLES20.glCreateProgram(); - - GLES20.glAttachShader(program, vertexShader); - GLES20.glAttachShader(program, fragmentShader); - GLES20.glLinkProgram(program); - - deleteShader(vertexShader); - deleteShader(fragmentShader); - - vertexHandle = GLES20.glGetAttribLocation(program, "vertexPosition"); - uvsHandle = GLES20.glGetAttribLocation(program, "uvs"); - texMatrixHandle = GLES20.glGetUniformLocation(program, "texMatrix"); - mvpHandle = GLES20.glGetUniformLocation(program, "mvp"); - - // Initialize buffers - GLES20.glGenBuffers(2, bufferHandles, 0); - - GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]); - GLES20.glBufferData( - GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW); - - GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]); - GLES20.glBufferData( - GLES20.GL_ELEMENT_ARRAY_BUFFER, 
indices.length * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW); - - // Init texture that will receive decoded frames - GLES20.glGenTextures(1, textureHandles, 0); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureHandles[0]); - - inputSurfaceTexture = new SurfaceTexture(getTexId()); - inputSurfaceTexture.setDefaultBufferSize(recordingWidth, recordingHeight); - surfaceTextureFrameAvailableHandler = new HandlerThread("FrameHandlerThread"); - surfaceTextureFrameAvailableHandler.start(); - inputSurface = new Surface(inputSurfaceTexture); - - inputSurfaceTexture.setOnFrameAvailableListener( - new SurfaceTexture.OnFrameAvailableListener() { - @Override - public void onFrameAvailable(SurfaceTexture surfaceTexture) { - synchronized (surfaceTextureAvailableFrameLock) { - if (surfaceTextureFrameAvailable) - Log.w(TAG, "Frame available before processing other frames. dropping frames"); - surfaceTextureFrameAvailable = true; - surfaceTextureAvailableFrameLock.notifyAll(); - } - } - }, - new Handler(surfaceTextureFrameAvailableHandler.getLooper())); - lock.notifyAll(); - } - } - - /** Starts and configures Video Renderer. */ - private void startOpenGL() { - Log.d(TAG, "Starting OpenGL Thread"); - thread = - new Thread() { - @Override - public void run() { - - configureOpenGL(); - - try { - // Continuously pull frames from input surface texture and use videoRenderer to modify - // to correct rotation. - while (!Thread.interrupted()) { - - synchronized (surfaceTextureAvailableFrameLock) { - while (!surfaceTextureFrameAvailable) { - surfaceTextureAvailableFrameLock.wait(500); - } - surfaceTextureFrameAvailable = false; - } - - inputSurfaceTexture.updateTexImage(); - - float[] surfaceTextureMatrix = new float[16]; - inputSurfaceTexture.getTransformMatrix(surfaceTextureMatrix); - - draw(recordingWidth, recordingHeight, surfaceTextureMatrix); - } - } catch (InterruptedException e) { - Log.d(TAG, "thread interrupted while waiting for frames"); - } - } - }; - thread.setUncaughtExceptionHandler(uncaughtExceptionHandler); - thread.start(); - } - - public int getTexId() { - return textureHandles[0]; - } - - public float[] moveMatrix() { - float[] m = new float[16]; - Matrix.setIdentityM(m, 0); - Matrix.rotateM(m, 0, rotation, 0, 0, 1); - return m; - } - - public void setRotation(int rotation) { - this.rotation = rotation; - } - - private int loadShader(int type, String code) { - - int shader = GLES20.glCreateShader(type); - - GLES20.glShaderSource(shader, code); - GLES20.glCompileShader(shader); - return shader; - } - - private void deleteShader(int shader) { - GLES20.glDeleteShader(shader); - } - - public void draw(int viewportWidth, int viewportHeight, float[] texMatrix) { - - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); - GLES20.glClearColor(0f, 0f, 0f, 0f); - - GLES20.glViewport(0, 0, viewportWidth, viewportHeight); - - GLES20.glUseProgram(program); - - // Pass transformations to shader - GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, texMatrix, 0); - GLES20.glUniformMatrix4fv(mvpHandle, 1, false, moveMatrix(), 0); - - // Prepare buffers with vertices and indices & draw - GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]); - GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]); - - GLES20.glEnableVertexAttribArray(vertexHandle); - GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0); - - GLES20.glEnableVertexAttribArray(uvsHandle); - GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 
4); - - GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0); - - EGLExt.eglPresentationTimeANDROID(display, surface, uptimeMillis() * 1000000); - if (!EGL14.eglSwapBuffers(display, surface)) { - Log.w(TAG, "eglSwapBuffers() " + GLUtils.getEGLErrorString(EGL14.eglGetError())); - } - } -} diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java index 9de33e3dc7a..9a679017ded 100644 --- a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java +++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java @@ -602,123 +602,6 @@ public void resumeVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() { verify(mockResult, never()).error(any(), any(), any()); } - @Test - public void setDescriptionWhileRecording() { - MethodChannel.Result mockResult = mock(MethodChannel.Result.class); - MediaRecorder mockMediaRecorder = mock(MediaRecorder.class); - VideoRenderer mockVideoRenderer = mock(VideoRenderer.class); - TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder); - TestUtils.setPrivateField(camera, "recordingVideo", true); - TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer); - - final CameraProperties newCameraProperties = mock(CameraProperties.class); - camera.setDescriptionWhileRecording(mockResult, newCameraProperties); - - if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { - verify(mockResult, times(1)) - .error( - eq("setDescriptionWhileRecordingFailed"), - eq("Device does not support switching the camera while recording"), - eq(null)); - } else { - verify(mockResult, times(1)).success(null); - verify(mockResult, never()).error(any(), any(), any()); - } - } - - @Test - public void startPreview_shouldPullStreamFromVideoRenderer() - throws InterruptedException, CameraAccessException { - VideoRenderer mockVideoRenderer = mock(VideoRenderer.class); - ArrayList mockRequestBuilders = new ArrayList<>(); - mockRequestBuilders.add(mock(CaptureRequest.Builder.class)); - SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class); - Size mockSize = mock(Size.class); - TestUtils.setPrivateField(camera, "recordingVideo", true); - TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer); - CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders); - TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera); - - TextureRegistry.SurfaceTextureEntry cameraFlutterTexture = - (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture"); - ResolutionFeature resolutionFeature = - (ResolutionFeature) - TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature"); - - when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture); - when(resolutionFeature.getPreviewSize()).thenReturn(mockSize); - - camera.startPreview(); - verify(mockVideoRenderer, times(1)) - .getInputSurface(); // stream pulled from videoRenderer's surface. 
- } - - @Test - public void startPreview_shouldPullStreamFromImageReader() - throws InterruptedException, CameraAccessException { - ArrayList mockRequestBuilders = new ArrayList<>(); - mockRequestBuilders.add(mock(CaptureRequest.Builder.class)); - SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class); - Size mockSize = mock(Size.class); - ImageReader mockImageReader = mock(ImageReader.class); - TestUtils.setPrivateField(camera, "recordingVideo", false); - TestUtils.setPrivateField(camera, "pictureImageReader", mockImageReader); - CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders); - TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera); - - TextureRegistry.SurfaceTextureEntry cameraFlutterTexture = - (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture"); - ResolutionFeature resolutionFeature = - (ResolutionFeature) - TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature"); - - when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture); - when(resolutionFeature.getPreviewSize()).thenReturn(mockSize); - - camera.startPreview(); - verify(mockImageReader, times(1)) - .getSurface(); // stream pulled from regular imageReader's surface. - } - - @Test - public void startPreview_shouldFlipRotation() throws InterruptedException, CameraAccessException { - VideoRenderer mockVideoRenderer = mock(VideoRenderer.class); - ArrayList mockRequestBuilders = new ArrayList<>(); - mockRequestBuilders.add(mock(CaptureRequest.Builder.class)); - SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class); - Size mockSize = mock(Size.class); - TestUtils.setPrivateField(camera, "recordingVideo", true); - TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer); - TestUtils.setPrivateField(camera, "initialCameraFacing", CameraMetadata.LENS_FACING_BACK); - CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders); - TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera); - - TextureRegistry.SurfaceTextureEntry cameraFlutterTexture = - (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture"); - ResolutionFeature resolutionFeature = - (ResolutionFeature) - TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature"); - - when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture); - when(resolutionFeature.getPreviewSize()).thenReturn(mockSize); - when(mockCameraProperties.getLensFacing()).thenReturn(CameraMetadata.LENS_FACING_FRONT); - - camera.startPreview(); - verify(mockVideoRenderer, times(1)).setRotation(180); - } - - @Test - public void setDescriptionWhileRecording_shouldErrorWhenNotRecording() { - MethodChannel.Result mockResult = mock(MethodChannel.Result.class); - TestUtils.setPrivateField(camera, "recordingVideo", false); - final CameraProperties newCameraProperties = mock(CameraProperties.class); - camera.setDescriptionWhileRecording(mockResult, newCameraProperties); - - verify(mockResult, times(1)) - .error("setDescriptionWhileRecordingFailed", "Device was not recording", null); - verify(mockResult, never()).success(any()); - } - @Test public void resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() { diff --git a/packages/camera/camera_android/example/integration_test/camera_test.dart b/packages/camera/camera_android/example/integration_test/camera_test.dart index 8d663074df7..e499872da5f 100644 --- 
a/packages/camera/camera_android/example/integration_test/camera_test.dart +++ b/packages/camera/camera_android/example/integration_test/camera_test.dart @@ -9,7 +9,6 @@ import 'package:camera_android/camera_android.dart'; import 'package:camera_example/camera_controller.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/painting.dart'; -import 'package:flutter/services.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; import 'package:path_provider/path_provider.dart'; @@ -206,66 +205,6 @@ void main() { expect(duration, lessThan(recordingTime - timePaused)); }); - testWidgets('Set description while recording', (WidgetTester tester) async { - final List cameras = - await CameraPlatform.instance.availableCameras(); - if (cameras.length < 2) { - return; - } - - final CameraController controller = CameraController( - cameras[0], - ResolutionPreset.low, - enableAudio: false, - ); - - await controller.initialize(); - await controller.prepareForVideoRecording(); - - await controller.startVideoRecording(); - - // SDK < 26 will throw a platform error when trying to switch and keep the same camera - // we accept either outcome here, while the native unit tests check the outcome based on the current Android SDK - bool failed = false; - try { - await controller.setDescription(cameras[1]); - } catch (err) { - expect(err, isA()); - expect( - (err as PlatformException).message, - equals( - 'Device does not support switching the camera while recording')); - failed = true; - } - - if (failed) { - // cameras did not switch - expect(controller.description, cameras[0]); - } else { - // cameras switched - expect(controller.description, cameras[1]); - } - }); - - testWidgets('Set description', (WidgetTester tester) async { - final List cameras = - await CameraPlatform.instance.availableCameras(); - if (cameras.length < 2) { - return; - } - - final CameraController controller = CameraController( - cameras[0], - ResolutionPreset.low, - enableAudio: false, - ); - - await controller.initialize(); - await controller.setDescription(cameras[1]); - - expect(controller.description, cameras[1]); - }); - testWidgets( 'image streaming', (WidgetTester tester) async { diff --git a/packages/camera/camera_android/example/lib/camera_controller.dart b/packages/camera/camera_android/example/lib/camera_controller.dart index fd4f09a027b..8139dcdb022 100644 --- a/packages/camera/camera_android/example/lib/camera_controller.dart +++ b/packages/camera/camera_android/example/lib/camera_controller.dart @@ -24,7 +24,6 @@ class CameraValue { required this.exposureMode, required this.focusMode, required this.deviceOrientation, - required this.description, this.lockedCaptureOrientation, this.recordingOrientation, this.isPreviewPaused = false, @@ -32,7 +31,7 @@ class CameraValue { }); /// Creates a new camera controller state for an uninitialized controller. - const CameraValue.uninitialized(CameraDescription description) + const CameraValue.uninitialized() : this( isInitialized: false, isRecordingVideo: false, @@ -44,7 +43,6 @@ class CameraValue { focusMode: FocusMode.auto, deviceOrientation: DeviceOrientation.portraitUp, isPreviewPaused: false, - description: description, ); /// True after [CameraController.initialize] has completed successfully. @@ -94,9 +92,6 @@ class CameraValue { /// The orientation of the currently running video recording. final DeviceOrientation? 
recordingOrientation; - /// The properties of the camera device controlled by this controller. - final CameraDescription description; - /// Creates a modified copy of the object. /// /// Explicitly specified fields get the specified value, all other fields get @@ -117,7 +112,6 @@ class CameraValue { Optional? lockedCaptureOrientation, Optional? recordingOrientation, bool? isPreviewPaused, - CameraDescription? description, Optional? previewPauseOrientation, }) { return CameraValue( @@ -138,7 +132,6 @@ class CameraValue { ? this.recordingOrientation : recordingOrientation.orNull, isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused, - description: description ?? this.description, previewPauseOrientation: previewPauseOrientation == null ? this.previewPauseOrientation : previewPauseOrientation.orNull, @@ -172,14 +165,14 @@ class CameraValue { class CameraController extends ValueNotifier { /// Creates a new camera controller in an uninitialized state. CameraController( - CameraDescription cameraDescription, + this.description, this.resolutionPreset, { this.enableAudio = true, this.imageFormatGroup, - }) : super(CameraValue.uninitialized(cameraDescription)); + }) : super(const CameraValue.uninitialized()); /// The properties of the camera device controlled by this controller. - CameraDescription get description => value.description; + final CameraDescription description; /// The resolution this controller is targeting. /// @@ -209,9 +202,7 @@ class CameraController extends ValueNotifier { int get cameraId => _cameraId; /// Initializes the camera on the device. - Future initialize() => _initializeWithDescription(description); - - Future _initializeWithDescription(CameraDescription description) async { + Future initialize() async { final Completer initializeCompleter = Completer(); @@ -243,7 +234,6 @@ class CameraController extends ValueNotifier { value = value.copyWith( isInitialized: true, - description: description, previewSize: await initializeCompleter.future .then((CameraInitializedEvent event) => Size( event.previewWidth, @@ -284,16 +274,6 @@ class CameraController extends ValueNotifier { previewPauseOrientation: const Optional.absent()); } - /// Sets the description of the camera. - Future setDescription(CameraDescription description) async { - if (value.isRecordingVideo) { - await CameraPlatform.instance.setDescriptionWhileRecording(description); - value = value.copyWith(description: description); - } else { - await _initializeWithDescription(description); - } - } - /// Captures an image and returns the file where it was saved. /// /// Throws a [CameraException] if the capture fails. diff --git a/packages/camera/camera_android/example/lib/main.dart b/packages/camera/camera_android/example/lib/main.dart index dd02be3d7ae..79c21386439 100644 --- a/packages/camera/camera_android/example/lib/main.dart +++ b/packages/camera/camera_android/example/lib/main.dart @@ -123,7 +123,7 @@ class _CameraExampleHomeState extends State if (state == AppLifecycleState.inactive) { cameraController.dispose(); } else if (state == AppLifecycleState.resumed) { - _initializeCameraController(cameraController.description); + onNewCameraSelected(cameraController.description); } } @@ -603,7 +603,10 @@ class _CameraExampleHomeState extends State title: Icon(getCameraLensIcon(cameraDescription.lensDirection)), groupValue: controller?.description, value: cameraDescription, - onChanged: onChanged, + onChanged: + controller != null && controller!.value.isRecordingVideo + ? 
null + : onChanged, ), ), ); @@ -636,15 +639,17 @@ class _CameraExampleHomeState extends State } Future onNewCameraSelected(CameraDescription cameraDescription) async { - if (controller != null) { - return controller!.setDescription(cameraDescription); - } else { - return _initializeCameraController(cameraDescription); + final CameraController? oldController = controller; + if (oldController != null) { + // `controller` needs to be set to null before getting disposed, + // to avoid a race condition when we use the controller that is being + // disposed. This happens when camera permission dialog shows up, + // which triggers `didChangeAppLifecycleState`, which disposes and + // re-creates the controller. + controller = null; + await oldController.dispose(); } - } - Future _initializeCameraController( - CameraDescription cameraDescription) async { final CameraController cameraController = CameraController( cameraDescription, kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium, diff --git a/packages/camera/camera_android/example/pubspec.yaml b/packages/camera/camera_android/example/pubspec.yaml index 8218273cdec..aacd47f6773 100644 --- a/packages/camera/camera_android/example/pubspec.yaml +++ b/packages/camera/camera_android/example/pubspec.yaml @@ -14,7 +14,7 @@ dependencies: # The example app is bundled with the plugin so we use a path dependency on # the parent directory to use the current plugin's version. path: ../ - camera_platform_interface: ^2.4.0 + camera_platform_interface: ^2.3.1 flutter: sdk: flutter path_provider: ^2.0.0 @@ -32,4 +32,3 @@ dev_dependencies: flutter: uses-material-design: true - diff --git a/packages/camera/camera_android/lib/src/android_camera.dart b/packages/camera/camera_android/lib/src/android_camera.dart index eca1003247c..9ab9b578616 100644 --- a/packages/camera/camera_android/lib/src/android_camera.dart +++ b/packages/camera/camera_android/lib/src/android_camera.dart @@ -505,17 +505,6 @@ class AndroidCamera extends CameraPlatform { ); } - @override - Future setDescriptionWhileRecording( - CameraDescription description) async { - await _channel.invokeMethod( - 'setDescriptionWhileRecording', - { - 'cameraName': description.name, - }, - ); - } - @override Widget buildPreview(int cameraId) { return Texture(textureId: cameraId); diff --git a/packages/camera/camera_android/pubspec.yaml b/packages/camera/camera_android/pubspec.yaml index 04890f27064..5aab2cda9eb 100644 --- a/packages/camera/camera_android/pubspec.yaml +++ b/packages/camera/camera_android/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_android description: Android implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_android issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.10.5 +version: 0.10.4+2 environment: sdk: ">=2.17.0 <3.0.0" @@ -18,7 +18,7 @@ flutter: dartPluginClass: AndroidCamera dependencies: - camera_platform_interface: ^2.4.0 + camera_platform_interface: ^2.3.1 flutter: sdk: flutter flutter_plugin_android_lifecycle: ^2.0.2 diff --git a/packages/camera/camera_android/test/android_camera_test.dart b/packages/camera/camera_android/test/android_camera_test.dart index b56aa4e352a..d80bd9cac7a 100644 --- a/packages/camera/camera_android/test/android_camera_test.dart +++ b/packages/camera/camera_android/test/android_camera_test.dart @@ -700,29 +700,6 @@ void main() { ]); }); - test('Should set the description while recording', () async { - // Arrange - final MethodChannelMock channel = MethodChannelMock( - channelName: _channelName, - methods: {'setDescriptionWhileRecording': null}, - ); - const CameraDescription camera2Description = CameraDescription( - name: 'Test2', - lensDirection: CameraLensDirection.front, - sensorOrientation: 0); - - // Act - await camera.setDescriptionWhileRecording(camera2Description); - - // Assert - expect(channel.log, [ - isMethodCall('setDescriptionWhileRecording', - arguments: { - 'cameraName': camera2Description.name, - }), - ]); - }); - test('Should set the flash mode', () async { // Arrange final MethodChannelMock channel = MethodChannelMock( diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index 10f4fd804ad..fe4d4e7ee68 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,6 +1,5 @@ -## 0.9.13 +## NEXT -* Allows camera to be switched while video recording. * Aligns Dart and Flutter SDK constraints. ## 0.9.12 diff --git a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart index 315a34e9f9b..34d460d44ec 100644 --- a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart +++ b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart @@ -198,47 +198,6 @@ void main() { expect(duration, lessThan(recordingTime - timePaused)); }); - testWidgets('Set description while recording', (WidgetTester tester) async { - final List cameras = - await CameraPlatform.instance.availableCameras(); - if (cameras.length < 2) { - return; - } - - final CameraController controller = CameraController( - cameras[0], - ResolutionPreset.low, - enableAudio: false, - ); - - await controller.initialize(); - await controller.prepareForVideoRecording(); - - await controller.startVideoRecording(); - await controller.setDescription(cameras[1]); - - expect(controller.description, cameras[1]); - }); - - testWidgets('Set description', (WidgetTester tester) async { - final List cameras = - await CameraPlatform.instance.availableCameras(); - if (cameras.length < 2) { - return; - } - - final CameraController controller = CameraController( - cameras[0], - ResolutionPreset.low, - enableAudio: false, - ); - - await controller.initialize(); - await controller.setDescription(cameras[1]); - - expect(controller.description, cameras[1]); - }); - /// Start streaming with specifying the ImageFormatGroup. Future startStreaming(List cameras, ImageFormatGroup? 
imageFormatGroup) async { diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index a20638d18e1..d70d27fd8c2 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -290,7 +290,6 @@ }; 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; - DevelopmentTeam = W4ZTW5E78A; }; }; }; @@ -641,7 +640,7 @@ baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = W4ZTW5E78A; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -653,7 +652,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = com.bradenbagby.test; + PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample; PRODUCT_NAME = "$(TARGET_NAME)"; }; name = Debug; @@ -663,7 +662,7 @@ baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = W4ZTW5E78A; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -675,7 +674,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = com.bradenbagby.test; + PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample; PRODUCT_NAME = "$(TARGET_NAME)"; }; name = Release; diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist index ca93baac701..bacd9e54f1e 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist +++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist @@ -2,8 +2,6 @@ - CADisableMinimumFrameDurationOnPhone - CFBundleDevelopmentRegion en CFBundleExecutable diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index b42aa34e2a1..0ae4887eb63 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -11,20 +11,15 @@ OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) .andReturn(inputMock); - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op - OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + id sessionMock = OCMClassMock([AVCaptureSession class]); + OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op + OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); return [[FLTCam alloc] initWithCameraName:@"camera" resolutionPreset:@"medium" enableAudio:true orientation:UIDeviceOrientationPortrait - videoCaptureSession:videoSessionMock - audioCaptureSession:audioSessionMock + captureSession:sessionMock captureSessionQueue:captureSessionQueue error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart 
b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart index 6e1804328d5..524186816aa 100644 --- a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart +++ b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart @@ -24,7 +24,6 @@ class CameraValue { required this.exposureMode, required this.focusMode, required this.deviceOrientation, - required this.description, this.lockedCaptureOrientation, this.recordingOrientation, this.isPreviewPaused = false, @@ -32,7 +31,7 @@ class CameraValue { }); /// Creates a new camera controller state for an uninitialized controller. - const CameraValue.uninitialized(CameraDescription description) + const CameraValue.uninitialized() : this( isInitialized: false, isRecordingVideo: false, @@ -44,7 +43,6 @@ class CameraValue { focusMode: FocusMode.auto, deviceOrientation: DeviceOrientation.portraitUp, isPreviewPaused: false, - description: description, ); /// True after [CameraController.initialize] has completed successfully. @@ -94,9 +92,6 @@ class CameraValue { /// The orientation of the currently running video recording. final DeviceOrientation? recordingOrientation; - /// The properties of the camera device controlled by this controller. - final CameraDescription description; - /// Creates a modified copy of the object. /// /// Explicitly specified fields get the specified value, all other fields get @@ -117,7 +112,6 @@ class CameraValue { Optional? lockedCaptureOrientation, Optional? recordingOrientation, bool? isPreviewPaused, - CameraDescription? description, Optional? previewPauseOrientation, }) { return CameraValue( @@ -138,7 +132,6 @@ class CameraValue { ? this.recordingOrientation : recordingOrientation.orNull, isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused, - description: description ?? this.description, previewPauseOrientation: previewPauseOrientation == null ? this.previewPauseOrientation : previewPauseOrientation.orNull, @@ -172,14 +165,14 @@ class CameraValue { class CameraController extends ValueNotifier { /// Creates a new camera controller in an uninitialized state. CameraController( - CameraDescription cameraDescription, + this.description, this.resolutionPreset, { this.enableAudio = true, this.imageFormatGroup, - }) : super(CameraValue.uninitialized(cameraDescription)); + }) : super(const CameraValue.uninitialized()); /// The properties of the camera device controlled by this controller. - CameraDescription get description => value.description; + final CameraDescription description; /// The resolution this controller is targeting. /// @@ -209,9 +202,7 @@ class CameraController extends ValueNotifier { int get cameraId => _cameraId; /// Initializes the camera on the device. 
- Future initialize() => _initializeWithDescription(description); - - Future _initializeWithDescription(CameraDescription description) async { + Future initialize() async { final Completer initializeCompleter = Completer(); @@ -243,7 +234,6 @@ class CameraController extends ValueNotifier { value = value.copyWith( isInitialized: true, - description: description, previewSize: await initializeCompleter.future .then((CameraInitializedEvent event) => Size( event.previewWidth, @@ -284,16 +274,6 @@ class CameraController extends ValueNotifier { previewPauseOrientation: const Optional.absent()); } - /// Sets the description of the camera - Future setDescription(CameraDescription description) async { - if (value.isRecordingVideo) { - await CameraPlatform.instance.setDescriptionWhileRecording(description); - value = value.copyWith(description: description); - } else { - await _initializeWithDescription(description); - } - } - /// Captures an image and returns the file where it was saved. /// /// Throws a [CameraException] if the capture fails. diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart index dd02be3d7ae..79c21386439 100644 --- a/packages/camera/camera_avfoundation/example/lib/main.dart +++ b/packages/camera/camera_avfoundation/example/lib/main.dart @@ -123,7 +123,7 @@ class _CameraExampleHomeState extends State if (state == AppLifecycleState.inactive) { cameraController.dispose(); } else if (state == AppLifecycleState.resumed) { - _initializeCameraController(cameraController.description); + onNewCameraSelected(cameraController.description); } } @@ -603,7 +603,10 @@ class _CameraExampleHomeState extends State title: Icon(getCameraLensIcon(cameraDescription.lensDirection)), groupValue: controller?.description, value: cameraDescription, - onChanged: onChanged, + onChanged: + controller != null && controller!.value.isRecordingVideo + ? null + : onChanged, ), ), ); @@ -636,15 +639,17 @@ class _CameraExampleHomeState extends State } Future onNewCameraSelected(CameraDescription cameraDescription) async { - if (controller != null) { - return controller!.setDescription(cameraDescription); - } else { - return _initializeCameraController(cameraDescription); + final CameraController? oldController = controller; + if (oldController != null) { + // `controller` needs to be set to null before getting disposed, + // to avoid a race condition when we use the controller that is being + // disposed. This happens when camera permission dialog shows up, + // which triggers `didChangeAppLifecycleState`, which disposes and + // re-creates the controller. + controller = null; + await oldController.dispose(); } - } - Future _initializeCameraController( - CameraDescription cameraDescription) async { final CameraController cameraController = CameraController( cameraDescription, kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium, diff --git a/packages/camera/camera_avfoundation/example/pubspec.yaml b/packages/camera/camera_avfoundation/example/pubspec.yaml index 0c471ff4848..1e966b1c584 100644 --- a/packages/camera/camera_avfoundation/example/pubspec.yaml +++ b/packages/camera/camera_avfoundation/example/pubspec.yaml @@ -14,7 +14,7 @@ dependencies: # The example app is bundled with the plugin so we use a path dependency on # the parent directory to use the current plugin's version. 
path: ../ - camera_platform_interface: ^2.4.0 + camera_platform_interface: ^2.2.0 flutter: sdk: flutter path_provider: ^2.0.0 diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m index 874f37b6c4f..535887622c5 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m @@ -253,8 +253,6 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call [_camera pausePreviewWithResult:result]; } else if ([@"resumePreview" isEqualToString:call.method]) { [_camera resumePreviewWithResult:result]; - } else if ([@"setDescriptionWhileRecording" isEqualToString:call.method]) { - [_camera setDescriptionWhileRecording:(call.arguments[@"cameraName"]) result:result]; } else { [result sendNotImplemented]; } diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h index fbf4ef4882c..50a035494c7 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h @@ -95,8 +95,6 @@ NS_ASSUME_NONNULL_BEGIN - (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; - (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result; - (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result; -- (void)setDescriptionWhileRecording:(NSString *)cameraName - result:(FLTThreadSafeFlutterResult *)result; - (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y; - (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y; - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset; diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m index 31bffc91794..2bfa3e5f1fa 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m @@ -43,8 +43,7 @@ @interface FLTCam () setDescriptionWhileRecording( - CameraDescription description) async { - await _channel.invokeMethod( - 'setDescriptionWhileRecording', - { - 'cameraName': description.name, - }, - ); - } - @override Widget buildPreview(int cameraId) { return Texture(textureId: cameraId); diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index a7bc2ab8b14..c7483d9e46b 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.13 +version: 0.9.12 environment: sdk: '>=2.18.0 <3.0.0' @@ -17,7 +17,7 @@ flutter: dartPluginClass: AVFoundationCamera dependencies: - camera_platform_interface: ^2.4.0 + camera_platform_interface: ^2.3.1 flutter: sdk: flutter stream_transform: ^2.0.0 diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index e756f38ff12..5d0b74cf0c0 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -701,29 +701,6 @@ void main() { ]); }); - test('Should set the description while recording', () async { - // Arrange - final MethodChannelMock channel = MethodChannelMock( - channelName: _channelName, - methods: {'setDescriptionWhileRecording': null}, - ); - const CameraDescription camera2Description = CameraDescription( - name: 'Test2', - lensDirection: CameraLensDirection.front, - sensorOrientation: 0); - - // Act - await camera.setDescriptionWhileRecording(camera2Description); - - // Assert - expect(channel.log, [ - isMethodCall('setDescriptionWhileRecording', - arguments: { - 'cameraName': camera2Description.name, - }), - ]); - }); - test('Should set the flash mode', () async { // Arrange final MethodChannelMock channel = MethodChannelMock( From fee5e7230777cfac306318819f7c268025baa2ed Mon Sep 17 00:00:00 2001 From: Stuart Morgan Date: Tue, 7 Mar 2023 14:45:39 -0500 Subject: [PATCH 2/2] Un-revert camera_avfoundation --- .../camera/camera_avfoundation/CHANGELOG.md | 3 +- .../example/integration_test/camera_test.dart | 41 ++++ .../ios/Runner.xcodeproj/project.pbxproj | 9 +- .../example/ios/Runner/Info.plist | 2 + .../example/ios/RunnerTests/CameraTestUtils.m | 13 +- .../example/lib/camera_controller.dart | 30 ++- .../camera_avfoundation/example/lib/main.dart | 23 +-- .../camera_avfoundation/example/pubspec.yaml | 2 +- .../ios/Classes/CameraPlugin.m | 2 + .../camera_avfoundation/ios/Classes/FLTCam.h | 2 + .../camera_avfoundation/ios/Classes/FLTCam.m | 175 +++++++++++++----- .../ios/Classes/FLTCam_Test.h | 3 +- .../lib/src/avfoundation_camera.dart | 11 ++ .../camera/camera_avfoundation/pubspec.yaml | 4 +- .../test/avfoundation_camera_test.dart | 23 +++ 15 files changed, 262 insertions(+), 81 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index fe4d4e7ee68..10f4fd804ad 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,5 +1,6 @@ -## NEXT +## 0.9.13 +* Allows camera to be switched while video recording. * Aligns Dart and Flutter SDK constraints. 
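The 'Should set the description while recording' unit test removed above (and re-added at the end of this series) verifies only the channel traffic: the method name and its arguments map. The package routes this through a local MethodChannelMock helper; the equivalent wiring with flutter_test alone looks like the sketch below, where the channel name is a placeholder rather than the plugin's constant.

    import 'package:flutter/services.dart';
    import 'package:flutter_test/flutter_test.dart';

    void main() {
      final TestWidgetsFlutterBinding binding =
          TestWidgetsFlutterBinding.ensureInitialized();

      test('logs the setDescriptionWhileRecording call', () async {
        // Placeholder channel name, not the plugin's real constant.
        const MethodChannel channel = MethodChannel('placeholder/camera_channel');
        final List<MethodCall> log = <MethodCall>[];

        // Record every call that reaches the (mocked) platform side.
        binding.defaultBinaryMessenger.setMockMethodCallHandler(channel,
            (MethodCall call) async {
          log.add(call);
          return null;
        });

        await channel.invokeMethod<void>(
          'setDescriptionWhileRecording',
          <String, dynamic>{'cameraName': 'Test2'},
        );

        expect(log, <Matcher>[
          isMethodCall('setDescriptionWhileRecording',
              arguments: <String, Object?>{'cameraName': 'Test2'}),
        ]);
      });
    }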
## 0.9.12 diff --git a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart index 34d460d44ec..315a34e9f9b 100644 --- a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart +++ b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart @@ -198,6 +198,47 @@ void main() { expect(duration, lessThan(recordingTime - timePaused)); }); + testWidgets('Set description while recording', (WidgetTester tester) async { + final List cameras = + await CameraPlatform.instance.availableCameras(); + if (cameras.length < 2) { + return; + } + + final CameraController controller = CameraController( + cameras[0], + ResolutionPreset.low, + enableAudio: false, + ); + + await controller.initialize(); + await controller.prepareForVideoRecording(); + + await controller.startVideoRecording(); + await controller.setDescription(cameras[1]); + + expect(controller.description, cameras[1]); + }); + + testWidgets('Set description', (WidgetTester tester) async { + final List cameras = + await CameraPlatform.instance.availableCameras(); + if (cameras.length < 2) { + return; + } + + final CameraController controller = CameraController( + cameras[0], + ResolutionPreset.low, + enableAudio: false, + ); + + await controller.initialize(); + await controller.setDescription(cameras[1]); + + expect(controller.description, cameras[1]); + }); + /// Start streaming with specifying the ImageFormatGroup. Future startStreaming(List cameras, ImageFormatGroup? imageFormatGroup) async { diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index d70d27fd8c2..a20638d18e1 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -290,6 +290,7 @@ }; 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; + DevelopmentTeam = W4ZTW5E78A; }; }; }; @@ -640,7 +641,7 @@ baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = W4ZTW5E78A; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -652,7 +653,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample; + PRODUCT_BUNDLE_IDENTIFIER = com.bradenbagby.test; PRODUCT_NAME = "$(TARGET_NAME)"; }; name = Debug; @@ -662,7 +663,7 @@ baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = W4ZTW5E78A; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -674,7 +675,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample; + PRODUCT_BUNDLE_IDENTIFIER = com.bradenbagby.test; PRODUCT_NAME = "$(TARGET_NAME)"; }; name = Release; diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist index bacd9e54f1e..ca93baac701 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist +++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist @@ -2,6 +2,8 @@ + CADisableMinimumFrameDurationOnPhone + 
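The two integration tests above switch the description but never finish the recording. An end-to-end sketch of the flow they exercise, assuming the example controller also exposes stopVideoRecording(), which is not part of the hunks above:

    import 'package:camera_platform_interface/camera_platform_interface.dart';

    import 'camera_controller.dart'; // the example app's local controller

    Future<void> recordAcrossCameraSwitch(List<CameraDescription> cameras) async {
      if (cameras.length < 2) {
        return; // Mirrors the guard in the tests above: nothing to switch to.
      }
      final CameraController controller =
          CameraController(cameras[0], ResolutionPreset.low, enableAudio: false);
      await controller.initialize();
      await controller.prepareForVideoRecording();

      await controller.startVideoRecording();
      // Switch to the second camera while the recording keeps running.
      await controller.setDescription(cameras[1]);
      await controller.stopVideoRecording();

      await controller.dispose(); // assumes Future<void> dispose(), as in the app-facing controller
    }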
CFBundleDevelopmentRegion en CFBundleExecutable diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 0ae4887eb63..b42aa34e2a1 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -11,15 +11,20 @@ OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) .andReturn(inputMock); - id sessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op - OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + id videoSessionMock = OCMClassMock([AVCaptureSession class]); + OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op + OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + + id audioSessionMock = OCMClassMock([AVCaptureSession class]); + OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op + OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); return [[FLTCam alloc] initWithCameraName:@"camera" resolutionPreset:@"medium" enableAudio:true orientation:UIDeviceOrientationPortrait - captureSession:sessionMock + videoCaptureSession:videoSessionMock + audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart index 524186816aa..6e1804328d5 100644 --- a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart +++ b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart @@ -24,6 +24,7 @@ class CameraValue { required this.exposureMode, required this.focusMode, required this.deviceOrientation, + required this.description, this.lockedCaptureOrientation, this.recordingOrientation, this.isPreviewPaused = false, @@ -31,7 +32,7 @@ class CameraValue { }); /// Creates a new camera controller state for an uninitialized controller. - const CameraValue.uninitialized() + const CameraValue.uninitialized(CameraDescription description) : this( isInitialized: false, isRecordingVideo: false, @@ -43,6 +44,7 @@ class CameraValue { focusMode: FocusMode.auto, deviceOrientation: DeviceOrientation.portraitUp, isPreviewPaused: false, + description: description, ); /// True after [CameraController.initialize] has completed successfully. @@ -92,6 +94,9 @@ class CameraValue { /// The orientation of the currently running video recording. final DeviceOrientation? recordingOrientation; + /// The properties of the camera device controlled by this controller. + final CameraDescription description; + /// Creates a modified copy of the object. /// /// Explicitly specified fields get the specified value, all other fields get @@ -112,6 +117,7 @@ class CameraValue { Optional? lockedCaptureOrientation, Optional? recordingOrientation, bool? isPreviewPaused, + CameraDescription? description, Optional? previewPauseOrientation, }) { return CameraValue( @@ -132,6 +138,7 @@ class CameraValue { ? this.recordingOrientation : recordingOrientation.orNull, isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused, + description: description ?? this.description, previewPauseOrientation: previewPauseOrientation == null ? 
this.previewPauseOrientation : previewPauseOrientation.orNull, @@ -165,14 +172,14 @@ class CameraValue { class CameraController extends ValueNotifier { /// Creates a new camera controller in an uninitialized state. CameraController( - this.description, + CameraDescription cameraDescription, this.resolutionPreset, { this.enableAudio = true, this.imageFormatGroup, - }) : super(const CameraValue.uninitialized()); + }) : super(CameraValue.uninitialized(cameraDescription)); /// The properties of the camera device controlled by this controller. - final CameraDescription description; + CameraDescription get description => value.description; /// The resolution this controller is targeting. /// @@ -202,7 +209,9 @@ class CameraController extends ValueNotifier { int get cameraId => _cameraId; /// Initializes the camera on the device. - Future initialize() async { + Future initialize() => _initializeWithDescription(description); + + Future _initializeWithDescription(CameraDescription description) async { final Completer initializeCompleter = Completer(); @@ -234,6 +243,7 @@ class CameraController extends ValueNotifier { value = value.copyWith( isInitialized: true, + description: description, previewSize: await initializeCompleter.future .then((CameraInitializedEvent event) => Size( event.previewWidth, @@ -274,6 +284,16 @@ class CameraController extends ValueNotifier { previewPauseOrientation: const Optional.absent()); } + /// Sets the description of the camera + Future setDescription(CameraDescription description) async { + if (value.isRecordingVideo) { + await CameraPlatform.instance.setDescriptionWhileRecording(description); + value = value.copyWith(description: description); + } else { + await _initializeWithDescription(description); + } + } + /// Captures an image and returns the file where it was saved. /// /// Throws a [CameraException] if the capture fails. diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart index 79c21386439..dd02be3d7ae 100644 --- a/packages/camera/camera_avfoundation/example/lib/main.dart +++ b/packages/camera/camera_avfoundation/example/lib/main.dart @@ -123,7 +123,7 @@ class _CameraExampleHomeState extends State if (state == AppLifecycleState.inactive) { cameraController.dispose(); } else if (state == AppLifecycleState.resumed) { - onNewCameraSelected(cameraController.description); + _initializeCameraController(cameraController.description); } } @@ -603,10 +603,7 @@ class _CameraExampleHomeState extends State title: Icon(getCameraLensIcon(cameraDescription.lensDirection)), groupValue: controller?.description, value: cameraDescription, - onChanged: - controller != null && controller!.value.isRecordingVideo - ? null - : onChanged, + onChanged: onChanged, ), ), ); @@ -639,17 +636,15 @@ class _CameraExampleHomeState extends State } Future onNewCameraSelected(CameraDescription cameraDescription) async { - final CameraController? oldController = controller; - if (oldController != null) { - // `controller` needs to be set to null before getting disposed, - // to avoid a race condition when we use the controller that is being - // disposed. This happens when camera permission dialog shows up, - // which triggers `didChangeAppLifecycleState`, which disposes and - // re-creates the controller. 
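With the description stored on CameraValue, setDescription branches on isRecordingVideo: while recording it forwards to setDescriptionWhileRecording on the platform, otherwise it simply re-initializes with the new description, so the example's lens selector no longer has to be disabled during capture. A small call-site sketch; flipCamera is a hypothetical helper and `cameras` is the list from CameraPlatform.instance.availableCameras():

    import 'package:camera_platform_interface/camera_platform_interface.dart';

    import 'camera_controller.dart'; // the example app's local controller

    /// Picks the first camera facing the other way and hands it to
    /// setDescription, which works whether or not a recording is active.
    Future<void> flipCamera(
        CameraController controller, List<CameraDescription> cameras) async {
      final CameraLensDirection current = controller.description.lensDirection;
      final CameraDescription next = cameras.firstWhere(
        (CameraDescription camera) => camera.lensDirection != current,
        orElse: () => controller.description,
      );
      if (next == controller.description) {
        return; // Only one lens direction is available on this device.
      }
      await controller.setDescription(next);
    }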
- controller = null; - await oldController.dispose(); + if (controller != null) { + return controller!.setDescription(cameraDescription); + } else { + return _initializeCameraController(cameraDescription); } + } + Future _initializeCameraController( + CameraDescription cameraDescription) async { final CameraController cameraController = CameraController( cameraDescription, kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium, diff --git a/packages/camera/camera_avfoundation/example/pubspec.yaml b/packages/camera/camera_avfoundation/example/pubspec.yaml index 1e966b1c584..0c471ff4848 100644 --- a/packages/camera/camera_avfoundation/example/pubspec.yaml +++ b/packages/camera/camera_avfoundation/example/pubspec.yaml @@ -14,7 +14,7 @@ dependencies: # The example app is bundled with the plugin so we use a path dependency on # the parent directory to use the current plugin's version. path: ../ - camera_platform_interface: ^2.2.0 + camera_platform_interface: ^2.4.0 flutter: sdk: flutter path_provider: ^2.0.0 diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m index 535887622c5..874f37b6c4f 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m @@ -253,6 +253,8 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call [_camera pausePreviewWithResult:result]; } else if ([@"resumePreview" isEqualToString:call.method]) { [_camera resumePreviewWithResult:result]; + } else if ([@"setDescriptionWhileRecording" isEqualToString:call.method]) { + [_camera setDescriptionWhileRecording:(call.arguments[@"cameraName"]) result:result]; } else { [result sendNotImplemented]; } diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h index 50a035494c7..fbf4ef4882c 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h @@ -95,6 +95,8 @@ NS_ASSUME_NONNULL_BEGIN - (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; - (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result; - (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result; +- (void)setDescriptionWhileRecording:(NSString *)cameraName + result:(FLTThreadSafeFlutterResult *)result; - (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y; - (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y; - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset; diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m index 2bfa3e5f1fa..31bffc91794 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m @@ -43,7 +43,8 @@ @interface FLTCam () setDescriptionWhileRecording( + CameraDescription description) async { + await _channel.invokeMethod( + 'setDescriptionWhileRecording', + { + 'cameraName': description.name, + }, + ); + } + @override Widget buildPreview(int cameraId) { return Texture(textureId: cameraId); diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index c7483d9e46b..a7bc2ab8b14 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ 
b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.12 +version: 0.9.13 environment: sdk: '>=2.18.0 <3.0.0' @@ -17,7 +17,7 @@ flutter: dartPluginClass: AVFoundationCamera dependencies: - camera_platform_interface: ^2.3.1 + camera_platform_interface: ^2.4.0 flutter: sdk: flutter stream_transform: ^2.0.0 diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index 5d0b74cf0c0..e756f38ff12 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -701,6 +701,29 @@ void main() { ]); }); + test('Should set the description while recording', () async { + // Arrange + final MethodChannelMock channel = MethodChannelMock( + channelName: _channelName, + methods: {'setDescriptionWhileRecording': null}, + ); + const CameraDescription camera2Description = CameraDescription( + name: 'Test2', + lensDirection: CameraLensDirection.front, + sensorOrientation: 0); + + // Act + await camera.setDescriptionWhileRecording(camera2Description); + + // Assert + expect(channel.log, [ + isMethodCall('setDescriptionWhileRecording', + arguments: { + 'cameraName': camera2Description.name, + }), + ]); + }); + test('Should set the flash mode', () async { // Arrange final MethodChannelMock channel = MethodChannelMock(