diff --git a/.gitignore b/.gitignore
index c9dae8e685c2..430247c396c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -58,3 +58,5 @@ gradlew.bat
.project
.classpath
.settings
+packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/swiftpm/Package.resolved
+packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
diff --git a/flutter_packages.code-workspace b/flutter_packages.code-workspace
new file mode 100644
index 000000000000..bbb59b0a5b2d
--- /dev/null
+++ b/flutter_packages.code-workspace
@@ -0,0 +1,24 @@
+{
+ "folders": [
+ {
+ "path": "."
+ },
+ {
+ "path": "./packages/camera"
+ },
+ {
+ "path": "./packages/camera/camera_avfoundation"
+ }
+ ],
+ "settings": {
+ "editor.quickSuggestions": {
+ "comments": "off",
+ "strings": "off",
+ "other": "off"
+ },
+ "files.associations": {
+ "MLTable": "yaml",
+ "cstring": "cpp"
+ }
+ }
+}
diff --git a/packages/camera/camera/example/.gitattributes b/packages/camera/camera/example/.gitattributes
new file mode 100644
index 000000000000..f087b429e2f8
--- /dev/null
+++ b/packages/camera/camera/example/.gitattributes
@@ -0,0 +1 @@
+*.tar.gz filter=lfs diff=lfs merge=lfs -text
diff --git a/packages/camera/camera/example/.metadata b/packages/camera/camera/example/.metadata
new file mode 100644
index 000000000000..9c11960fa8d4
--- /dev/null
+++ b/packages/camera/camera/example/.metadata
@@ -0,0 +1,30 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled and should not be manually edited.
+
+version:
+ revision: "603104015dd692ea3403755b55d07813d5cf8965"
+ channel: "[user-branch]"
+
+project_type: app
+
+# Tracks metadata for the flutter migrate command
+migration:
+ platforms:
+ - platform: root
+ create_revision: 603104015dd692ea3403755b55d07813d5cf8965
+ base_revision: 603104015dd692ea3403755b55d07813d5cf8965
+ - platform: linux
+ create_revision: 603104015dd692ea3403755b55d07813d5cf8965
+ base_revision: 603104015dd692ea3403755b55d07813d5cf8965
+
+ # User provided section
+
+ # List of Local paths (relative to this file) that should be
+ # ignored by the migrate tool.
+ #
+ # Files that are not part of the templates will be ignored by default.
+ unmanaged_files:
+ - 'lib/main.dart'
+ - 'ios/Runner.xcodeproj/project.pbxproj'
diff --git a/packages/camera/camera/example/README.md b/packages/camera/camera/example/README.md
new file mode 100644
index 000000000000..2b3fce4c86a5
--- /dev/null
+++ b/packages/camera/camera/example/README.md
@@ -0,0 +1,16 @@
+# example
+
+A new Flutter project.
+
+## Getting Started
+
+This project is a starting point for a Flutter application.
+
+A few resources to get you started if this is your first Flutter project:
+
+- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab)
+- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook)
+
+For help getting started with Flutter development, view the
+[online documentation](https://docs.flutter.dev/), which offers tutorials,
+samples, guidance on mobile development, and a full API reference.
diff --git a/packages/camera/camera/example/analysis_options.yaml b/packages/camera/camera/example/analysis_options.yaml
new file mode 100644
index 000000000000..0d2902135cae
--- /dev/null
+++ b/packages/camera/camera/example/analysis_options.yaml
@@ -0,0 +1,28 @@
+# This file configures the analyzer, which statically analyzes Dart code to
+# check for errors, warnings, and lints.
+#
+# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
+# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
+# invoked from the command line by running `flutter analyze`.
+
+# The following line activates a set of recommended lints for Flutter apps,
+# packages, and plugins designed to encourage good coding practices.
+include: package:flutter_lints/flutter.yaml
+
+linter:
+ # The lint rules applied to this project can be customized in the
+ # section below to disable rules from the `package:flutter_lints/flutter.yaml`
+ # included above or to enable additional rules. A list of all available lints
+ # and their documentation is published at https://dart.dev/lints.
+ #
+ # Instead of disabling a lint rule for the entire project in the
+ # section below, it can also be suppressed for a single line of code
+ # or a specific dart file by using the `// ignore: name_of_lint` and
+ # `// ignore_for_file: name_of_lint` syntax on the line or in the file
+ # producing the lint.
+ rules:
+ # avoid_print: false # Uncomment to disable the `avoid_print` rule
+ # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
+
+# Additional information about this file can be found at
+# https://dart.dev/guides/language/analysis-options
diff --git a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist
index 7c5696400627..1dc6cf7652ba 100644
--- a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist
+++ b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist
@@ -21,6 +21,6 @@
   <key>CFBundleVersion</key>
   <string>1.0</string>
   <key>MinimumOSVersion</key>
-  <string>12.0</string>
+  <string>13.0</string>
 </dict>
 </plist>
diff --git a/packages/camera/camera/example/ios/Podfile b/packages/camera/camera/example/ios/Podfile
index 01d4aa611bb9..17adeb14132e 100644
--- a/packages/camera/camera/example/ios/Podfile
+++ b/packages/camera/camera/example/ios/Podfile
@@ -1,5 +1,5 @@
# Uncomment this line to define a global platform for your project
-# platform :ios, '12.0'
+# platform :ios, '13.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
index 92de69f31b7a..e9a714f09ec6 100644
--- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
@@ -141,6 +141,7 @@
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
+ 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
@@ -222,6 +223,23 @@
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
};
+ 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputFileListPaths = (
+ "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
+ );
+ name = "[CP] Embed Pods Frameworks";
+ outputFileListPaths = (
+ "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
@@ -336,7 +354,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
@@ -415,7 +433,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
@@ -466,7 +484,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart
index cca528c04fda..75cc60897b8f 100644
--- a/packages/camera/camera/example/lib/main.dart
+++ b/packages/camera/camera/example/lib/main.dart
@@ -6,6 +6,9 @@ import 'dart:async';
import 'dart:io';
import 'package:camera/camera.dart';
+import 'package:camera_avfoundation/camera_avfoundation.dart';
+import 'package:camera_linux/camera_linux.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
@@ -64,6 +67,13 @@ class _CameraExampleHomeState extends State
double _maxAvailableZoom = 1.0;
double _currentScale = 1.0;
double _baseScale = 1.0;
+ double _currentLensPosition = 0.0;
+
+ // Transform state (iOS 17+)
+ double _transformRotation = 0;
+ bool _transformFlipH = false;
+ bool _transformFlipV = false;
+ bool _transformCropEnabled = false;
// Counting pointers (number of user fingers on screen)
int _pointers = 0;
@@ -155,6 +165,44 @@ class _CameraExampleHomeState extends State
),
_captureControlRowWidget(),
_modeControlRowWidget(),
+ if (!kIsWeb && Platform.isIOS) _lensPositionWidget(),
+ if (!kIsWeb && Platform.isIOS) _transformControlRowWidget(),
+ Row(
+ children: [
+ ElevatedButton(
+ onPressed: () {
+ setState(() {
+ if (TargetPlatform.linux != defaultTargetPlatform) {
+ return;
+ }
+ final CameraLinux nativeCamera =
+ CameraPlatform.instance as CameraLinux;
+ nativeCamera.setImageFormatGroup(
+ controller!.cameraId,
+ PlatformImageFormatGroup.mono8,
+ );
+ });
+ },
+ child: Text('mono8'),
+ ),
+ ElevatedButton(
+ onPressed: () {
+ setState(() {
+ if (TargetPlatform.linux != defaultTargetPlatform) {
+ return;
+ }
+ final CameraLinux nativeCamera =
+ CameraPlatform.instance as CameraLinux;
+ nativeCamera.setImageFormatGroup(
+ controller!.cameraId,
+ PlatformImageFormatGroup.rgb8,
+ );
+ });
+ },
+ child: Text('rgb8'),
+ ),
+ ],
+ ),
Padding(
padding: const EdgeInsets.all(5.0),
child: Row(
@@ -489,6 +537,168 @@ class _CameraExampleHomeState extends State
);
}
+ /// Lens position slider, shown on iOS only.
+ Widget _lensPositionWidget() {
+ return ColoredBox(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: [
+ const Center(child: Text('Lens Position (lock focus first)')),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: [
+ const Text('0.0'),
+ Expanded(
+ child: Slider(
+ value: _currentLensPosition,
+ min: 0.0,
+ max: 1.0,
+ divisions: 100,
+ label: _currentLensPosition.toStringAsFixed(2),
+ onChanged:
+ controller != null &&
+ controller!.value.focusMode == FocusMode.locked
+ ? (double value) {
+ setState(() => _currentLensPosition = value);
+ (CameraPlatform.instance as AVFoundationCamera)
+ .setLensPosition(value);
+ }
+ : null,
+ ),
+ ),
+ const Text('1.0'),
+ ],
+ ),
+ ],
+ ),
+ );
+ }
+
+ Future _applyTransform() async {
+ if (controller == null || !controller!.value.isInitialized) {
+ return;
+ }
+ await (CameraPlatform.instance as AVFoundationCamera).setTransform(
+ controller!.cameraId,
+ CameraTransform(
+ rotationDegrees: _transformRotation,
+ flipHorizontally: _transformFlipH,
+ flipVertically: _transformFlipV,
+ cropRect: _transformCropEnabled
+ ? const CameraTransformRect(
+ x: 0.1,
+ y: 0.1,
+ width: 0.8,
+ height: 0.8,
+ )
+ : null,
+ ),
+ );
+ }
+
+ /// Transform controls panel (iOS 17+ only).
+ Widget _transformControlRowWidget() {
+ final bool enabled =
+ controller != null && controller!.value.isInitialized;
+ return ColoredBox(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: [
+ const Center(
+ child: Text(
+ 'Camera Transform (iOS 17+)',
+ style: TextStyle(fontWeight: FontWeight.bold),
+ ),
+ ),
+ // Rotation
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: [
+ const Text('Rotation:'),
+ for (final double deg in [0, 90, 180, 270])
+ TextButton(
+ style: TextButton.styleFrom(
+ foregroundColor: _transformRotation == deg
+ ? Colors.orange
+ : Colors.blue,
+ ),
+ onPressed: enabled
+ ? () {
+ setState(() => _transformRotation = deg);
+ _applyTransform();
+ }
+ : null,
+ child: Text('${deg.toInt()}\u00b0'),
+ ),
+ ],
+ ),
+ // Flip, crop, reset
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: [
+ TextButton(
+ style: TextButton.styleFrom(
+ foregroundColor:
+ _transformFlipH ? Colors.orange : Colors.blue,
+ ),
+ onPressed: enabled
+ ? () {
+ setState(() => _transformFlipH = !_transformFlipH);
+ _applyTransform();
+ }
+ : null,
+ child: const Text('Flip H'),
+ ),
+ TextButton(
+ style: TextButton.styleFrom(
+ foregroundColor:
+ _transformFlipV ? Colors.orange : Colors.blue,
+ ),
+ onPressed: enabled
+ ? () {
+ setState(() => _transformFlipV = !_transformFlipV);
+ _applyTransform();
+ }
+ : null,
+ child: const Text('Flip V'),
+ ),
+ TextButton(
+ style: TextButton.styleFrom(
+ foregroundColor:
+ _transformCropEnabled ? Colors.orange : Colors.blue,
+ ),
+ onPressed: enabled
+ ? () {
+ setState(
+ () =>
+ _transformCropEnabled = !_transformCropEnabled,
+ );
+ _applyTransform();
+ }
+ : null,
+ child: const Text('Crop 80%'),
+ ),
+ TextButton(
+ onPressed: enabled
+ ? () {
+ setState(() {
+ _transformRotation = 0;
+ _transformFlipH = false;
+ _transformFlipV = false;
+ _transformCropEnabled = false;
+ });
+ _applyTransform();
+ }
+ : null,
+ child: const Text('Reset'),
+ ),
+ ],
+ ),
+ ],
+ ),
+ );
+ }
+
/// Display the control bar with buttons to take pictures and record videos.
Widget _captureControlRowWidget() {
final CameraController? cameraController = controller;
@@ -635,7 +845,7 @@ class _CameraExampleHomeState extends State
) async {
final cameraController = CameraController(
cameraDescription,
- kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
+ ResolutionPreset.max,
enableAudio: enableAudio,
imageFormatGroup: ImageFormatGroup.jpeg,
);
diff --git a/packages/camera/camera/example/linux/.gitignore b/packages/camera/camera/example/linux/.gitignore
new file mode 100644
index 000000000000..d3896c98444f
--- /dev/null
+++ b/packages/camera/camera/example/linux/.gitignore
@@ -0,0 +1 @@
+flutter/ephemeral
diff --git a/packages/camera/camera/example/linux/CMakeLists.txt b/packages/camera/camera/example/linux/CMakeLists.txt
new file mode 100644
index 000000000000..c6d1dcf8e0e3
--- /dev/null
+++ b/packages/camera/camera/example/linux/CMakeLists.txt
@@ -0,0 +1,145 @@
+# Project-level configuration.
+cmake_minimum_required(VERSION 3.10)
+project(runner LANGUAGES CXX)
+
+# The name of the executable created for the application. Change this to change
+# the on-disk name of your application.
+set(BINARY_NAME "example")
+# The unique GTK application identifier for this application. See:
+# https://wiki.gnome.org/HowDoI/ChooseApplicationID
+set(APPLICATION_ID "io.flutter.plugins.example")
+
+# Explicitly opt in to modern CMake behaviors to avoid warnings with recent
+# versions of CMake.
+cmake_policy(SET CMP0063 NEW)
+
+# Load bundled libraries from the lib/ directory relative to the binary.
+set(CMAKE_INSTALL_RPATH "$ORIGIN/lib")
+
+# Root filesystem for cross-building.
+if(FLUTTER_TARGET_PLATFORM_SYSROOT)
+ set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT})
+ set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT})
+ set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+ set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
+ set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+ set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+endif()
+
+# Define build configuration options.
+if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+ set(CMAKE_BUILD_TYPE "Debug" CACHE
+ STRING "Flutter build mode" FORCE)
+ set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS
+ "Debug" "Profile" "Release")
+endif()
+
+# Compilation settings that should be applied to most targets.
+#
+# Be cautious about adding new options here, as plugins use this function by
+# default. In most cases, you should add new options to specific targets instead
+# of modifying this function.
+function(APPLY_STANDARD_SETTINGS TARGET)
+ target_compile_features(${TARGET} PUBLIC cxx_std_14)
+ target_compile_options(${TARGET} PRIVATE -Wall -Werror)
+ target_compile_options(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:-O3>")
+ target_compile_definitions(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:NDEBUG>")
+endfunction()
+
+# Flutter library and tool build rules.
+set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter")
+add_subdirectory(${FLUTTER_MANAGED_DIR})
+
+# System-level dependencies.
+find_package(PkgConfig REQUIRED)
+pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0)
+
+add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}")
+
+# Define the application target. To change its name, change BINARY_NAME above,
+# not the value here, or `flutter run` will no longer work.
+#
+# Any new source files that you add to the application should be added here.
+add_executable(${BINARY_NAME}
+ "main.cc"
+ "my_application.cc"
+ "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc"
+)
+
+# Apply the standard set of build settings. This can be removed for applications
+# that need different build settings.
+apply_standard_settings(${BINARY_NAME})
+
+# Add dependency libraries. Add any application-specific dependencies here.
+target_link_libraries(${BINARY_NAME} PRIVATE flutter)
+target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK)
+
+# Run the Flutter tool portions of the build. This must not be removed.
+add_dependencies(${BINARY_NAME} flutter_assemble)
+
+# Only the install-generated bundle's copy of the executable will launch
+# correctly, since the resources must in the right relative locations. To avoid
+# people trying to run the unbundled copy, put it in a subdirectory instead of
+# the default top-level location.
+set_target_properties(${BINARY_NAME}
+ PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run"
+)
+
+
+# Generated plugin build rules, which manage building the plugins and adding
+# them to the application.
+include(flutter/generated_plugins.cmake)
+
+
+# === Installation ===
+# By default, "installing" just makes a relocatable bundle in the build
+# directory.
+set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle")
+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+ set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE)
+endif()
+
+# Start with a clean build bundle directory every time.
+install(CODE "
+ file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\")
+ " COMPONENT Runtime)
+
+set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data")
+set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib")
+
+install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}"
+ COMPONENT Runtime)
+
+install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}"
+ COMPONENT Runtime)
+
+install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+
+foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES})
+ install(FILES "${bundled_library}"
+ DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+endforeach(bundled_library)
+
+# Copy the native assets provided by the build.dart from all packages.
+set(NATIVE_ASSETS_DIR "${PROJECT_BUILD_DIR}native_assets/linux/")
+install(DIRECTORY "${NATIVE_ASSETS_DIR}"
+ DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+
+# Fully re-copy the assets directory on each build to avoid having stale files
+# from a previous install.
+set(FLUTTER_ASSET_DIR_NAME "flutter_assets")
+install(CODE "
+ file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\")
+ " COMPONENT Runtime)
+install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}"
+ DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime)
+
+# Install the AOT library on non-Debug builds only.
+if(NOT CMAKE_BUILD_TYPE MATCHES "Debug")
+ install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+endif()
diff --git a/packages/camera/camera/example/linux/flutter/CMakeLists.txt b/packages/camera/camera/example/linux/flutter/CMakeLists.txt
new file mode 100644
index 000000000000..d5bd01648a96
--- /dev/null
+++ b/packages/camera/camera/example/linux/flutter/CMakeLists.txt
@@ -0,0 +1,88 @@
+# This file controls Flutter-level build steps. It should not be edited.
+cmake_minimum_required(VERSION 3.10)
+
+set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral")
+
+# Configuration provided via flutter tool.
+include(${EPHEMERAL_DIR}/generated_config.cmake)
+
+# TODO: Move the rest of this into files in ephemeral. See
+# https://github.com/flutter/flutter/issues/57146.
+
+# Serves the same purpose as list(TRANSFORM ... PREPEND ...),
+# which isn't available in 3.10.
+function(list_prepend LIST_NAME PREFIX)
+ set(NEW_LIST "")
+ foreach(element ${${LIST_NAME}})
+ list(APPEND NEW_LIST "${PREFIX}${element}")
+ endforeach(element)
+ set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE)
+endfunction()
+
+# === Flutter Library ===
+# System-level dependencies.
+find_package(PkgConfig REQUIRED)
+pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0)
+pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0)
+pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0)
+
+set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so")
+
+# Published to parent scope for install step.
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE)
+set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE)
+set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE)
+set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE)
+
+list(APPEND FLUTTER_LIBRARY_HEADERS
+ "fl_basic_message_channel.h"
+ "fl_binary_codec.h"
+ "fl_binary_messenger.h"
+ "fl_dart_project.h"
+ "fl_engine.h"
+ "fl_json_message_codec.h"
+ "fl_json_method_codec.h"
+ "fl_message_codec.h"
+ "fl_method_call.h"
+ "fl_method_channel.h"
+ "fl_method_codec.h"
+ "fl_method_response.h"
+ "fl_plugin_registrar.h"
+ "fl_plugin_registry.h"
+ "fl_standard_message_codec.h"
+ "fl_standard_method_codec.h"
+ "fl_string_codec.h"
+ "fl_value.h"
+ "fl_view.h"
+ "flutter_linux.h"
+)
+list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/")
+add_library(flutter INTERFACE)
+target_include_directories(flutter INTERFACE
+ "${EPHEMERAL_DIR}"
+)
+target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}")
+target_link_libraries(flutter INTERFACE
+ PkgConfig::GTK
+ PkgConfig::GLIB
+ PkgConfig::GIO
+)
+add_dependencies(flutter flutter_assemble)
+
+# === Flutter tool backend ===
+# _phony_ is a non-existent file to force this command to run every time,
+# since currently there's no way to get a full input/output list from the
+# flutter tool.
+add_custom_command(
+ OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS}
+ ${CMAKE_CURRENT_BINARY_DIR}/_phony_
+ COMMAND ${CMAKE_COMMAND} -E env
+ ${FLUTTER_TOOL_ENVIRONMENT}
+ "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh"
+ ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE}
+ VERBATIM
+)
+add_custom_target(flutter_assemble DEPENDS
+ "${FLUTTER_LIBRARY}"
+ ${FLUTTER_LIBRARY_HEADERS}
+)
diff --git a/packages/camera/camera/example/linux/flutter/generated_plugins.cmake b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake
new file mode 100644
index 000000000000..1aa0c3eb78bd
--- /dev/null
+++ b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake
@@ -0,0 +1,24 @@
+#
+# Generated file, do not edit.
+#
+
+list(APPEND FLUTTER_PLUGIN_LIST
+ camera_linux
+)
+
+list(APPEND FLUTTER_FFI_PLUGIN_LIST
+)
+
+set(PLUGIN_BUNDLED_LIBRARIES)
+
+foreach(plugin ${FLUTTER_PLUGIN_LIST})
+ add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/linux plugins/${plugin})
+ target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin)
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES $<TARGET_FILE:${plugin}_plugin>)
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries})
+endforeach(plugin)
+
+foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST})
+ add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/linux plugins/${ffi_plugin})
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries})
+endforeach(ffi_plugin)
diff --git a/packages/camera/camera/example/linux/main.cc b/packages/camera/camera/example/linux/main.cc
new file mode 100644
index 000000000000..e7c5c5437037
--- /dev/null
+++ b/packages/camera/camera/example/linux/main.cc
@@ -0,0 +1,6 @@
+#include "my_application.h"
+
+int main(int argc, char** argv) {
+ g_autoptr(MyApplication) app = my_application_new();
+ return g_application_run(G_APPLICATION(app), argc, argv);
+}
diff --git a/packages/camera/camera/example/linux/my_application.cc b/packages/camera/camera/example/linux/my_application.cc
new file mode 100644
index 000000000000..c0530d422cdd
--- /dev/null
+++ b/packages/camera/camera/example/linux/my_application.cc
@@ -0,0 +1,124 @@
+#include "my_application.h"
+
+#include <flutter_linux/flutter_linux.h>
+#ifdef GDK_WINDOWING_X11
+#include <gdk/gdkx.h>
+#endif
+
+#include "flutter/generated_plugin_registrant.h"
+
+struct _MyApplication {
+ GtkApplication parent_instance;
+ char** dart_entrypoint_arguments;
+};
+
+G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION)
+
+// Implements GApplication::activate.
+static void my_application_activate(GApplication* application) {
+ MyApplication* self = MY_APPLICATION(application);
+ GtkWindow* window =
+ GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application)));
+
+ // Use a header bar when running in GNOME as this is the common style used
+ // by applications and is the setup most users will be using (e.g. Ubuntu
+ // desktop).
+ // If running on X and not using GNOME then just use a traditional title bar
+ // in case the window manager does more exotic layout, e.g. tiling.
+ // If running on Wayland assume the header bar will work (may need changing
+ // if future cases occur).
+ gboolean use_header_bar = TRUE;
+#ifdef GDK_WINDOWING_X11
+ GdkScreen* screen = gtk_window_get_screen(window);
+ if (GDK_IS_X11_SCREEN(screen)) {
+ const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen);
+ if (g_strcmp0(wm_name, "GNOME Shell") != 0) {
+ use_header_bar = FALSE;
+ }
+ }
+#endif
+ if (use_header_bar) {
+ GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new());
+ gtk_widget_show(GTK_WIDGET(header_bar));
+ gtk_header_bar_set_title(header_bar, "example");
+ gtk_header_bar_set_show_close_button(header_bar, TRUE);
+ gtk_window_set_titlebar(window, GTK_WIDGET(header_bar));
+ } else {
+ gtk_window_set_title(window, "example");
+ }
+
+ gtk_window_set_default_size(window, 1280, 720);
+ gtk_widget_show(GTK_WIDGET(window));
+
+ g_autoptr(FlDartProject) project = fl_dart_project_new();
+ fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments);
+
+ FlView* view = fl_view_new(project);
+ gtk_widget_show(GTK_WIDGET(view));
+ gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view));
+
+ fl_register_plugins(FL_PLUGIN_REGISTRY(view));
+
+ gtk_widget_grab_focus(GTK_WIDGET(view));
+}
+
+// Implements GApplication::local_command_line.
+static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) {
+ MyApplication* self = MY_APPLICATION(application);
+ // Strip out the first argument as it is the binary name.
+ self->dart_entrypoint_arguments = g_strdupv(*arguments + 1);
+
+ g_autoptr(GError) error = nullptr;
+ if (!g_application_register(application, nullptr, &error)) {
+ g_warning("Failed to register: %s", error->message);
+ *exit_status = 1;
+ return TRUE;
+ }
+
+ g_application_activate(application);
+ *exit_status = 0;
+
+ return TRUE;
+}
+
+// Implements GApplication::startup.
+static void my_application_startup(GApplication* application) {
+ //MyApplication* self = MY_APPLICATION(object);
+
+ // Perform any actions required at application startup.
+
+ G_APPLICATION_CLASS(my_application_parent_class)->startup(application);
+}
+
+// Implements GApplication::shutdown.
+static void my_application_shutdown(GApplication* application) {
+ //MyApplication* self = MY_APPLICATION(object);
+
+ // Perform any actions required at application shutdown.
+
+ G_APPLICATION_CLASS(my_application_parent_class)->shutdown(application);
+}
+
+// Implements GObject::dispose.
+static void my_application_dispose(GObject* object) {
+ MyApplication* self = MY_APPLICATION(object);
+ g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev);
+ G_OBJECT_CLASS(my_application_parent_class)->dispose(object);
+}
+
+static void my_application_class_init(MyApplicationClass* klass) {
+ G_APPLICATION_CLASS(klass)->activate = my_application_activate;
+ G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line;
+ G_APPLICATION_CLASS(klass)->startup = my_application_startup;
+ G_APPLICATION_CLASS(klass)->shutdown = my_application_shutdown;
+ G_OBJECT_CLASS(klass)->dispose = my_application_dispose;
+}
+
+static void my_application_init(MyApplication* self) {}
+
+MyApplication* my_application_new() {
+ return MY_APPLICATION(g_object_new(my_application_get_type(),
+ "application-id", APPLICATION_ID,
+ "flags", G_APPLICATION_NON_UNIQUE,
+ nullptr));
+}
diff --git a/packages/camera/camera/example/linux/my_application.h b/packages/camera/camera/example/linux/my_application.h
new file mode 100644
index 000000000000..72271d5e4170
--- /dev/null
+++ b/packages/camera/camera/example/linux/my_application.h
@@ -0,0 +1,18 @@
+#ifndef FLUTTER_MY_APPLICATION_H_
+#define FLUTTER_MY_APPLICATION_H_
+
+#include <flutter_linux/flutter_linux.h>
+
+G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION,
+ GtkApplication)
+
+/**
+ * my_application_new:
+ *
+ * Creates a new Flutter-based application.
+ *
+ * Returns: a new #MyApplication.
+ */
+MyApplication* my_application_new();
+
+#endif // FLUTTER_MY_APPLICATION_H_
diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml
index b5d9dff6e913..af6c4b52fa02 100644
--- a/packages/camera/camera/example/pubspec.yaml
+++ b/packages/camera/camera/example/pubspec.yaml
@@ -29,5 +29,13 @@ dev_dependencies:
sdk: flutter
leak_tracker_flutter_testing: any
+dependency_overrides:
+ camera_avfoundation:
+ path: ../../camera_avfoundation
+ camera_web:
+ path: ../../camera_web
+ camera_linux:
+ path: ../../camera_linux
+
flutter:
uses-material-design: true
diff --git a/packages/camera/camera/example/test/widget_test.dart b/packages/camera/camera/example/test/widget_test.dart
new file mode 100644
index 000000000000..092d222f7e16
--- /dev/null
+++ b/packages/camera/camera/example/test/widget_test.dart
@@ -0,0 +1,30 @@
+// This is a basic Flutter widget test.
+//
+// To perform an interaction with a widget in your test, use the WidgetTester
+// utility in the flutter_test package. For example, you can send tap and scroll
+// gestures. You can also use WidgetTester to find child widgets in the widget
+// tree, read text, and verify that the values of widget properties are correct.
+
+import 'package:flutter/material.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'package:example/main.dart';
+
+void main() {
+ testWidgets('Counter increments smoke test', (WidgetTester tester) async {
+ // Build our app and trigger a frame.
+ await tester.pumpWidget(const MyApp());
+
+ // Verify that our counter starts at 0.
+ expect(find.text('0'), findsOneWidget);
+ expect(find.text('1'), findsNothing);
+
+ // Tap the '+' icon and trigger a frame.
+ await tester.tap(find.byIcon(Icons.add));
+ await tester.pump();
+
+ // Verify that our counter has incremented.
+ expect(find.text('0'), findsNothing);
+ expect(find.text('1'), findsOneWidget);
+ });
+}
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index 1c9e8bb0b145..26673869459a 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -19,11 +19,18 @@ flutter:
default_package: camera_avfoundation
web:
default_package: camera_web
+ linux:
+ default_package: camera_linux
dependencies:
camera_android_camerax: ^0.7.0
camera_avfoundation: ^0.10.0
camera_platform_interface: ^2.12.0
+ camera_linux:
+ git:
+ url: https://github.com/LightX-Innovations/flutter_packages.git
+ path: packages/camera/camera_linux
+ ref: camera_0.6
camera_web: ^0.3.3
flutter:
sdk: flutter
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index ca97b101df8b..9a532157ea2e 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.10.2
+
+* Adds `setLensPosition` support for manually controlling the lens focus position on iOS.
+
## 0.10.1
* Fixes fatal crash on iPhone 17 when using `ResolutionPreset.max`.
diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart
index f9576b96398f..cda6e362fc30 100644
--- a/packages/camera/camera_avfoundation/example/lib/main.dart
+++ b/packages/camera/camera_avfoundation/example/lib/main.dart
@@ -6,6 +6,7 @@ import 'dart:async';
import 'dart:io';
import 'dart:math';
+import 'package:camera_avfoundation/camera_avfoundation.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
@@ -47,8 +48,7 @@ void _logError(String code, String? message) {
print('Error: $code${message == null ? '' : '\nError Message: $message'}');
}
-class _CameraExampleHomeState extends State
- with WidgetsBindingObserver, TickerProviderStateMixin {
+class _CameraExampleHomeState extends State with WidgetsBindingObserver, TickerProviderStateMixin {
CameraController? controller;
XFile? imageFile;
XFile? videoFile;
@@ -68,6 +68,7 @@ class _CameraExampleHomeState extends State
double _maxAvailableZoom = 1.0;
double _currentScale = 1.0;
double _baseScale = 1.0;
+ double _lenPosition = 0.0;
// Counting pointers (number of user fingers on screen)
int _pointers = 0;
@@ -151,6 +152,7 @@ class _CameraExampleHomeState extends State
),
),
),
+ _buildLensPositionSettings(),
_captureControlRowWidget(),
_modeControlRowWidget(),
Padding(
@@ -164,6 +166,37 @@ class _CameraExampleHomeState extends State
);
}
+ Widget _buildLensPositionSettings() {
+ return ButtonBar(
+ layoutBehavior: ButtonBarLayoutBehavior.constrained,
+ alignment: MainAxisAlignment.center,
+ children: [
+ ElevatedButton(
+ onPressed: () {
+ final camera = CameraPlatform.instance as AVFoundationCamera;
+ setState(() {
+ _lenPosition = (_lenPosition + 0.1).clamp(0.0, 1.0);
+ });
+ camera.setFocusMode(controller!.cameraId, FocusMode.locked);
+ camera.setLensPosition(_lenPosition);
+ },
+ child: const Text('Increment Lens Position'),
+ ),
+ ElevatedButton(
+ onPressed: () {
+ final camera = CameraPlatform.instance as AVFoundationCamera;
+ setState(() {
+ _lenPosition = (_lenPosition - 0.1).clamp(0.0, 1.0);
+ });
+ camera.setFocusMode(controller!.cameraId, FocusMode.locked);
+ camera.setLensPosition(_lenPosition);
+ },
+ child: const Text('Decrement Lens Position'),
+ ),
+ ],
+ );
+ }
+
/// Display the preview from the camera (or a message if the preview is not available).
Widget _cameraPreviewWidget() {
final CameraController? cameraController = controller;
@@ -281,9 +314,7 @@ class _CameraExampleHomeState extends State
IconButton(
icon: const Icon(Icons.exposure),
color: Colors.blue,
- onPressed: controller != null
- ? onExposureModeButtonPressed
- : null,
+ onPressed: controller != null ? onExposureModeButtonPressed : null,
),
IconButton(
icon: const Icon(Icons.filter_center_focus),
@@ -306,9 +337,7 @@ class _CameraExampleHomeState extends State
: Icons.screen_rotation,
),
color: Colors.blue,
- onPressed: controller != null
- ? onCaptureOrientationLockButtonPressed
- : null,
+ onPressed: controller != null ? onCaptureOrientationLockButtonPressed : null,
),
],
),
@@ -328,39 +357,23 @@ class _CameraExampleHomeState extends State
children: [
IconButton(
icon: const Icon(Icons.flash_off),
- color: controller?.value.flashMode == FlashMode.off
- ? Colors.orange
- : Colors.blue,
- onPressed: controller != null
- ? () => onSetFlashModeButtonPressed(FlashMode.off)
- : null,
+ color: controller?.value.flashMode == FlashMode.off ? Colors.orange : Colors.blue,
+ onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.off) : null,
),
IconButton(
icon: const Icon(Icons.flash_auto),
- color: controller?.value.flashMode == FlashMode.auto
- ? Colors.orange
- : Colors.blue,
- onPressed: controller != null
- ? () => onSetFlashModeButtonPressed(FlashMode.auto)
- : null,
+ color: controller?.value.flashMode == FlashMode.auto ? Colors.orange : Colors.blue,
+ onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.auto) : null,
),
IconButton(
icon: const Icon(Icons.flash_on),
- color: controller?.value.flashMode == FlashMode.always
- ? Colors.orange
- : Colors.blue,
- onPressed: controller != null
- ? () => onSetFlashModeButtonPressed(FlashMode.always)
- : null,
+ color: controller?.value.flashMode == FlashMode.always ? Colors.orange : Colors.blue,
+ onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.always) : null,
),
IconButton(
icon: const Icon(Icons.highlight),
- color: controller?.value.flashMode == FlashMode.torch
- ? Colors.orange
- : Colors.blue,
- onPressed: controller != null
- ? () => onSetFlashModeButtonPressed(FlashMode.torch)
- : null,
+ color: controller?.value.flashMode == FlashMode.torch ? Colors.orange : Colors.blue,
+ onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.torch) : null,
),
],
),
@@ -370,14 +383,10 @@ class _CameraExampleHomeState extends State
Widget _exposureModeControlRowWidget() {
final ButtonStyle styleAuto = TextButton.styleFrom(
- foregroundColor: controller?.value.exposureMode == ExposureMode.auto
- ? Colors.orange
- : Colors.blue,
+ foregroundColor: controller?.value.exposureMode == ExposureMode.auto ? Colors.orange : Colors.blue,
);
final ButtonStyle styleLocked = TextButton.styleFrom(
- foregroundColor: controller?.value.exposureMode == ExposureMode.locked
- ? Colors.orange
- : Colors.blue,
+ foregroundColor: controller?.value.exposureMode == ExposureMode.locked ? Colors.orange : Colors.blue,
);
return SizeTransition(
@@ -419,9 +428,7 @@ class _CameraExampleHomeState extends State
),
TextButton(
style: styleLocked,
- onPressed: controller != null
- ? () => controller!.setExposureOffset(0.0)
- : null,
+ onPressed: controller != null ? () => controller!.setExposureOffset(0.0) : null,
child: const Text('RESET OFFSET'),
),
],
@@ -454,14 +461,10 @@ class _CameraExampleHomeState extends State
Widget _focusModeControlRowWidget() {
final ButtonStyle styleAuto = TextButton.styleFrom(
- foregroundColor: controller?.value.focusMode == FocusMode.auto
- ? Colors.orange
- : Colors.blue,
+ foregroundColor: controller?.value.focusMode == FocusMode.auto ? Colors.orange : Colors.blue,
);
final ButtonStyle styleLocked = TextButton.styleFrom(
- foregroundColor: controller?.value.focusMode == FocusMode.locked
- ? Colors.orange
- : Colors.blue,
+ foregroundColor: controller?.value.focusMode == FocusMode.locked ? Colors.orange : Colors.blue,
);
return SizeTransition(
@@ -477,9 +480,7 @@ class _CameraExampleHomeState extends State
children: [
TextButton(
style: styleAuto,
- onPressed: controller != null
- ? () => onSetFocusModeButtonPressed(FocusMode.auto)
- : null,
+ onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.auto) : null,
onLongPress: () {
if (controller != null) {
CameraPlatform.instance.setFocusPoint(
@@ -493,9 +494,7 @@ class _CameraExampleHomeState extends State
),
TextButton(
style: styleLocked,
- onPressed: controller != null
- ? () => onSetFocusModeButtonPressed(FocusMode.locked)
- : null,
+ onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.locked) : null,
child: const Text('LOCKED'),
),
],
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift
index d86662a9b8b3..c57ee1dfa569 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift
@@ -27,6 +27,9 @@ protocol AssetWriterInput: NSObjectProtocol {
var expectsMediaDataInRealTime: Bool { get set }
var isReadyForMoreMediaData: Bool { get }
+ /// The transform to apply to the visual media data before writing it.
+ var transform: CGAffineTransform { get set }
+
func append(_ sampleBuffer: CMSampleBuffer) -> Bool
}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift
index 117fd909d32e..0dbd949aa786 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift
@@ -95,6 +95,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate,
completion: @escaping (Result) -> Void
)
+ /// Sets the lens position to the given value in the [0, 1] range (inclusive) and locks focus.
+ func setLensPosition(_ position: Float, completion: @escaping (Result) -> Void)
+
func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (Result) -> Void)
func setVideoStabilizationMode(
@@ -111,6 +114,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate,
func pausePreview()
func resumePreview()
+ /// Applies a geometric transform (rotation, mirroring, optional crop) to all camera outputs.
+ func setTransform(_ transform: PlatformCameraTransform)
+
func setDescriptionWhileRecording(
_ cameraName: String,
withCompletion: @escaping (Result) -> Void
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift
index 43ef5f48916c..ff8d63609ba9 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift
@@ -477,6 +477,12 @@ extension CameraPlugin: CameraApi {
}
}
+ func setLensPosition(position: Double, completion: @escaping (Result) -> Void) {
+ captureSessionQueue.async { [weak self] in
+ self?.camera?.setLensPosition(Float(position), completion: completion)
+ }
+ }
+
func getMinZoomLevel(completion: @escaping (Result) -> Void) {
captureSessionQueue.async { [weak self] in
if let minZoom = self?.camera?.minimumAvailableZoomFactor {
@@ -555,4 +561,13 @@ extension CameraPlugin: CameraApi {
completion(.success(()))
}
}
+
+ func setTransform(
+ transform: PlatformCameraTransform, completion: @escaping (Result) -> Void
+ ) {
+ captureSessionQueue.async { [weak self] in
+ self?.camera?.setTransform(transform)
+ completion(.success(()))
+ }
+ }
}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift
index d119afb9474a..74b297a75192 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift
@@ -25,6 +25,14 @@ protocol CaptureConnection: NSObjectProtocol {
/// Corresponds to the preferredVideoStabilizationMode property of `AVCaptureConnection`
var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode { get set }
+ /// Corresponds to the `videoRotationAngle` property of `AVCaptureConnection` (iOS 17+).
+ @available(iOS 17.0, *)
+ var videoRotationAngle: CGFloat { get set }
+
+ /// Corresponds to the `isVideoRotationAngleSupported(_:)` method of `AVCaptureConnection` (iOS 17+).
+ @available(iOS 17.0, *)
+ func isVideoRotationAngleSupported(_ angle: CGFloat) -> Bool
+
}
extension AVCaptureConnection: CaptureConnection {}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift
index b007cb39b4d1..a87c5b1a7ba8 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift
@@ -37,6 +37,8 @@ protocol CaptureDevice: NSObjectProtocol {
func isFocusModeSupported(_ mode: AVCaptureDevice.FocusMode) -> Bool
var focusMode: AVCaptureDevice.FocusMode { get set }
var focusPointOfInterest: CGPoint { get set }
+ var lensPosition: Float { get }
+ func setFocusModeLocked(lensPosition: Float, completionHandler handler: ((CMTime) -> Void)?)
// Exposure
var isExposurePointOfInterestSupported: Bool { get }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
index 16d1637d23fa..3fa1af49d782 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
@@ -126,6 +126,12 @@ final class DefaultCamera: NSObject, Camera {
private var focusMode = PlatformFocusMode.auto
private var flashMode: PlatformFlashMode
+ /// The current camera transform applied to all outputs.
+ private var cameraTransform: PlatformCameraTransform?
+
+ /// Metal-backed Core Image context, lazily initialised. Used only when a crop is active.
+ private lazy var ciContext = CIContext(options: [.useSoftwareRenderer: false])
+
private static func pigeonErrorFromNSError(_ error: NSError) -> PigeonError {
return PigeonError(
code: "Error \(error.code)",
@@ -752,7 +758,9 @@ final class DefaultCamera: NSObject, Camera {
assert(path != nil, "Path must not be nil if no error.")
completion(.success(path!))
}
- }
+ },
+ cropRect: cameraTransform?.cropRect,
+ ciContext: cameraTransform?.cropRect != nil ? ciContext : nil
)
assert(
@@ -796,6 +804,8 @@ final class DefaultCamera: NSObject, Camera {
updateOrientation(orientation, forCaptureOutput: capturePhotoOutput)
updateOrientation(orientation, forCaptureOutput: captureVideoOutput)
+
+ applyConnectionTransform()
}
private func updateOrientation(
@@ -808,6 +818,84 @@ final class DefaultCamera: NSObject, Camera {
}
}
+ // MARK: - Transform
+
+ func setTransform(_ transform: PlatformCameraTransform) {
+ cameraTransform = transform
+ applyConnectionTransform()
+ }
+
+ /// Applies the current rotation/mirror transform at the hardware connection level.
+ ///
+ /// `AVCaptureConnection.videoRotationAngle` (iOS 17+) instructs the camera ISP to rotate
+ /// the pixel data in hardware – zero CPU/GPU cost – and the effect propagates to the
+ /// preview texture, image stream, video recording, and photo capture simultaneously.
+ private func applyConnectionTransform() {
+ guard let transform = cameraTransform else { return }
+
+ for output in [captureVideoOutput as CaptureOutput, capturePhotoOutput as CaptureOutput] {
+ guard let connection = output.connection(with: .video) else { continue }
+
+ if #available(iOS 17.0, *) {
+ // The iOS camera sensor's native orientation is landscape (0°). The
+ // system normally compensates with 90° to produce an upright portrait
+ // image. We offset the caller's angle by 90° so that
+ // rotationDegrees = 0 means "upright / no rotation" from the API
+ // user's point of view.
+ let angle = (transform.rotationDegrees + 90).truncatingRemainder(dividingBy: 360)
+ if connection.isVideoRotationAngleSupported(angle) {
+ connection.videoRotationAngle = angle
+ }
+ }
+
+ // Vertical flip is implemented as a composition: mirror horizontally + rotate 180°.
+ let mirrorH = transform.flipHorizontally != transform.flipVertically
+ if connection.isVideoMirroringSupported {
+ connection.isVideoMirrored = mirrorH
+ }
+ }
+ }
+
+ /// Crops `pixelBuffer` to the normalised rect from `transform.cropRect`.
+ ///
+ /// The crop is performed on the GPU via Metal-backed Core Image (`ciContext`).
+ /// Returns `nil` when allocation fails; callers should fall back to the original buffer.
+ private func applyCrop(
+ _ pixelBuffer: CVPixelBuffer, cropRect: PlatformRect
+ ) -> CVPixelBuffer? {
+ let fullWidth = CVPixelBufferGetWidth(pixelBuffer)
+ let fullHeight = CVPixelBufferGetHeight(pixelBuffer)
+
+ let cropX = cropRect.x * Double(fullWidth)
+ let cropY = cropRect.y * Double(fullHeight)
+ let cropW = cropRect.width * Double(fullWidth)
+ let cropH = cropRect.height * Double(fullHeight)
+
+ // Core Image origin is bottom-left; convert from top-left.
+ let ciCropRect = CGRect(
+ x: cropX,
+ y: Double(fullHeight) - cropY - cropH,
+ width: cropW,
+ height: cropH)
+
+ let ciImage = CIImage(cvPixelBuffer: pixelBuffer).cropped(to: ciCropRect)
+ .transformed(by: CGAffineTransform(translationX: -ciCropRect.origin.x, y: -ciCropRect.origin.y))
+
+ var outBuffer: CVPixelBuffer?
+ let attrs: [String: Any] = [
+ kCVPixelBufferPixelFormatTypeKey as String: videoFormat,
+ kCVPixelBufferWidthKey as String: Int(cropW),
+ kCVPixelBufferHeightKey as String: Int(cropH),
+ kCVPixelBufferIOSurfacePropertiesKey as String: [:],
+ ]
+ guard CVPixelBufferCreate(kCFAllocatorDefault, Int(cropW), Int(cropH), videoFormat, attrs as CFDictionary, &outBuffer) == kCVReturnSuccess,
+ let out = outBuffer
+ else { return nil }
+
+ ciContext.render(ciImage, to: out)
+ return out
+ }
+
private func videoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation)
-> AVCaptureVideoOrientation
{
@@ -928,6 +1016,25 @@ final class DefaultCamera: NSObject, Camera {
completion(.success(()))
}
+ func setLensPosition(
+ _ position: Float, completion: @escaping (Result) -> Void
+ ) {
+ guard position >= 0, position <= 1 else {
+ completion(
+ .failure(
+ PigeonError(
+ code: "LENS_POSITION_ERROR",
+ message:
+ "Lens position out of bounds (should be between 0.0 and 1.0).",
+ details: nil)))
+ return
+ }
+ try? captureDevice.lockForConfiguration()
+ captureDevice.setFocusModeLocked(lensPosition: position, completionHandler: nil)
+ captureDevice.unlockForConfiguration()
+ completion(.success(()))
+ }
+
private func applyFocusMode() {
applyFocusMode(focusMode, onDevice: captureDevice)
}
@@ -1160,6 +1267,9 @@ final class DefaultCamera: NSObject, Camera {
newConnection.videoOrientation = oldConnection.videoOrientation
}
+ // Re-apply any camera transform that was set by the caller.
+ applyConnectionTransform()
+
// Add the new connections to the session.
if !videoCaptureSession.canAddInput(captureVideoInput) {
completion(
@@ -1250,9 +1360,17 @@ final class DefaultCamera: NSObject, Camera {
) {
if output == captureVideoOutput.avOutput {
if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
+ // Apply crop transform if one is active (GPU path via Core Image / Metal).
+ // When no crop is set this is a zero-overhead fast path.
+ let processedBuffer: CVPixelBuffer
+ if let cropRect = cameraTransform?.cropRect {
+ processedBuffer = applyCrop(newBuffer, cropRect: cropRect) ?? newBuffer
+ } else {
+ processedBuffer = newBuffer
+ }
pixelBufferSynchronizationQueue.sync {
- latestPixelBuffer = newBuffer
+ latestPixelBuffer = processedBuffer
}
onFrameAvailable?()
@@ -1323,10 +1441,17 @@ final class DefaultCamera: NSObject, Camera {
}
if output == captureVideoOutput.avOutput {
- let nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
+ let rawBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
let nextSampleTime = CMTimeSubtract(sampleTime, recordingTimeOffset)
if nextSampleTime > lastAppendedVideoSampleTime {
- let _ = videoAdaptor?.append(nextBuffer!, withPresentationTime: nextSampleTime)
+ // Apply crop transform to the recorded frame if needed.
+ let writeBuffer: CVPixelBuffer
+ if let cropRect = cameraTransform?.cropRect {
+ writeBuffer = applyCrop(rawBuffer, cropRect: cropRect) ?? rawBuffer
+ } else {
+ writeBuffer = rawBuffer
+ }
+ let _ = videoAdaptor?.append(writeBuffer, withPresentationTime: nextSampleTime)
lastAppendedVideoSampleTime = nextSampleTime
}
} else {
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift
index 47e6abbbe750..f2b909166992 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift
@@ -1,7 +1,7 @@
// Copyright 2013 The Flutter Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Autogenerated from Pigeon (v26.1.5), do not edit directly.
+// Autogenerated from Pigeon (v26.2.3), do not edit directly.
// See also: https://pub.dev/packages/pigeon
import Foundation
@@ -59,9 +59,7 @@ private func wrapError(_ error: Any) -> [Any?] {
}
private func createConnectionError(withChannelName channelName: String) -> PigeonError {
- return PigeonError(
- code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.",
- details: "")
+ return PigeonError(code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", details: "")
}
private func isNullish(_ value: Any?) -> Bool {
@@ -116,12 +114,12 @@ func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool {
func deepHashMessages(value: Any?, hasher: inout Hasher) {
if let valueList = value as? [AnyHashable] {
- for item in valueList { deepHashMessages(value: item, hasher: &hasher) }
- return
+ for item in valueList { deepHashMessages(value: item, hasher: &hasher) }
+ return
}
if let valueDict = value as? [AnyHashable: AnyHashable] {
- for key in valueDict.keys {
+ for key in valueDict.keys {
hasher.combine(key)
deepHashMessages(value: valueDict[key]!, hasher: &hasher)
}
@@ -135,6 +133,8 @@ func deepHashMessages(value: Any?, hasher: inout Hasher) {
return hasher.combine(String(describing: value))
}
+
+
enum PlatformCameraLensDirection: Int {
/// Front facing camera (a user looking at the screen is seen by the camera).
case front = 0
@@ -215,6 +215,7 @@ struct PlatformCameraDescription: Hashable {
/// The type of the camera lens.
var lensType: PlatformCameraLensType
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraDescription? {
let name = pigeonVar_list[0] as! String
@@ -235,8 +236,7 @@ struct PlatformCameraDescription: Hashable {
]
}
static func == (lhs: PlatformCameraDescription, rhs: PlatformCameraDescription) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -255,6 +255,7 @@ struct PlatformCameraState: Hashable {
/// Whether setting focus points is supported.
var focusPointSupported: Bool
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraState? {
let previewSize = pigeonVar_list[0] as! PlatformSize
@@ -281,8 +282,7 @@ struct PlatformCameraState: Hashable {
]
}
static func == (lhs: PlatformCameraState, rhs: PlatformCameraState) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -299,6 +299,7 @@ struct PlatformCameraImageData: Hashable {
var sensorExposureTimeNanoseconds: Int64
var sensorSensitivity: Double
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImageData? {
let formatCode = pigeonVar_list[0] as! Int64
@@ -331,8 +332,7 @@ struct PlatformCameraImageData: Hashable {
]
}
static func == (lhs: PlatformCameraImageData, rhs: PlatformCameraImageData) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -345,6 +345,7 @@ struct PlatformCameraImagePlane: Hashable {
var width: Int64
var height: Int64
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImagePlane? {
let bytes = pigeonVar_list[0] as! FlutterStandardTypedData
@@ -368,8 +369,7 @@ struct PlatformCameraImagePlane: Hashable {
]
}
static func == (lhs: PlatformCameraImagePlane, rhs: PlatformCameraImagePlane) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -383,6 +383,7 @@ struct PlatformMediaSettings: Hashable {
var audioBitrate: Int64? = nil
var enableAudio: Bool
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformMediaSettings? {
let resolutionPreset = pigeonVar_list[0] as! PlatformResolutionPreset
@@ -409,8 +410,7 @@ struct PlatformMediaSettings: Hashable {
]
}
static func == (lhs: PlatformMediaSettings, rhs: PlatformMediaSettings) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -421,6 +421,7 @@ struct PlatformPoint: Hashable {
var x: Double
var y: Double
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformPoint? {
let x = pigeonVar_list[0] as! Double
@@ -438,8 +439,7 @@ struct PlatformPoint: Hashable {
]
}
static func == (lhs: PlatformPoint, rhs: PlatformPoint) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
- }
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -450,6 +450,7 @@ struct PlatformSize: Hashable {
var width: Double
var height: Double
+
// swift-format-ignore: AlwaysUseLowerCamelCase
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformSize? {
let width = pigeonVar_list[0] as! Double
@@ -467,8 +468,99 @@ struct PlatformSize: Hashable {
]
}
static func == (lhs: PlatformSize, rhs: PlatformSize) -> Bool {
- return deepEqualsMessages(lhs.toList(), rhs.toList())
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
+ func hash(into hasher: inout Hasher) {
+ deepHashMessages(value: toList(), hasher: &hasher)
+ }
+}
+
+/// Generated class from Pigeon that represents data sent in messages.
+struct PlatformRect: Hashable {
+ var x: Double
+ var y: Double
+ var width: Double
+ var height: Double
+
+
+ // swift-format-ignore: AlwaysUseLowerCamelCase
+ static func fromList(_ pigeonVar_list: [Any?]) -> PlatformRect? {
+ let x = pigeonVar_list[0] as! Double
+ let y = pigeonVar_list[1] as! Double
+ let width = pigeonVar_list[2] as! Double
+ let height = pigeonVar_list[3] as! Double
+
+ return PlatformRect(
+ x: x,
+ y: y,
+ width: width,
+ height: height
+ )
+ }
+ func toList() -> [Any?] {
+ return [
+ x,
+ y,
+ width,
+ height,
+ ]
}
+ static func == (lhs: PlatformRect, rhs: PlatformRect) -> Bool {
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
+ func hash(into hasher: inout Hasher) {
+ deepHashMessages(value: toList(), hasher: &hasher)
+ }
+}
+
+/// Pigeon version of a geometric camera transform.
+///
+/// Rotation and mirroring are applied at the hardware connection level
+/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no
+/// CPU/GPU cost and the effect is visible in the preview, image stream, photos,
+/// and recorded video simultaneously.
+///
+/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small
+/// (~1–3 ms) cost per frame.
+///
+/// Generated class from Pigeon that represents data sent in messages.
+struct PlatformCameraTransform: Hashable {
+ /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270.
+ var rotationDegrees: Double
+ /// Whether to flip the image along the horizontal axis (left–right mirror).
+ var flipHorizontally: Bool
+ /// Whether to flip the image along the vertical axis (upside-down mirror).
+ ///
+ /// Implemented as a 180° rotation composed with a horizontal flip.
+ var flipVertically: Bool
+ /// Optional crop rectangle in normalized (0,1) coordinate space.
+ ///
+ /// Applied after rotation/mirroring. Null means no crop.
+ var cropRect: PlatformRect? = nil
+
+
+ // swift-format-ignore: AlwaysUseLowerCamelCase
+ static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraTransform? {
+ let rotationDegrees = pigeonVar_list[0] as! Double
+ let flipHorizontally = pigeonVar_list[1] as! Bool
+ let flipVertically = pigeonVar_list[2] as! Bool
+ let cropRect: PlatformRect? = nilOrValue(pigeonVar_list[3])
+
+ return PlatformCameraTransform(
+ rotationDegrees: rotationDegrees,
+ flipHorizontally: flipHorizontally,
+ flipVertically: flipVertically,
+ cropRect: cropRect
+ )
+ }
+ func toList() -> [Any?] {
+ return [
+ rotationDegrees,
+ flipHorizontally,
+ flipVertically,
+ cropRect,
+ ]
+ }
+ static func == (lhs: PlatformCameraTransform, rhs: PlatformCameraTransform) -> Bool {
+ return deepEqualsMessages(lhs.toList(), rhs.toList()) }
func hash(into hasher: inout Hasher) {
deepHashMessages(value: toList(), hasher: &hasher)
}
@@ -551,6 +643,10 @@ private class MessagesPigeonCodecReader: FlutterStandardReader {
return PlatformPoint.fromList(self.readValue() as! [Any?])
case 145:
return PlatformSize.fromList(self.readValue() as! [Any?])
+ case 146:
+ return PlatformRect.fromList(self.readValue() as! [Any?])
+ case 147:
+ return PlatformCameraTransform.fromList(self.readValue() as! [Any?])
default:
return super.readValue(ofType: type)
}
@@ -610,6 +706,12 @@ private class MessagesPigeonCodecWriter: FlutterStandardWriter {
} else if let value = value as? PlatformSize {
super.writeByte(145)
super.writeValue(value.toList())
+ } else if let value = value as? PlatformRect {
+ super.writeByte(146)
+ super.writeValue(value.toList())
+ } else if let value = value as? PlatformCameraTransform {
+ super.writeByte(147)
+ super.writeValue(value.toList())
} else {
super.writeValue(value)
}
@@ -630,22 +732,17 @@ class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable {
static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter())
}
-var messagesPigeonMethodCodec = FlutterStandardMethodCodec(
- readerWriter: MessagesPigeonCodecReaderWriter())
+var messagesPigeonMethodCodec = FlutterStandardMethodCodec(readerWriter: MessagesPigeonCodecReaderWriter());
+
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol CameraApi {
/// Returns the list of available cameras.
- func getAvailableCameras(
- completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void)
+ func getAvailableCameras(completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void)
/// Create a new camera with the given settings, and returns its ID.
- func create(
- cameraName: String, settings: PlatformMediaSettings,
- completion: @escaping (Result) -> Void)
+ func create(cameraName: String, settings: PlatformMediaSettings, completion: @escaping (Result) -> Void)
/// Initializes the camera with the given ID.
- func initialize(
- cameraId: Int64, imageFormat: PlatformImageFormatGroup,
- completion: @escaping (Result) -> Void)
+ func initialize(cameraId: Int64, imageFormat: PlatformImageFormatGroup, completion: @escaping (Result) -> Void)
/// Begins streaming frames from the camera.
func startImageStream(completion: @escaping (Result) -> Void)
/// Stops streaming frames from the camera.
@@ -659,8 +756,7 @@ protocol CameraApi {
/// and any associated resources can be cleaned up.
func dispose(cameraId: Int64, completion: @escaping (Result) -> Void)
/// Locks the camera capture to the current device orientation.
- func lockCaptureOrientation(
- orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void)
+ func lockCaptureOrientation(orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void)
/// Unlocks camera capture orientation, allowing it to automatically adapt to
/// device orientation.
func unlockCaptureOrientation(completion: @escaping (Result) -> Void)
@@ -681,12 +777,16 @@ protocol CameraApi {
/// Switches the camera to the given flash mode.
func setFlashMode(mode: PlatformFlashMode, completion: @escaping (Result) -> Void)
/// Switches the camera to the given exposure mode.
- func setExposureMode(
- mode: PlatformExposureMode, completion: @escaping (Result) -> Void)
+ func setExposureMode(mode: PlatformExposureMode, completion: @escaping (Result) -> Void)
/// Anchors auto-exposure to the given point in (0,1) coordinate space.
///
/// A null value resets to the default exposure point.
func setExposurePoint(point: PlatformPoint?, completion: @escaping (Result) -> Void)
+ /// Sets the lens position manually to the given value.
+ /// The value should be between 0 and 1.
+ /// 0 means the lens is at the minimum position.
+ /// 1 means the lens is at the maximum position.
+ func setLensPosition(position: Double, completion: @escaping (Result) -> Void)
/// Returns the minimum exposure offset supported by the camera.
func getMinExposureOffset(completion: @escaping (Result) -> Void)
/// Returns the maximum exposure offset supported by the camera.
@@ -706,11 +806,9 @@ protocol CameraApi {
/// Sets the zoom factor.
func setZoomLevel(zoom: Double, completion: @escaping (Result) -> Void)
/// Sets the video stabilization mode.
- func setVideoStabilizationMode(
- mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void)
+ func setVideoStabilizationMode(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void)
/// Gets if the given video stabilization mode is supported.
- func isVideoStabilizationModeSupported(
- mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void)
+ func isVideoStabilizationModeSupported(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void)
/// Pauses streaming of preview frames.
func pausePreview(completion: @escaping (Result) -> Void)
/// Resumes a previously paused preview stream.
@@ -718,25 +816,27 @@ protocol CameraApi {
/// Changes the camera used while recording video.
///
/// This should only be called while video recording is active.
- func updateDescriptionWhileRecording(
- cameraName: String, completion: @escaping (Result) -> Void)
+ func updateDescriptionWhileRecording(cameraName: String, completion: @escaping (Result) -> Void)
/// Sets the file format used for taking pictures.
- func setImageFileFormat(
- format: PlatformImageFileFormat, completion: @escaping (Result) -> Void)
+ func setImageFileFormat(format: PlatformImageFileFormat, completion: @escaping (Result) -> Void)
+ /// Applies a geometric transform (rotation, mirroring, crop) to the camera
+ /// output. The transform is applied to the preview, image stream, captured
+ /// photos, and recorded video simultaneously.
+ ///
+ /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS
+ /// versions the rotation part of the transform is silently ignored and only
+ /// the crop (if any) is applied in software.
+ func setTransform(transform: PlatformCameraTransform, completion: @escaping (Result) -> Void)
}
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
class CameraApiSetup {
static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared }
/// Sets up an instance of `CameraApi` to handle messages through the `binaryMessenger`.
- static func setUp(
- binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = ""
- ) {
+ static func setUp(binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = "") {
let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : ""
/// Returns the list of available cameras.
- let getAvailableCamerasChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let getAvailableCamerasChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
getAvailableCamerasChannel.setMessageHandler { _, reply in
api.getAvailableCameras { result in
@@ -752,9 +852,7 @@ class CameraApiSetup {
getAvailableCamerasChannel.setMessageHandler(nil)
}
/// Create a new camera with the given settings, and returns its ID.
- let createChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let createChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
createChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -773,9 +871,7 @@ class CameraApiSetup {
createChannel.setMessageHandler(nil)
}
/// Initializes the camera with the given ID.
- let initializeChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let initializeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
initializeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -794,9 +890,7 @@ class CameraApiSetup {
initializeChannel.setMessageHandler(nil)
}
/// Begins streaming frames from the camera.
- let startImageStreamChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let startImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
startImageStreamChannel.setMessageHandler { _, reply in
api.startImageStream { result in
@@ -812,9 +906,7 @@ class CameraApiSetup {
startImageStreamChannel.setMessageHandler(nil)
}
/// Stops streaming frames from the camera.
- let stopImageStreamChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let stopImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
stopImageStreamChannel.setMessageHandler { _, reply in
api.stopImageStream { result in
@@ -833,10 +925,7 @@ class CameraApiSetup {
/// frame sent.
///
/// This is used to throttle sending frames across the channel.
- let receivedImageStreamDataChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let receivedImageStreamDataChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
receivedImageStreamDataChannel.setMessageHandler { _, reply in
api.receivedImageStreamData { result in
@@ -853,9 +942,7 @@ class CameraApiSetup {
}
/// Indicates that the given camera is no longer being used on the Dart side,
/// and any associated resources can be cleaned up.
- let disposeChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let disposeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
disposeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -873,10 +960,7 @@ class CameraApiSetup {
disposeChannel.setMessageHandler(nil)
}
/// Locks the camera capture to the current device orientation.
- let lockCaptureOrientationChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let lockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
lockCaptureOrientationChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -895,10 +979,7 @@ class CameraApiSetup {
}
/// Unlocks camera capture orientation, allowing it to automatically adapt to
/// device orientation.
- let unlockCaptureOrientationChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let unlockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
unlockCaptureOrientationChannel.setMessageHandler { _, reply in
api.unlockCaptureOrientation { result in
@@ -915,9 +996,7 @@ class CameraApiSetup {
}
/// Takes a picture with the current settings, and returns the path to the
/// resulting file.
- let takePictureChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let takePictureChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
takePictureChannel.setMessageHandler { _, reply in
api.takePicture { result in
@@ -933,10 +1012,7 @@ class CameraApiSetup {
takePictureChannel.setMessageHandler(nil)
}
/// Does any preprocessing necessary before beginning to record video.
- let prepareForVideoRecordingChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let prepareForVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
prepareForVideoRecordingChannel.setMessageHandler { _, reply in
api.prepareForVideoRecording { result in
@@ -953,9 +1029,7 @@ class CameraApiSetup {
}
/// Begins recording video, optionally enabling streaming to Dart at the same
/// time.
- let startVideoRecordingChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let startVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
startVideoRecordingChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -973,9 +1047,7 @@ class CameraApiSetup {
startVideoRecordingChannel.setMessageHandler(nil)
}
/// Stops recording video, and results the path to the resulting file.
- let stopVideoRecordingChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let stopVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
stopVideoRecordingChannel.setMessageHandler { _, reply in
api.stopVideoRecording { result in
@@ -991,9 +1063,7 @@ class CameraApiSetup {
stopVideoRecordingChannel.setMessageHandler(nil)
}
/// Pauses video recording.
- let pauseVideoRecordingChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let pauseVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
pauseVideoRecordingChannel.setMessageHandler { _, reply in
api.pauseVideoRecording { result in
@@ -1009,9 +1079,7 @@ class CameraApiSetup {
pauseVideoRecordingChannel.setMessageHandler(nil)
}
/// Resumes a previously paused video recording.
- let resumeVideoRecordingChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let resumeVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
resumeVideoRecordingChannel.setMessageHandler { _, reply in
api.resumeVideoRecording { result in
@@ -1027,9 +1095,7 @@ class CameraApiSetup {
resumeVideoRecordingChannel.setMessageHandler(nil)
}
/// Switches the camera to the given flash mode.
- let setFlashModeChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setFlashModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setFlashModeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1047,9 +1113,7 @@ class CameraApiSetup {
setFlashModeChannel.setMessageHandler(nil)
}
/// Switches the camera to the given exposure mode.
- let setExposureModeChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setExposureModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setExposureModeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1069,9 +1133,7 @@ class CameraApiSetup {
/// Anchors auto-exposure to the given point in (0,1) coordinate space.
///
/// A null value resets to the default exposure point.
- let setExposurePointChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setExposurePointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setExposurePointChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1088,10 +1150,29 @@ class CameraApiSetup {
} else {
setExposurePointChannel.setMessageHandler(nil)
}
+ /// Sets the lens position manually to the given value.
+ /// The value should be between 0 and 1.
+ /// 0 means the lens is at the minimum position.
+ /// 1 means the lens is at the maximum position.
+ let setLensPositionChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
+ if let api = api {
+ setLensPositionChannel.setMessageHandler { message, reply in
+ let args = message as! [Any?]
+ let positionArg = args[0] as! Double
+ api.setLensPosition(position: positionArg) { result in
+ switch result {
+ case .success:
+ reply(wrapResult(nil))
+ case .failure(let error):
+ reply(wrapError(error))
+ }
+ }
+ }
+ } else {
+ setLensPositionChannel.setMessageHandler(nil)
+ }
/// Returns the minimum exposure offset supported by the camera.
- let getMinExposureOffsetChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let getMinExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
getMinExposureOffsetChannel.setMessageHandler { _, reply in
api.getMinExposureOffset { result in
@@ -1107,9 +1188,7 @@ class CameraApiSetup {
getMinExposureOffsetChannel.setMessageHandler(nil)
}
/// Returns the maximum exposure offset supported by the camera.
- let getMaxExposureOffsetChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let getMaxExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
getMaxExposureOffsetChannel.setMessageHandler { _, reply in
api.getMaxExposureOffset { result in
@@ -1125,9 +1204,7 @@ class CameraApiSetup {
getMaxExposureOffsetChannel.setMessageHandler(nil)
}
/// Sets the exposure offset manually to the given value.
- let setExposureOffsetChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setExposureOffsetChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1145,9 +1222,7 @@ class CameraApiSetup {
setExposureOffsetChannel.setMessageHandler(nil)
}
/// Switches the camera to the given focus mode.
- let setFocusModeChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setFocusModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setFocusModeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1167,9 +1242,7 @@ class CameraApiSetup {
/// Anchors auto-focus to the given point in (0,1) coordinate space.
///
/// A null value resets to the default focus point.
- let setFocusPointChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setFocusPointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setFocusPointChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1187,9 +1260,7 @@ class CameraApiSetup {
setFocusPointChannel.setMessageHandler(nil)
}
/// Returns the minimum zoom level supported by the camera.
- let getMinZoomLevelChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let getMinZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
getMinZoomLevelChannel.setMessageHandler { _, reply in
api.getMinZoomLevel { result in
@@ -1205,9 +1276,7 @@ class CameraApiSetup {
getMinZoomLevelChannel.setMessageHandler(nil)
}
/// Returns the maximum zoom level supported by the camera.
- let getMaxZoomLevelChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let getMaxZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
getMaxZoomLevelChannel.setMessageHandler { _, reply in
api.getMaxZoomLevel { result in
@@ -1223,9 +1292,7 @@ class CameraApiSetup {
getMaxZoomLevelChannel.setMessageHandler(nil)
}
/// Sets the zoom factor.
- let setZoomLevelChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setZoomLevelChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1243,10 +1310,7 @@ class CameraApiSetup {
setZoomLevelChannel.setMessageHandler(nil)
}
/// Sets the video stabilization mode.
- let setVideoStabilizationModeChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setVideoStabilizationModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setVideoStabilizationModeChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1264,10 +1328,7 @@ class CameraApiSetup {
setVideoStabilizationModeChannel.setMessageHandler(nil)
}
/// Gets if the given video stabilization mode is supported.
- let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
isVideoStabilizationModeSupportedChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1285,9 +1346,7 @@ class CameraApiSetup {
isVideoStabilizationModeSupportedChannel.setMessageHandler(nil)
}
/// Pauses streaming of preview frames.
- let pausePreviewChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let pausePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
pausePreviewChannel.setMessageHandler { _, reply in
api.pausePreview { result in
@@ -1303,9 +1362,7 @@ class CameraApiSetup {
pausePreviewChannel.setMessageHandler(nil)
}
/// Resumes a previously paused preview stream.
- let resumePreviewChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let resumePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
resumePreviewChannel.setMessageHandler { _, reply in
api.resumePreview { result in
@@ -1323,10 +1380,7 @@ class CameraApiSetup {
/// Changes the camera used while recording video.
///
/// This should only be called while video recording is active.
- let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel(
- name:
- "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
updateDescriptionWhileRecordingChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1344,9 +1398,7 @@ class CameraApiSetup {
updateDescriptionWhileRecordingChannel.setMessageHandler(nil)
}
/// Sets the file format used for taking pictures.
- let setImageFileFormatChannel = FlutterBasicMessageChannel(
- name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)",
- binaryMessenger: binaryMessenger, codec: codec)
+ let setImageFileFormatChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
setImageFileFormatChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
@@ -1363,6 +1415,30 @@ class CameraApiSetup {
} else {
setImageFileFormatChannel.setMessageHandler(nil)
}
+ /// Applies a geometric transform (rotation, mirroring, crop) to the camera
+ /// output. The transform is applied to the preview, image stream, captured
+ /// photos, and recorded video simultaneously.
+ ///
+ /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS
+ /// versions the rotation part of the transform is silently ignored and only
+ /// the crop (if any) is applied in software.
+ let setTransformChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setTransform\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
+ if let api = api {
+ setTransformChannel.setMessageHandler { message, reply in
+ let args = message as! [Any?]
+ let transformArg = args[0] as! PlatformCameraTransform
+ api.setTransform(transform: transformArg) { result in
+ switch result {
+ case .success:
+ reply(wrapResult(nil))
+ case .failure(let error):
+ reply(wrapError(error))
+ }
+ }
+ }
+ } else {
+ setTransformChannel.setMessageHandler(nil)
+ }
}
}
@@ -1416,31 +1492,25 @@ class PigeonEventSink {
}
class ImageDataStreamStreamHandler: PigeonEventChannelWrapper {
- static func register(
- with messenger: FlutterBinaryMessenger,
- instanceName: String = "",
- streamHandler: ImageDataStreamStreamHandler
- ) {
- var channelName =
- "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream"
+ static func register(with messenger: FlutterBinaryMessenger,
+ instanceName: String = "",
+ streamHandler: ImageDataStreamStreamHandler) {
+ var channelName = "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream"
if !instanceName.isEmpty {
channelName += ".\(instanceName)"
}
let internalStreamHandler = PigeonStreamHandler(wrapper: streamHandler)
- let channel = FlutterEventChannel(
- name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec)
+ let channel = FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec)
channel.setStreamHandler(internalStreamHandler)
}
}
-
+
/// Handler for native callbacks that are not tied to a specific camera ID.
///
/// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift.
protocol CameraGlobalEventApiProtocol {
/// Called when the device's physical orientation changes.
- func deviceOrientationChanged(
- orientation orientationArg: PlatformDeviceOrientation,
- completion: @escaping (Result) -> Void)
+ func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void)
}
class CameraGlobalEventApi: CameraGlobalEventApiProtocol {
private let binaryMessenger: FlutterBinaryMessenger
@@ -1453,14 +1523,9 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol {
return MessagesPigeonCodec.shared
}
/// Called when the device's physical orientation changes.
- func deviceOrientationChanged(
- orientation orientationArg: PlatformDeviceOrientation,
- completion: @escaping (Result) -> Void
- ) {
- let channelName: String =
- "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)"
- let channel = FlutterBasicMessageChannel(
- name: channelName, binaryMessenger: binaryMessenger, codec: codec)
+ func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) {
+ let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)"
+ let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec)
channel.sendMessage([orientationArg] as [Any?]) { response in
guard let listResponse = response as? [Any?] else {
completion(.failure(createConnectionError(withChannelName: channelName)))
@@ -1484,9 +1549,7 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol {
/// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift.
protocol CameraEventApiProtocol {
/// Called when the camera is inialitized for use.
- func initialized(
- initialState initialStateArg: PlatformCameraState,
- completion: @escaping (Result) -> Void)
+ func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void)
/// Called when an error occurs in the camera.
///
/// This should be used for errors that occur outside of the context of
@@ -1504,14 +1567,9 @@ class CameraEventApi: CameraEventApiProtocol {
return MessagesPigeonCodec.shared
}
/// Called when the camera is inialitized for use.
- func initialized(
- initialState initialStateArg: PlatformCameraState,
- completion: @escaping (Result) -> Void
- ) {
- let channelName: String =
- "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)"
- let channel = FlutterBasicMessageChannel(
- name: channelName, binaryMessenger: binaryMessenger, codec: codec)
+ func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void) {
+ let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)"
+ let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec)
channel.sendMessage([initialStateArg] as [Any?]) { response in
guard let listResponse = response as? [Any?] else {
completion(.failure(createConnectionError(withChannelName: channelName)))
@@ -1531,12 +1589,9 @@ class CameraEventApi: CameraEventApiProtocol {
///
/// This should be used for errors that occur outside of the context of
/// handling a specific HostApi call, such as during streaming.
- func error(message messageArg: String, completion: @escaping (Result) -> Void)
- {
- let channelName: String =
- "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)"
- let channel = FlutterBasicMessageChannel(
- name: channelName, binaryMessenger: binaryMessenger, codec: codec)
+ func error(message messageArg: String, completion: @escaping (Result) -> Void) {
+ let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)"
+ let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec)
channel.sendMessage([messageArg] as [Any?]) { response in
guard let listResponse = response as? [Any?] else {
completion(.failure(createConnectionError(withChannelName: channelName)))
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift
index 35050120e118..824cd4dd37f6 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift
@@ -3,6 +3,7 @@
// found in the LICENSE file.
import AVFoundation
+import CoreImage
import Flutter
import Foundation
@@ -25,6 +26,13 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {
/// The completion handler block for capture and save photo operations.
let completionHandler: SavePhotoDelegateCompletionHandler
+ /// Optional crop rectangle in normalized (0,1) coordinate space.
+ /// When non-nil the photo is cropped (GPU path) before it is written to disk.
+ private let cropRect: PlatformRect?
+
+ /// Core Image context shared with the camera (Metal-backed). Only used when `cropRect` is set.
+ private let ciContext: CIContext?
+
/// The path for captured photo file.
/// Exposed for unit tests to verify the captured photo file path.
var filePath: String {
@@ -36,14 +44,20 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {
/// ioQueue - the queue on which captured photos are written to disk.
/// completionHandler - The completion handler block for save photo operations. Can
/// be called from either main queue or IO queue.
+ /// cropRect - optional crop in normalized (0,1) coordinates; applied before writing.
+ /// ciContext - Core Image context to use for crop rendering; must be non-nil when cropRect is set.
init(
path: String,
ioQueue: DispatchQueue,
- completionHandler: @escaping SavePhotoDelegateCompletionHandler
+ completionHandler: @escaping SavePhotoDelegateCompletionHandler,
+ cropRect: PlatformRect? = nil,
+ ciContext: CIContext? = nil
) {
self.path = path
self.ioQueue = ioQueue
self.completionHandler = completionHandler
+ self.cropRect = cropRect
+ self.ciContext = ciContext
super.init()
}
@@ -65,7 +79,42 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {
do {
let data = photoDataProvider()
- try data?.writeToPath(strongSelf.path, options: .atomic)
+ let finalData: WritableData?
+
+ // If a crop is requested, apply it in Core Image before writing.
+ if let crop = strongSelf.cropRect,
+ let ctx = strongSelf.ciContext,
+ let rawData = data as? Data
+ {
+ let ci = CIImage(data: rawData)
+ let fullW = ci.map { Double($0.extent.width) } ?? 0
+ let fullH = ci.map { Double($0.extent.height) } ?? 0
+ if let ci = ci, fullW > 0, fullH > 0 {
+ // Core Image origin is bottom-left; convert from top-left.
+ let ciCrop = CGRect(
+ x: crop.x * fullW,
+ y: (1.0 - crop.y - crop.height) * fullH,
+ width: crop.width * fullW,
+ height: crop.height * fullH)
+ let cropped = ci.cropped(to: ciCrop)
+ .transformed(
+ by: CGAffineTransform(translationX: -ciCrop.origin.x, y: -ciCrop.origin.y))
+ if let encoded = ctx.jpegRepresentation(
+ of: cropped,
+ colorSpace: CGColorSpaceCreateDeviceRGB())
+ {
+ finalData = encoded
+ } else {
+ finalData = data
+ }
+ } else {
+ finalData = data
+ }
+ } else {
+ finalData = data
+ }
+
+ try finalData?.writeToPath(strongSelf.path, options: .atomic)
strongSelf.completionHandler(strongSelf.path, nil)
} catch {
strongSelf.completionHandler(nil, error)
diff --git a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
index e6f7340ed7a4..da12c0d6ec34 100644
--- a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
+++ b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
@@ -3,3 +3,4 @@
// found in the LICENSE file.
export 'src/avfoundation_camera.dart';
+export 'src/camera_transform.dart';
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
index 3907ed89219b..8dd7019596b3 100644
--- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
+++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
@@ -11,6 +11,7 @@ import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:stream_transform/stream_transform.dart';
+import 'camera_transform.dart';
import 'messages.g.dart';
import 'type_conversion.dart';
import 'utils.dart';
@@ -191,6 +192,14 @@ class AVFoundationCamera extends CameraPlatform {
await _hostApi.unlockCaptureOrientation();
}
+ /// Sets the lens position manually to the given value.
+ /// The value must be in the inclusive range [0, 1]:
+ /// 0 places the lens at its minimum position, and
+ /// 1 places the lens at its maximum position.
+ Future setLensPosition(double position) async {
+ await _hostApi.setLensPosition(position);
+ }
+
@override
Future takePicture(int cameraId) async {
final String path = await _hostApi.takePicture();
@@ -442,6 +451,33 @@ class AVFoundationCamera extends CameraPlatform {
await _hostApi.setImageFileFormat(_pigeonImageFileFormat(format));
}
+ /// Applies a geometric [transform] to all camera outputs on iOS.
+ ///
+ /// The [cameraId] parameter is currently unused on iOS (there is only ever
+ /// one active camera) but is included for API consistency.
+ ///
+ /// - Rotation and mirroring are applied at the hardware AVCaptureConnection
+ /// level (requires iOS 17+) and cost nothing in CPU / GPU.
+ /// - Crop is applied per-frame by Core Image on the GPU and costs ~1–3 ms
+ /// per frame. Pass `null` (or omit `cropRect`) to disable it.
+ Future setTransform(int cameraId, CameraTransform transform) async {
+ await _hostApi.setTransform(
+ PlatformCameraTransform(
+ rotationDegrees: transform.rotationDegrees,
+ flipHorizontally: transform.flipHorizontally,
+ flipVertically: transform.flipVertically,
+ cropRect: transform.cropRect == null
+ ? null
+ : PlatformRect(
+ x: transform.cropRect!.x,
+ y: transform.cropRect!.y,
+ width: transform.cropRect!.width,
+ height: transform.cropRect!.height,
+ ),
+ ),
+ );
+ }
+
@override
Widget buildPreview(int cameraId) {
return Texture(textureId: cameraId);
diff --git a/packages/camera/camera_avfoundation/lib/src/camera_transform.dart b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart
new file mode 100644
index 000000000000..6c3e26cc3c0b
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// A normalized rectangle within the (0,1) coordinate space used to describe
+/// a crop region for [CameraTransform].
+///
+/// The origin (0,0) is the top-left corner of the image.
+class CameraTransformRect {
+ /// Creates a normalized crop rectangle.
+ const CameraTransformRect({
+ required this.x,
+ required this.y,
+ required this.width,
+ required this.height,
+ }) : assert(x >= 0 && x <= 1, 'x must be in [0, 1]'),
+ assert(y >= 0 && y <= 1, 'y must be in [0, 1]'),
+ assert(width > 0 && width <= 1, 'width must be in (0, 1]'),
+ assert(height > 0 && height <= 1, 'height must be in (0, 1]'),
+ assert(x + width <= 1, 'x + width must be <= 1'),
+ assert(y + height <= 1, 'y + height must be <= 1');
+
+ /// Left edge in normalized [0,1] coordinates.
+ final double x;
+
+ /// Top edge in normalized [0,1] coordinates.
+ final double y;
+
+ /// Width in normalized [0,1] coordinates.
+ final double width;
+
+ /// Height in normalized [0,1] coordinates.
+ final double height;
+}
+
+/// A geometric transform to apply to all camera outputs simultaneously:
+/// the preview texture, the image stream, captured photos, and recorded video.
+///
+/// On iOS 17+ rotation and mirroring are applied at the hardware
+/// `AVCaptureConnection` level (zero CPU / GPU cost). Crop uses Core Image on
+/// the GPU (~1–3 ms per frame).
+class CameraTransform {
+ /// Creates a camera transform.
+ ///
+ /// Defaults to identity (no rotation, no flip, no crop).
+ const CameraTransform({
+ this.rotationDegrees = 0,
+ this.flipHorizontally = false,
+ this.flipVertically = false,
+ this.cropRect,
+ }) : assert(
+ rotationDegrees == 0 ||
+ rotationDegrees == 90 ||
+ rotationDegrees == 180 ||
+ rotationDegrees == 270,
+ 'rotationDegrees must be 0, 90, 180, or 270',
+ );
+
+ /// Clockwise rotation in degrees.
+ ///
+ /// Must be one of: `0`, `90`, `180`, `270`.
+ final double rotationDegrees;
+
+ /// Flip the image left–right (horizontal mirror).
+ final bool flipHorizontally;
+
+ /// Flip the image upside-down (vertical mirror).
+ ///
+ /// Implemented as a horizontal flip composed with a 180° rotation.
+ final bool flipVertically;
+
+ /// Optional crop region in normalized (0,1) coordinate space.
+ ///
+ /// Applied after rotation and mirroring. `null` means no crop.
+ final CameraTransformRect? cropRect;
+}
diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
index 46c94d58f8a1..4df80c329803 100644
--- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart
+++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
@@ -1,21 +1,40 @@
// Copyright 2013 The Flutter Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Autogenerated from Pigeon (v26.1.5), do not edit directly.
+// Autogenerated from Pigeon (v26.2.3), do not edit directly.
// See also: https://pub.dev/packages/pigeon
-// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, omit_obvious_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers
+// ignore_for_file: unused_import, unused_shown_name
+// ignore_for_file: type=lint
import 'dart:async';
-import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List;
+import 'dart:typed_data' show Float64List, Int32List, Int64List;
-import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer;
import 'package:flutter/services.dart';
+import 'package:meta/meta.dart' show immutable, protected, visibleForTesting;
-PlatformException _createConnectionError(String channelName) {
- return PlatformException(
- code: 'channel-error',
- message: 'Unable to establish connection on channel: "$channelName".',
- );
+Object? _extractReplyValueOrThrow(
+ List